Stan Math Library 2.9.0
reverse mode automatic differentiation
jacobian.hpp
Go to the documentation of this file.
1 #ifndef STAN_MATH_REV_MAT_FUNCTOR_JACOBIAN_HPP
2 #define STAN_MATH_REV_MAT_FUNCTOR_JACOBIAN_HPP
3 
5 #include <stan/math/rev/core.hpp>
6 #include <vector>
7 
8 namespace stan {
9 
10  namespace math {
11 
12  template <typename F>
13  void
14  jacobian(const F& f,
15  const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
16  Eigen::Matrix<double, Eigen::Dynamic, 1>& fx,
17  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& J) {
18  using Eigen::Matrix;
19  using Eigen::Dynamic;
20  using stan::math::var;
21  start_nested();
22  try {
23  Matrix<var, Dynamic, 1> x_var(x.size());
24  for (int k = 0; k < x.size(); ++k)
25  x_var(k) = x(k);
26  Matrix<var, Dynamic, 1> fx_var = f(x_var);
27  fx.resize(fx_var.size());
28  for (int i = 0; i < fx_var.size(); ++i)
29  fx(i) = fx_var(i).val();
30  J.resize(x.size(), fx_var.size());
31  for (int i = 0; i < fx_var.size(); ++i) {
32  if (i > 0)
34  grad(fx_var(i).vi_);
35  for (int k = 0; k < x.size(); ++k)
36  J(k, i) = x_var(k).adj();
37  }
38  } catch (const std::exception& e) {
40  throw;
41  }
43  }
44 
45  }
46 }
47 #endif
static void set_zero_all_adjoints()
Reset all adjoint values in the stack to zero.
Independent (input) and dependent (output) variables for gradients.
Definition: var.hpp:31
static void grad(vari *vi)
Compute the gradient for all variables, starting from the specified root variable implementation (vari).
Definition: grad.hpp:30
void jacobian(const F &f, const Eigen::Matrix< T, Eigen::Dynamic, 1 > &x, Eigen::Matrix< T, Eigen::Dynamic, 1 > &fx, Eigen::Matrix< T, Eigen::Dynamic, Eigen::Dynamic > &J)
Definition: jacobian.hpp:14
double e()
Return the base of the natural logarithm.
Definition: constants.hpp:95
static void recover_memory_nested()
Recover only the memory used for the top nested call.
static void start_nested()
Record the current position so that recover_memory_nested() can find it.

     [ Stan Home Page ] © 2011–2015, Stan Development Team.