1 #ifndef STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP
2 #define STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP
// NOTE(review): this chunk is a damaged extraction of grad_hessian.hpp.
// The stray integers at the start of lines are leftover source line
// numbers from the extraction, not C++ tokens, and many lines of the
// original definition are missing (the template signature opening, the
// `try` matching the `catch` below, and the evaluation of `fx_ffvar`).
// Only fragments are visible, so the code is annotated, not rewritten.
//
// Visible fragment of the grad_hessian(...) signature: input point `x`,
// output Hessian `H`, and output gradient-of-Hessian `grad_H`.
47 const Eigen::Matrix<double, Dynamic, 1>& x,
49 Eigen::Matrix<double, Dynamic, Dynamic>& H,
50 std::vector<Eigen::Matrix<double, Dynamic, Dynamic> >&
// grad_H holds one d x d matrix per input dimension; size it up front.
// (`d` is presumably x.size(), set on a line not visible here — confirm.)
56 grad_H.resize(d, Matrix<double, Dynamic, Dynamic>(d, d));
// Loop over the upper triangle only (j starts at i); the symmetric
// entries are mirrored below when grad_H is filled.
58 for (
int i = 0; i < d; ++i) {
59 for (
int j = i; j < d; ++j) {
// Input vector of nested autodiff variables: forward-over-forward-over-
// reverse (fvar<fvar<var> >), which exposes third-order derivatives.
61 Matrix<fvar<fvar<var> >, Dynamic, 1> x_ffvar(d);
62 for (
int k = 0; k < d; ++k)
// Second directional derivative of f gives the Hessian entry.
// (fx_ffvar's computation is on a line not visible in this chunk.)
66 H(i, j) = fx_ffvar.
d_.d_.val();
// Reverse-mode adjoints of the inputs yield the gradient of H(i, j);
// store it in grad_H[i] row j and mirror into grad_H[j] row i.
69 for (
int k = 0; k < d; ++k) {
70 grad_H[i](j, k) = x_ffvar(k).val_.val_.adj();
71 grad_H[j](i, k) = grad_H[i](j, k);
72 }
76 }
// Exception path: the matching `try` is outside this visible fragment.
catch (
const std::exception&
e) {
static void grad(chainable *vi)
Compute the gradients of all dependent variables, starting from the specified root variable implementation.
void grad_hessian(const F &f, const Eigen::Matrix< double, Dynamic, 1 > &x, double &fx, Eigen::Matrix< double, Dynamic, Dynamic > &H, std::vector< Eigen::Matrix< double, Dynamic, Dynamic > > &grad_H)
Calculate the value, the Hessian, and the gradient of the Hessian of the specified function at the specified argument.
double e()
Return the base of the natural logarithm.
static void recover_memory_nested()
Recover only the memory used for the top nested call.
static void start_nested()
Record the current position so that recover_memory_nested() can find it.