#ifndef STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP
#define STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP

#include <stan/math/fwd/core.hpp>
#include <stan/math/rev/core.hpp>
#include <stan/math/prim/mat/fun/Eigen.hpp>
#include <stdexcept>
#include <vector>

namespace stan {
namespace math {

/**
 * Calculate the value, the Hessian, and the gradient of the Hessian
 * of the specified function at the specified argument.
 */
template <typename F>
void grad_hessian(
    const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x, double& fx,
    Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& H,
    std::vector<Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> >&
        grad_H) {
  using Eigen::Dynamic;
  using Eigen::Matrix;
  fx = f(x);
  int d = x.size();
  H.resize(d, d);
  grad_H.resize(d, Matrix<double, Dynamic, Dynamic>(d, d));
  try {
    for (int i = 0; i < d; ++i) {
      for (int j = i; j < d; ++j) {
        // nest the autodiff stack so memory can be recovered per (i, j) entry
        start_nested();
        Matrix<fvar<fvar<var> >, Dynamic, 1> x_ffvar(d);
        for (int k = 0; k < d; ++k)
          x_ffvar(k) = fvar<fvar<var> >(fvar<var>(x(k), i == k),
                                        fvar<var>(j == k, 0));
        fvar<fvar<var> > fx_ffvar = f(x_ffvar);
        // second-order forward directional derivative is the Hessian entry
        H(i, j) = fx_ffvar.d_.d_.val();
        H(j, i) = H(i, j);
        // reverse pass propagates adjoints for the gradient of H(i, j)
        grad(fx_ffvar.d_.d_.vi_);
        for (int k = 0; k < d; ++k) {
          grad_H[i](j, k) = x_ffvar(k).val_.val_.adj();
          grad_H[j](i, k) = grad_H[i](j, k);
        }
        recover_memory_nested();
      }
    }
  } catch (const std::exception& e) {
    recover_memory_nested();
    throw;
  }
}

}  // namespace math
}  // namespace stan
#endif
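A minimal usage sketch, not part of this header: assuming the top-level mix include <stan/math/mix/mat.hpp> is available on the include path, a functor with a templated operator() can be passed to grad_hessian as below. The functor name cubic_times_linear and the test function f(x) = x0^3 * x1 are illustrative only.

#include <stan/math/mix/mat.hpp>
#include <iostream>
#include <vector>

// Hypothetical test functor: f(x) = x0^3 * x1, templated so it can be
// instantiated with fvar<fvar<var> > as well as double.
struct cubic_times_linear {
  template <typename T>
  T operator()(const Eigen::Matrix<T, Eigen::Dynamic, 1>& x) const {
    return x(0) * x(0) * x(0) * x(1);
  }
};

int main() {
  Eigen::Matrix<double, Eigen::Dynamic, 1> x(2);
  x << 2, 3;
  double fx;
  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> H;
  std::vector<Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> > grad_H;
  // grad_hessian resizes H and grad_H; grad_H[i](j, k) holds
  // the third derivative d^3 f / (dx_i dx_j dx_k).
  stan::math::grad_hessian(cubic_times_linear(), x, fx, H, grad_H);
  std::cout << "f = " << fx << "\nH =\n" << H << std::endl;
  return 0;
}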