1 #ifndef STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP 2 #define STAN_MATH_MIX_MAT_FUNCTOR_GRAD_HESSIAN_HPP 43 const F& f,
const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
double& fx,
44 Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& H,
45 std::vector<Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> >&
52 grad_H.resize(d, Matrix<double, Dynamic, Dynamic>(d, d));
54 for (
int i = 0; i < d; ++i) {
55 for (
int j = i; j < d; ++j) {
57 Matrix<fvar<fvar<var> >, Dynamic, 1> x_ffvar(d);
58 for (
int k = 0; k < d; ++k)
62 H(i, j) = fx_ffvar.
d_.d_.val();
65 for (
int k = 0; k < d; ++k) {
66 grad_H[i](j, k) = x_ffvar(k).val_.val_.adj();
67 grad_H[j](i, k) = grad_H[i](j, k);
72 }
catch (
const std::exception&
e) {
T d_
The tangent (derivative) of this variable.
static void grad(vari *vi)
Compute the gradient for all variables starting from the specified root variable implementation.
void grad_hessian(const F &f, const Eigen::Matrix< double, Eigen::Dynamic, 1 > &x, double &fx, Eigen::Matrix< double, Eigen::Dynamic, Eigen::Dynamic > &H, std::vector< Eigen::Matrix< double, Eigen::Dynamic, Eigen::Dynamic > > &grad_H)
Calculate the value, the Hessian, and the gradient of the Hessian of the specified function at the specified argument.
double e()
Return the base of the natural logarithm.
static void recover_memory_nested()
Recover only the memory used for the top nested call.
static void start_nested()
Record the current position so that recover_memory_nested() can find it.
This template class represents scalars used in forward-mode automatic differentiation, which consist of values and directional derivatives of the specified template type.