#ifndef PBAT_MATH_OPTIMIZATION_NEWTON_H
#define PBAT_MATH_OPTIMIZATION_NEWTON_H
template <class TScalar = Scalar>
Eigen::Vector<TScalar, Eigen::Dynamic> dxk;
Eigen::Vector<TScalar, Eigen::Dynamic> gk;
class FPrepareDerivatives,
class FHessianInverseProduct,
FPrepareDerivatives prepareDerivatives,
FHessianInverseProduct Hinv,
Eigen::MatrixBase<TDerivedX>& xk,
template <class TScalar>
template <class TScalar>
class FPrepareDerivatives,
class FHessianInverseProduct,
FPrepareDerivatives prepareDerivatives,
FHessianInverseProduct Hinv,
Eigen::MatrixBase<TDerivedX>& xk,
prepareDerivatives(xk);
gnorm2 = gk.squaredNorm();
lineSearch->Solve(f, gk, dxk, xk);
prepareDerivatives(xk);
Header file for line search algorithms.
Namespace for optimization algorithms.
Definition BranchAndBound.h:7
std::ptrdiff_t Index
Index type.
Definition Aliases.h:17
Definition LineSearch.h:20
TScalar gtol2
Gradient squared norm threshold for convergence.
Definition Newton.h:29
Newton(int nMaxIters=10, TScalar gtol=TScalar(1e-4), Index n=0)
Construct a new Newton optimizer.
Definition Newton.h:74
Eigen::Vector< TScalar, Eigen::Dynamic > gk
Gradient at current iteration.
Definition Newton.h:31
Eigen::Vector< TScalar, Eigen::Dynamic > dxk
Step direction.
Definition Newton.h:30
int nMaxIters
Maximum number of iterations for the Newton solver.
Definition Newton.h:28
TScalar Solve(FPrepareDerivatives prepareDerivatives, FObjective f, FGradient g, FHessianInverseProduct Hinv, Eigen::MatrixBase< TDerivedX > &xk, std::optional< BackTrackingLineSearch< TScalar > > lineSearch=std::nullopt)
Solve the optimization problem using Newton's method.
Definition Newton.h:86