// NOTE(review): extraction artifact — the leading integers fused into the
// text (10, 11, 12, ...) are original source line numbers, and the
// constructor's opening signature lies before this chunk. Treat this
// fragment as documentation of what is visible, not as compilable code.
//
// Tail of the NewtonTrust constructor: initializes the trust-region
// hyperparameters from a boost::property_tree, falling back to the
// defaults shown when a key is absent.
10 boost::property_tree::ptree
const& pt) :
Optimizer(cost, pt),
// Largest trust-region radius ever allowed (default: unbounded).
11 maxRadius(pt.
get(
"MaxRadius", std::numeric_limits<double>::infinity())),
// Starting radius; the default is clamped so it never exceeds maxRadius.
12 initialRadius(pt.
get(
"InitialRadius", std::min(1.0,maxRadius))),
// Threshold on the reduction ratio rho — presumably a step is accepted
// when rho exceeds this value (acceptance logic not visible here; confirm).
13 acceptRatio(pt.
get(
"AcceptRatio", 0.05)),
// Ratio/rate pair presumably governing when and how much the radius
// shrinks after a poor step — TODO confirm against the (missing) Solve body.
14 shrinkRatio(pt.
get(
"ShrinkRatio", 0.25)),
// Ratio presumably above which the radius is grown — TODO confirm.
15 growRatio(pt.
get(
"GrowRatio", 0.75)),
// Multiplicative factor applied when shrinking the radius.
16 shrinkRate(pt.
get(
"ShrinkRate",0.25)),
// Multiplicative factor applied when growing the radius.
17 growRate(pt.
get(
"GrowRate", 2.0)),
// Convergence tolerance tied to the trust radius; never looser than the
// absolute x-tolerance inherited from the base Optimizer.
18 trustTol(pt.
get(
"TrustTol", std::min(1
e-4,xtol_abs))),
// Verbosity level; 0 (the default) presumably suppresses the iteration
// log printed in Solve — confirm against the missing guard there.
19 printLevel(pt.
get(
"PrintLevel", 0)){
// Solve the optimization problem with a trust-region Newton iteration.
//
// @param inputs  inputs.at(0) is the starting point x0.
// @return (final iterate, final cost value).
//
// NOTE(review): extraction artifact — the fused numbers (24, 29, 30, ...)
// are original source line numbers, and their jumps show that interior
// lines were dropped: the main loop header, the acceptance test on rho,
// and the radius shrink/grow updates are NOT visible in this chunk.
24 std::pair<Eigen::VectorXd, double> NewtonTrust::Solve(std::vector<Eigen::VectorXd>
const& inputs) {
// Copy the starting point; x is the current iterate, mutated in place.
29 Eigen::VectorXd
const& x0 = inputs.at(0);
30 Eigen::VectorXd x = x0;
// Iteration-log header — presumably guarded by printLevel>0 in a
// dropped line; confirm.
34 std::cout <<
"Using NewtonTrust optimizer..." << std::endl;
35 std::cout <<
" Iteration, TrustRadius, fval, ||g||" << std::endl;
// Gradient of the cost at the current iterate, via the cost object.
44 Eigen::VectorXd
const& grad =
opt->Gradient();
// Per-iteration log line. NOTE(review): std::sprintf writes into `buf`
// (declared in a dropped line) with no bound — prefer std::snprintf;
// cannot be changed here because the buffer declaration is not visible.
48 std::sprintf(
buf,
" %9d, %11.3f, %4.3e, %5.3e\n", it,
trustRadius, fval, grad.norm());
49 std::cout << std::string(
buf);
// Early return (condition in a dropped line — presumably a convergence
// test on the gradient norm; confirm).
53 return std::make_pair(x,fval);
// Predicted change of the quadratic model along `step`:
// g.s + 0.5 s^T H s (negative for a descent step).
57 double modDelta = grad.dot(step)+0.5*step.dot(
opt->ApplyHessian(step));
// Trial point and its cost.
58 Eigen::VectorXd newX = x+step;
60 double newf =
opt->Cost(newX);
// rho = actual reduction / predicted reduction; used by the (missing)
// acceptance and radius-update logic.
61 double rho = (newf-fval)/modDelta;
// Two returns of the accepted trial point — the distinguishing
// conditions are in dropped lines.
67 return std::make_pair(newX,newf);
70 return std::make_pair(newX,newf);
// Fallback return of the last accepted iterate.
85 return std::make_pair(x,fval);
// Trust-region subproblem solver — apparently a Steihaug-type truncated
// conjugate-gradient method for  min_s  g.s + 0.5 s^T H s  s.t. ||s|| <= radius,
// where H is applied matrix-free via opt->ApplyHessian.
//
// NOTE(review): extraction artifact — the declaration line, the alpha
// computation, the negative-curvature / residual convergence checks, and
// the function's closing lines were dropped; only the fragments below
// are visible. `r2` is presumably trustRadius squared — confirm.
90 Eigen::VectorXd
const& x0,
91 Eigen::VectorXd
const& grad) {
92 const unsigned int dim = x0.size();
// CG state: z is the accumulated step (starts at the origin),
// r the residual (starts at the gradient), d the search direction.
95 Eigen::VectorXd z = Eigen::VectorXd::Zero(dim);
98 Eigen::VectorXd r = grad;
99 Eigen::VectorXd d = -r;
107 double alpha, beta, gradd, dBd, rr;
// At most dim CG iterations (exact termination in exact arithmetic).
109 for(
int i=0; i<dim; ++i){
// Hessian-vector product for the current direction.
110 Bd =
opt->ApplyHessian(d);
// Residual norm squared, saved for the beta update below.
113 rr = r.squaredNorm();
// Boundary case: find tau with ||z + tau*d||^2 = r2, i.e. the roots of
// dd*tau^2 + 2*dz*tau + (zz - r2) = 0.
120 double dz = d.dot(z);
121 double dd = d.squaredNorm();
122 double zz = z.squaredNorm();
125 double tau1 = (-dz + sqrt(dz*dz - dd*(zz-r2)))/dd;
126 double tau2 = (-dz - sqrt(dz*dz - dd*(zz-r2)))/dd;
// Pick the boundary point with the smaller model value along d.
// NOTE(review): the model difference along d is tau*(g.d) + tau*(z^T H d)
// + 0.5*tau^2*(d^T H d); the 0.5 factor on the quadratic term appears to
// be missing here, which could select the wrong root — verify upstream.
128 double zBd = z.dot(Bd);
129 double mval1 = tau1*gradd + tau1*zBd + tau1*tau1*dBd;
130 double mval2 = tau2*gradd + tau2*zBd + tau2*tau2*dBd;
132 return (mval1<mval2) ? (z+tau1*d) : (z+tau2*d);
// Standard CG trial update (alpha computed in a dropped line —
// presumably rr/dBd; confirm).
136 Eigen::VectorXd newZ = z + alpha * d;
// Step left the trust region: step back to the boundary with the
// positive root of the same quadratic as above.
140 double dz = d.dot(z);
141 double dd = d.squaredNorm();
142 double zz = z.squaredNorm();
145 double tau = (-dz + sqrt(dz*dz - dd*(zz-r2)))/dd;
// CG direction update: beta = ||r_new||^2 / ||r_old||^2, then
// d <- -r + beta*d. .eval() forces evaluation before d is overwritten,
// avoiding Eigen aliasing on the self-referencing expression.
156 beta = r.squaredNorm() / rr;
157 d = (-r + beta*d).eval();
REGISTER_OPTIMIZER(NewtonTrust, NewtonTrust) NewtonTrust
Newton optimizer with trust region to ensure global convergence.
const double initialRadius
Eigen::VectorXd SolveSub(double fval, Eigen::VectorXd const &x0, Eigen::VectorXd const &grad)
NewtonTrust(std::shared_ptr< muq::Modeling::ModPiece > const &cost, boost::property_tree::ptree const &pt)
const unsigned int printLevel
Solve an optimization problem.
std::shared_ptr< CostFunction > opt
The cost function that we are trying to minimize.
const unsigned int maxEvals
Maximum number of cost function evaluations.
auto get(const nlohmann::detail::iteration_proxy_value< IteratorType > &i) -> decltype(i.key())