10 #ifndef ROL_NEWTONSTEP_H
11 #define ROL_NEWTONSTEP_H
44 NewtonStep( ROL::ParameterList &parlist,
const bool computeObj =
true )
47 verbosity_ = parlist.sublist(
"General").get(
"Print Verbosity",0);
54 Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
57 obj.
invHessVec(s,*(step_state->gradientVec),x,tol);
63 Real tol = std::sqrt(ROL_EPSILON<Real>());
69 (step_state->descentVec)->set(s);
78 obj.
gradient(*(step_state->gradientVec),x,tol);
83 algo_state.
gnorm = (step_state->gradientVec)->norm();
87 std::stringstream hist;
90 hist << std::string(109,
'-') <<
"\n";
92 hist <<
" status output definitions\n\n";
93 hist <<
" iter - Number of iterates (steps taken) \n";
94 hist <<
" value - Objective function value \n";
95 hist <<
" gnorm - Norm of the gradient\n";
96 hist <<
" snorm - Norm of the step (update to optimization vector)\n";
97 hist <<
" #fval - Cumulative number of times the objective function was evaluated\n";
98 hist <<
" #grad - Number of times the gradient was computed\n";
99 hist << std::string(109,
'-') <<
"\n";
103 hist << std::setw(6) << std::left <<
"iter";
104 hist << std::setw(15) << std::left <<
"value";
105 hist << std::setw(15) << std::left <<
"gnorm";
106 hist << std::setw(15) << std::left <<
"snorm";
107 hist << std::setw(10) << std::left <<
"#fval";
108 hist << std::setw(10) << std::left <<
"#grad";
113 std::stringstream hist;
118 std::stringstream hist;
119 hist << std::scientific << std::setprecision(6);
120 if ( algo_state.
iter == 0 ) {
123 if ( print_header ) {
126 if ( algo_state.
iter == 0 ) {
128 hist << std::setw(6) << std::left << algo_state.
iter;
129 hist << std::setw(15) << std::left << algo_state.
value;
130 hist << std::setw(15) << std::left << algo_state.
gnorm;
135 hist << std::setw(6) << std::left << algo_state.
iter;
136 hist << std::setw(15) << std::left << algo_state.
value;
137 hist << std::setw(15) << std::left << algo_state.
gnorm;
138 hist << std::setw(15) << std::left << algo_state.
snorm;
139 hist << std::setw(10) << std::left << algo_state.
nfval;
140 hist << std::setw(10) << std::left << algo_state.
ngrad;
Provides the interface to evaluate objective functions.
NewtonStep(ROL::ParameterList &parlist, const bool computeObj=true)
Constructor.
virtual void scale(const Real alpha)=0
Compute $y \leftarrow \alpha y$ where $y = \mathtt{*this}$.
virtual void plus(const Vector &x)=0
Compute $y \leftarrow y + x$, where $y = \mathtt{*this}$.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
Provides the interface to compute optimization steps.
Contains definitions of custom data types in ROL.
std::string EDescentToString(EDescent tr)
Defines the linear algebra or vector space interface.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
std::string printName(void) const
Print step name.
Provides the interface to compute optimization steps with Newton's method globalized using line search.
State for algorithm class. Will be used for restarts.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
ROL::Ptr< StepState< Real > > getState(void)
void compute(Vector< Real > &s, const Vector< Real > &x, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
Compute step.
ROL::Ptr< Vector< Real > > iterateVec
virtual void invHessVec(Vector< Real > &hv, const Vector< Real > &v, const Vector< Real > &x, Real &tol)
Apply inverse Hessian approximation to vector.
Provides the interface to apply upper and lower bound constraints.
void update(Vector< Real > &x, const Vector< Real > &s, Objective< Real > &obj, BoundConstraint< Real > &con, AlgorithmState< Real > &algo_state)
Update step, if successful.
virtual Real norm() const =0
Returns $\|y\|$ where $y = \mathtt{*this}$.
std::string print(AlgorithmState< Real > &algo_state, bool print_header=false) const
Print iterate status.
std::string printHeader(void) const
Print iterate header.