#ifndef ROL_NEWTONSTEP_H
#define ROL_NEWTONSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"

/** \class ROL::NewtonStep
    \brief Provides the interface to compute optimization steps with
           Newton's method globalized using line search.
*/

namespace ROL {

template <class Real>
class NewtonStep : public Step<Real> {
private:
  int  verbosity_;    ///< Print verbosity level, read from the parameter list.
  bool computeObj_;   ///< If true, recompute the objective value in update().

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;
  /** \brief Constructor.  Algorithmic specifications are passed in through a
             ROL::ParameterList; only "General" -> "Print Verbosity" is read here.
  */
  NewtonStep( ROL::ParameterList &parlist, const bool computeObj = true )
    : Step<Real>(), verbosity_(0), computeObj_(computeObj) {
    verbosity_ = parlist.sublist("General").get("Print Verbosity",0);
  }
  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Compute the unconstrained Newton step: apply the inverse Hessian
    // (approximation) to the current gradient and negate the result.
    obj.invHessVec(s,*(step_state->gradientVec),x,tol);
    s.scale(-one);
  }
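  // In other words, compute() forms the Newton direction s = -H(x)^{-1} g(x),
  // where H(x) is the Hessian (approximation) and g(x) the stored gradient;
  // this s is the exact minimizer of the local quadratic model
  // f(x) + g(x)'s + (1/2) s'H(x)s of the objective at x.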
  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate: x <- x + s.
    algo_state.iter++;
    x.plus(s);
    (step_state->descentVec)->set(s);
    algo_state.snorm = s.norm();

    // Evaluate the objective and gradient at the new iterate.
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Record the new iterate and gradient norm in the algorithm state.
    (algo_state.iterateVec)->set(x);
    algo_state.gnorm = (step_state->gradientVec)->norm();
  }
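  // update() always applies the full step x <- x + s (there is no step-size
  // safeguard here) and refreshes the bookkeeping fields of AlgorithmState
  // (value, snorm, gnorm, nfval, ngrad) that print() reports below.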
  std::string printHeader( void ) const {
    std::stringstream hist;

    if ( verbosity_ > 0 ) {
      hist << std::string(109,'-') << "\n";
      hist << EDescentToString(DESCENT_NEWTON);
      hist << " status output definitions\n\n";
      hist << "  iter     - Number of iterates (steps taken) \n";
      hist << "  value    - Objective function value \n";
      hist << "  gnorm    - Norm of the gradient\n";
      hist << "  snorm    - Norm of the step (update to optimization vector)\n";
      hist << "  #fval    - Cumulative number of times the objective function was evaluated\n";
      hist << "  #grad    - Number of times the gradient was computed\n";
      hist << std::string(109,'-') << "\n";
    }

    hist << "  ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    hist << "\n";
    return hist.str();
  }
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\n" << EDescentToString(DESCENT_NEWTON) << "\n";
    return hist.str();
  }
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      // Before the first step only iter, value, and gnorm are meaningful.
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      hist << "\n";
    }
    return hist.str();
  }

}; // class NewtonStep

} // namespace ROL

#endif // ROL_NEWTONSTEP_H
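The only problem-specific ingredient NewtonStep needs is an Objective that can apply the inverse of its Hessian. As an illustration only (the class name and problem below are made up, not part of ROL), this sketch implements the three virtuals used above, value, gradient, and invHessVec, for the quadratic f(x) = 0.5*<x,x> - <b,x>, whose Hessian is the identity so that invHessVec reduces to a copy. The signatures match the member reference that follows.

#include "ROL_Objective.hpp"
#include "ROL_Ptr.hpp"

// Hypothetical example objective (not part of ROL): f(x) = 0.5*<x,x> - <b,x>,
// so grad f(x) = x - b and Hess f(x) = I (hence invHessVec is a copy).
template<class Real>
class QuadraticObjective : public ROL::Objective<Real> {
private:
  ROL::Ptr<const ROL::Vector<Real>> b_;
public:
  QuadraticObjective( const ROL::Ptr<const ROL::Vector<Real>> &b ) : b_(b) {}

  Real value( const ROL::Vector<Real> &x, Real &tol ) override {
    return static_cast<Real>(0.5)*x.dot(x) - x.dot(*b_);
  }
  void gradient( ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol ) override {
    g.set(x);                              // g = x
    g.axpy(static_cast<Real>(-1), *b_);    // g = x - b
  }
  void invHessVec( ROL::Vector<Real> &hv, const ROL::Vector<Real> &v,
                   const ROL::Vector<Real> &x, Real &tol ) override {
    hv.set(v);                             // H = I, so H^{-1} v = v
  }
};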
Referenced classes and members:

ROL::Objective
    Provides the interface to evaluate objective functions.
NewtonStep(ROL::ParameterList &parlist, const bool computeObj=true)
    Constructor.
virtual void scale(const Real alpha)=0
    Compute y <- alpha*y, where y = *this.
virtual void plus(const Vector &x)=0
    Compute y <- y + x, where y = *this.
virtual Real value(const Vector< Real > &x, Real &tol)=0
    Compute value.
ROL::Step
    Provides the interface to compute optimization steps.
ROL_Types.hpp
    Contains definitions of custom data types in ROL.
std::string EDescentToString(EDescent tr)
ROL::Vector
    Defines the linear algebra or vector space interface.
std::string printName(void) const
    Print step name.
ROL::NewtonStep
    Provides the interface to compute optimization steps with Newton's method globalized using line search.
ROL::AlgorithmState
    State for algorithm class. Will be used for restarts.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
    Compute gradient.
ROL::Ptr< StepState< Real > > getState(void)
void compute(Vector< Real > &s, const Vector< Real > &x, Objective< Real > &obj, BoundConstraint< Real > &bnd, AlgorithmState< Real > &algo_state)
    Compute step.
ROL::Ptr< Vector< Real > > iterateVec
virtual void invHessVec(Vector< Real > &hv, const Vector< Real > &v, const Vector< Real > &x, Real &tol)
    Apply inverse Hessian approximation to vector.
ROL::BoundConstraint
    Provides the interface to apply upper and lower bound constraints.
void update(Vector< Real > &x, const Vector< Real > &s, Objective< Real > &obj, BoundConstraint< Real > &con, AlgorithmState< Real > &algo_state)
    Update step, if successful.
virtual Real norm() const =0
    Returns the norm ||y||, where y = *this.
virtual void update(const Vector< Real > &x, bool flag=true, int iter=-1)
    Update objective function.
std::string print(AlgorithmState< Real > &algo_state, bool print_header=false) const
    Print iterate status.
std::string printHeader(void) const
    Print iterate header.
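Putting the interface together, here is a minimal sketch of a driver that applies NewtonStep by hand to the hypothetical QuadraticObjective above, assuming the base-class Step::initialize overload shown here; it is an illustration of the compute/update/print interface only, since in practice a ROL algorithm object manages this loop and the AlgorithmState. The initialize() call allocates the gradient storage that compute() reads, and iterateVec is allocated explicitly because update() writes the new iterate into it. For this quadratic the Newton step is exact, so the loop converges in one iteration.

#include <iostream>
#include <vector>
#include "ROL_NewtonStep.hpp"
#include "ROL_StdVector.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_ParameterList.hpp"

int main() {
  using RealT = double;
  const int n = 5;

  // Optimization vector x (start at 0), data vector b = (1,...,1),
  // and workspace for the step s and gradient g.
  auto x_ptr = ROL::makePtr<std::vector<RealT>>(n, 0.0);
  auto b_ptr = ROL::makePtr<std::vector<RealT>>(n, 1.0);
  ROL::StdVector<RealT> x(x_ptr);
  auto b = ROL::makePtr<ROL::StdVector<RealT>>(b_ptr);
  ROL::Ptr<ROL::Vector<RealT>> s = x.clone(), g = x.clone();

  QuadraticObjective<RealT> obj(b);        // hypothetical objective sketched above
  ROL::BoundConstraint<RealT> bnd;
  bnd.deactivate();                        // unconstrained problem

  ROL::ParameterList parlist;
  parlist.sublist("General").set("Print Verbosity", 0);
  ROL::NewtonStep<RealT> step(parlist);

  ROL::AlgorithmState<RealT> state;
  state.iterateVec = x.clone();            // update() stores the current iterate here
  step.initialize(x, *g, obj, bnd, state); // evaluates f and grad f at the initial x

  std::cout << step.printHeader();
  std::cout << step.print(state);          // iteration 0 row
  for (int k = 0; k < 10 && state.gnorm > 1e-8; ++k) {
    step.compute(*s, x, obj, bnd, state);  // s = -H(x)^{-1} grad f(x)
    step.update(x, *s, obj, bnd, state);   // x <- x + s, refresh gradient and counters
    std::cout << step.print(state);
  }
  return 0;
}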