ROL_ProjectedNewtonStep.hpp
// @HEADER
// *****************************************************************************
// Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef ROL_PROJECTEDNEWTONSTEP_H
#define ROL_PROJECTEDNEWTONSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"

/** @ingroup step_group
    \class ROL::ProjectedNewtonStep
    \brief Provides the interface to compute optimization steps
           with projected Newton's method using line search.
*/

namespace ROL {

template <class Real>
class ProjectedNewtonStep : public Step<Real> {
private:

  ROL::Ptr<Vector<Real> > gp_;  ///< Additional vector storage.
  ROL::Ptr<Vector<Real> > d_;   ///< Additional vector storage.
  int verbosity_;               ///< Verbosity setting.
  const bool computeObj_;       ///< Whether or not to compute the objective value at each iterate.
  bool useProjectedGrad_;       ///< Whether or not to use the projected gradient criticality measure.

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.

      Standard constructor to build a ProjectedNewtonStep object.  Algorithmic
      specifications are passed in through a ROL::ParameterList.

      @param[in]     parlist    is a parameter list containing algorithmic specifications
  */
  ProjectedNewtonStep( ROL::ParameterList &parlist, const bool computeObj = true )
    : Step<Real>(), gp_(ROL::nullPtr), d_(ROL::nullPtr),
      verbosity_(0), computeObj_(computeObj), useProjectedGrad_(false) {
    // Parse ParameterList
    ROL::ParameterList& Glist = parlist.sublist("General");
    useProjectedGrad_ = Glist.get("Projected Gradient Criticality Measure", false);
    verbosity_ = parlist.sublist("General").get("Print Verbosity",0);
  }
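  // A minimal configuration sketch for the two options parsed above; the
  // values shown are illustrative, not defaults taken from this file:
  //
  //   ROL::ParameterList parlist;
  //   parlist.sublist("General").set("Projected Gradient Criticality Measure", true);
  //   parlist.sublist("General").set("Print Verbosity", 1);
  //   ProjectedNewtonStep<double> step(parlist);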

  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &bnd,
                   AlgorithmState<Real> &algo_state ) {
    Step<Real>::initialize(x,s,g,obj,bnd,algo_state);
    gp_ = g.clone();
    d_  = s.clone();
  }

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Compute projected Newton step
    // ---> Apply inactive-inactive block of inverse Hessian to gradient
    gp_->set(*(step_state->gradientVec));
    bnd.pruneActive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    obj.invHessVec(s,*gp_,x,tol);
    bnd.pruneActive(s,*(step_state->gradientVec),x,algo_state.gnorm);
    // ---> Add in active gradient components
    gp_->set(*(step_state->gradientVec));
    bnd.pruneInactive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    s.plus(gp_->dual());
    s.scale(-one);
  }
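  // In symbols, the step assembled above is
  //
  //   s = -[ P_I H^{-1} P_I g + (P_A g)^# ],
  //
  // where P_A and P_I zero out the eps-active and eps-inactive components
  // (with eps = algo_state.gnorm), H^{-1} is the inverse Hessian
  // (approximation) applied by obj.invHessVec, and ^# denotes the primal
  // representation obtained from Vector::dual().  Newton curvature is used
  // only on the inactive set; eps-active components fall back to the
  // negative gradient direction.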

  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate and store previous step
    algo_state.iter++;
    d_->set(x);
    x.plus(s);
    bnd.project(x);
    (step_state->descentVec)->set(x);
    (step_state->descentVec)->axpy(-one,*d_);
    algo_state.snorm = s.norm();

    // Compute new gradient
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( useProjectedGrad_ ) {
      gp_->set(*(step_state->gradientVec));
      bnd.computeProjectedGradient( *gp_, x );
      algo_state.gnorm = gp_->norm();
    }
    else {
      d_->set(x);
      d_->axpy(-one,(step_state->gradientVec)->dual());
      bnd.project(*d_);
      d_->axpy(-one,x);
      algo_state.gnorm = d_->norm();
    }
  }
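  // The two branches above yield different criticality measures: with
  // "Projected Gradient Criticality Measure" enabled, gnorm is the norm of
  // the projected gradient from BoundConstraint::computeProjectedGradient;
  // otherwise gnorm = ||P(x - g^#) - x||, the norm of the projected gradient
  // step, which vanishes exactly at stationary points of the
  // bound-constrained problem.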

  std::string printHeader( void ) const {
    std::stringstream hist;

    if( verbosity_ > 0 ) {
      hist << std::string(109,'-') << "\n";
      hist << EDescentToString(DESCENT_NEWTON);
      hist << " status output definitions\n\n";
      hist << "  iter     - Number of iterates (steps taken) \n";
      hist << "  value    - Objective function value \n";
      hist << "  gnorm    - Norm of the gradient\n";
      hist << "  snorm    - Norm of the step (update to optimization vector)\n";
      hist << "  #fval    - Cumulative number of times the objective function was evaluated\n";
      hist << "  #grad    - Number of times the gradient was computed\n";
      hist << std::string(109,'-') << "\n";
    }

    hist << "  ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    hist << "\n";
    return hist.str();
  }
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\n" << EDescentToString(DESCENT_NEWTON) << "\n";
    return hist.str();
  }
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class ProjectedNewtonStep
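// A sketch of how this step is typically driven by the step-based
// ROL::Algorithm interface; the driver objects below (Algorithm, StatusTest,
// and the user-supplied x, obj, bnd) are assumed, not defined in this header:
//
//   ROL::ParameterList parlist;
//   ProjectedNewtonStep<double> step(parlist);
//   StatusTest<double> status(1.e-8, 1.e-12, 100);  // gtol, stol, maxit
//   Algorithm<double> algo(ROL::makePtrFromRef(step),
//                          ROL::makePtrFromRef(status), false);
//   algo.run(x, obj, bnd, true, std::cout);         // x: Vector<double>,
//                                                   // obj: Objective<double>,
//                                                   // bnd: BoundConstraint<double>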

} // namespace ROL

#endif