ROL_ProjectedSecantStep.hpp

// @HEADER
// *****************************************************************************
// Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef ROL_PROJECTEDSECANTSTEP_H
#define ROL_PROJECTEDSECANTSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"
#include "ROL_Secant.hpp"

#include <sstream>  // std::stringstream, used by the print routines below
#include <iomanip>  // std::setw, std::setprecision

namespace ROL {

/** \class ROL::ProjectedSecantStep
    \brief Provides the interface to compute optimization steps
           with a projected secant method using line search.
*/
template <class Real>
class ProjectedSecantStep : public Step<Real> {
private:

  ROL::Ptr<Secant<Real> > secant_; ///< Secant object (used for quasi-Newton)
  ESecant esec_;                   ///< Secant type
  ROL::Ptr<Vector<Real> > d_;      ///< Additional vector storage
  ROL::Ptr<Vector<Real> > gp_;     ///< Additional vector storage
  int verbosity_;                  ///< Verbosity level
  const bool computeObj_;          ///< If true, update() evaluates the objective function
  bool useProjectedGrad_;          ///< Whether or not to use the projected gradient criticality measure

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.
  */
  ProjectedSecantStep( ROL::ParameterList &parlist,
                       const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
                       const bool computeObj = true )
    : Step<Real>(), secant_(secant),
      esec_(SECANT_USERDEFINED), // default secant type when a user-defined secant is supplied
      d_(ROL::nullPtr), gp_(ROL::nullPtr),
      verbosity_(0), computeObj_(computeObj), useProjectedGrad_(false) {
    // Parse ParameterList
    ROL::ParameterList& Glist = parlist.sublist("General");
    useProjectedGrad_ = Glist.get("Projected Gradient Criticality Measure", false);
    verbosity_ = Glist.get("Print Verbosity",0);
    // Initialize secant object
    if ( secant == ROL::nullPtr ) {
      esec_ = StringToESecant(parlist.sublist("General").sublist("Secant").get("Type","Limited-Memory BFGS"));
      secant_ = SecantFactory<Real>(parlist);
    }
  }

  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &bnd,
                   AlgorithmState<Real> &algo_state ) {
    Step<Real>::initialize(x,s,g,obj,bnd,algo_state);
    d_  = s.clone();
    gp_ = g.clone();
  }

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real one(1);

    // Compute projected secant step
    // ---> Apply inactive-inactive block of inverse secant to gradient
    gp_->set(*(step_state->gradientVec));
    bnd.pruneActive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    secant_->applyH(s,*gp_);
    bnd.pruneActive(s,*(step_state->gradientVec),x,algo_state.gnorm);
    // ---> Add in active gradient components
    gp_->set(*(step_state->gradientVec));
    bnd.pruneInactive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    s.plus(gp_->dual());
    s.scale(-one);
  }

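  // Note on the step just assembled: writing P_I and P_A for the operators
  // that zero the active and inactive components, respectively (at tolerance
  // algo_state.gnorm), and H for the inverse secant approximation, compute()
  // returns s = -( P_I H P_I g + (P_A g)^dual ).
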
  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate and store previous step
    algo_state.iter++;
    d_->set(x);
    x.plus(s);
    bnd.project(x);
    (step_state->descentVec)->set(x);
    (step_state->descentVec)->axpy(-one,*d_);
    algo_state.snorm = s.norm();

    // Compute new gradient
    gp_->set(*(step_state->gradientVec));
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update secant information
    secant_->updateStorage(x,*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( useProjectedGrad_ ) {
      gp_->set(*(step_state->gradientVec));
      bnd.computeProjectedGradient( *gp_, x );
      algo_state.gnorm = gp_->norm();
    }
    else {
      d_->set(x);
      d_->axpy(-one,(step_state->gradientVec)->dual());
      bnd.project(*d_);
      d_->axpy(-one,x);
      algo_state.gnorm = d_->norm();
    }
  }

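  // Note on the criticality measure: with useProjectedGrad_ enabled, gnorm is
  // the norm of the projected gradient; otherwise it is the projected-gradient
  // residual || x - P( x - grad f(x)^dual ) ||, where P projects onto the
  // bound-constraint set.
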
  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::stringstream hist;

    if( verbosity_ > 0 ) {
      hist << std::string(109,'-') << "\n";
      hist << EDescentToString(DESCENT_SECANT);
      hist << " status output definitions\n\n";
      hist << "  iter     - Number of iterates (steps taken) \n";
      hist << "  value    - Objective function value \n";
      hist << "  gnorm    - Norm of the gradient\n";
      hist << "  snorm    - Norm of the step (update to optimization vector)\n";
      hist << "  #fval    - Cumulative number of times the objective function was evaluated\n";
      hist << "  #grad    - Number of times the gradient was computed\n";
      hist << std::string(109,'-') << "\n";
    }

    hist << "  ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\n" << EDescentToString(DESCENT_SECANT);
    hist << " with " << ESecantToString(esec_) << "\n";
    return hist.str();
  }

  /** \brief Print iterate status.
  */
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class ProjectedSecantStep

} // namespace ROL

#endif
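
A minimal construction sketch, for orientation: the sublist and key names below are exactly the ones parsed by the constructor above, while the helper name and the chosen values are illustrative assumptions only.

#include "ROL_ProjectedSecantStep.hpp"

// Hypothetical helper that configures and builds a ProjectedSecantStep.
template<class Real>
ROL::Ptr<ROL::Step<Real> > makeProjectedSecantStep() {
  ROL::ParameterList parlist;
  ROL::ParameterList &glist = parlist.sublist("General");
  glist.set("Projected Gradient Criticality Measure", true);
  glist.set("Print Verbosity", 1);
  glist.sublist("Secant").set("Type", "Limited-Memory BFGS");
  // Passing no secant object makes the constructor build one via SecantFactory.
  return ROL::makePtr<ROL::ProjectedSecantStep<Real> >(parlist);
}

A driver then calls initialize() once and alternates compute() and update() until algo_state.gnorm is sufficiently small; in ROL this loop is typically supplied by an algorithm class paired with a status test rather than written by hand.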