ROL_LineSearchStep.hpp
// @HEADER
// *****************************************************************************
//               Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef ROL_LINESEARCHSTEP_H
#define ROL_LINESEARCHSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"
#include "ROL_LineSearch.hpp"

// Unconstrained Methods
#include "ROL_GradientStep.hpp"
#include "ROL_NonlinearCGStep.hpp"
#include "ROL_SecantStep.hpp"
#include "ROL_NewtonStep.hpp"
#include "ROL_NewtonKrylovStep.hpp"

// Bound Constrained Methods
#include "ROL_ProjectedSecantStep.hpp"
#include "ROL_ProjectedNewtonStep.hpp"
#include "ROL_ProjectedNewtonKrylovStep.hpp"

#include <sstream>
#include <iomanip>
#include <algorithm> // std::remove, used in printHeader() and print()

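/** \class ROL::LineSearchStep
    \brief Provides the interface to compute optimization steps with line search.

    A minimal usage sketch (not part of this header).  The parameter strings
    mirror those parsed by the constructor and initialize() below; the objective
    \c obj, bound constraint \c bnd, stopping tolerance \c gtol, and the vectors
    \c x, \c s, \c g are assumed to be user-provided ROL::Objective,
    ROL::BoundConstraint, and ROL::Vector objects (\c s is a clone of \c x and
    \c g lives in the dual space):
    \code
    ROL::ParameterList parlist;
    ROL::ParameterList &lslist = parlist.sublist("Step").sublist("Line Search");
    lslist.sublist("Descent Method").set("Type","Quasi-Newton Method");
    lslist.sublist("Line-Search Method").set("Type","Cubic Interpolation");
    lslist.sublist("Curvature Condition").set("Type","Strong Wolfe Conditions");

    ROL::LineSearchStep<double>   step(parlist);
    ROL::AlgorithmState<double>   state;
    step.initialize(x,s,g,obj,bnd,state);
    bool converged = false;
    while ( !converged ) {
      step.compute(s,x,obj,bnd,state);   // globalized (line-search) trial step
      step.update(x,s,obj,bnd,state);    // accept the step and update the state
      converged = (state.gnorm < gtol);  // user-chosen stopping test
    }
    \endcode
*/
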
namespace ROL {

template <class Real>
class LineSearchStep : public Step<Real> {
private:

  ROL::Ptr<Step<Real> >        desc_;       ///< Unglobalized step object
  ROL::Ptr<Secant<Real> >      secant_;     ///< Secant object (used for quasi-Newton)
  ROL::Ptr<Krylov<Real> >      krylov_;     ///< Krylov solver object (used for inexact Newton)
  ROL::Ptr<NonlinearCG<Real> > nlcg_;       ///< Nonlinear CG object (used for nonlinear CG)
  ROL::Ptr<LineSearch<Real> >  lineSearch_; ///< Line-search object

  ROL::Ptr<Vector<Real> > d_;               ///< Workspace vector

  ELineSearch         els_;   ///< Enum determines type of line search
  ECurvatureCondition econd_; ///< Enum determines type of curvature condition

  bool acceptLastAlpha_;  ///< For backwards compatibility; when max function evaluations are reached, take the last step

  bool usePreviousAlpha_; ///< If true, use the previously accepted step length (if any) as the new initial step length

  int  verbosity_;        ///< Print verbosity level
  bool computeObj_;       ///< If true, recompute the objective value during update
  Real fval_;             ///< Objective value returned by the line search

  ROL::ParameterList parlist_;

  std::string lineSearchName_;

  /** \brief Compute the directional derivative of the objective along the step.

      Without bound constraints this is \f$\langle g, s\rangle\f$.  With bound
      constraints, the step is pruned on the \f$\epsilon\f$-active set and a
      correction based on the projected-gradient displacement \f$x - P(x - g)\f$,
      restricted to that set, is subtracted; the result is used in compute() to
      test whether the (projected) step is a descent direction.
  */
  Real GradDotStep(const Vector<Real> &g, const Vector<Real> &s,
                   const Vector<Real> &x,
                   BoundConstraint<Real> &bnd, Real eps = 0) {
    Real gs(0), one(1);
    if (!bnd.isActivated()) {
      gs = s.dot(g.dual());
    }
    else {
      d_->set(s);
      bnd.pruneActive(*d_,g,x,eps);
      gs = d_->dot(g.dual());
      d_->set(x);
      d_->axpy(-one,g.dual());
      bnd.project(*d_);
      d_->scale(-one);
      d_->plus(x);
      bnd.pruneInactive(*d_,g,x,eps);
      gs -= d_->dot(g.dual());
    }
    return gs;
  }

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.

      Algorithmic specifications are passed in through a ROL::ParameterList.
      User-defined line-search, secant, Krylov, and nonlinear CG objects may be
      supplied; otherwise they are built from the parameter list.
  */
  LineSearchStep( ROL::ParameterList &parlist,
                  const ROL::Ptr<LineSearch<Real> >  &lineSearch = ROL::nullPtr,
                  const ROL::Ptr<Secant<Real> >      &secant     = ROL::nullPtr,
                  const ROL::Ptr<Krylov<Real> >      &krylov     = ROL::nullPtr,
                  const ROL::Ptr<NonlinearCG<Real> > &nlcg       = ROL::nullPtr )
    : Step<Real>(), desc_(ROL::nullPtr), secant_(secant),
      krylov_(krylov), nlcg_(nlcg), lineSearch_(lineSearch),
      els_(LINESEARCH_USERDEFINED), econd_(CURVATURECONDITION_WOLFE),
      verbosity_(0), computeObj_(true), fval_(0), parlist_(parlist) {
    // Parse parameter list
    ROL::ParameterList& Llist = parlist.sublist("Step").sublist("Line Search");
    ROL::ParameterList& Glist = parlist.sublist("General");
    econd_ = StringToECurvatureCondition(Llist.sublist("Curvature Condition").get("Type","Strong Wolfe Conditions"));
    acceptLastAlpha_ = Llist.get("Accept Last Alpha", false);
    verbosity_ = Glist.get("Print Verbosity",0);
    computeObj_ = Glist.get("Recompute Objective Function",false);
    // Initialize line search
    if (lineSearch_ == ROL::nullPtr) {
      lineSearchName_ = Llist.sublist("Line-Search Method").get("Type","Cubic Interpolation");
      els_ = StringToELineSearch(lineSearchName_);
      lineSearch_ = LineSearchFactory<Real>(parlist);
    }
    else { // User-defined line search provided
      lineSearchName_ = Llist.sublist("Line-Search Method").get("User Defined Line-Search Name",
                                                                "Unspecified User Defined Line-Search");
    }
  }

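  /* Example: supplying a user-defined line search (a sketch; MyLineSearch is a
     hypothetical class derived from ROL::LineSearch<Real>).  The name set below
     is only used when printing the step description:

       parlist.sublist("Step").sublist("Line Search").sublist("Line-Search Method")
              .set("User Defined Line-Search Name","My Line Search");
       auto ls = ROL::makePtr<MyLineSearch<double>>(parlist);
       ROL::LineSearchStep<double> step(parlist, ls);
  */
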
  /** \brief Initialize step with bound constraint.
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &bnd,
                   AlgorithmState<Real> &algo_state ) {
    d_ = x.clone();

    // Initialize unglobalized step
    ROL::ParameterList& list
      = parlist_.sublist("Step").sublist("Line Search").sublist("Descent Method");
    EDescent edesc = StringToEDescent(list.get("Type","Quasi-Newton Method"));
    if (bnd.isActivated()) {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = ROL::makePtr<GradientStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = ROL::makePtr<NonlinearCGStep<Real>>(parlist_,nlcg_,computeObj_);
          break;
        }
        case DESCENT_SECANT: {
          desc_ = ROL::makePtr<ProjectedSecantStep<Real>>(parlist_,secant_,computeObj_);
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = ROL::makePtr<ProjectedNewtonStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = ROL::makePtr<ProjectedNewtonKrylovStep<Real>>(parlist_,krylov_,secant_,computeObj_);
          break;
        }
        default:
          ROL_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    else {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = ROL::makePtr<GradientStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = ROL::makePtr<NonlinearCGStep<Real>>(parlist_,nlcg_,computeObj_);
          break;
        }
        case DESCENT_SECANT: {
          desc_ = ROL::makePtr<SecantStep<Real>>(parlist_,secant_,computeObj_);
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = ROL::makePtr<NewtonStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = ROL::makePtr<NewtonKrylovStep<Real>>(parlist_,krylov_,secant_,computeObj_);
          break;
        }
        default:
          ROL_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    desc_->initialize(x,s,g,obj,bnd,algo_state);

    // Initialize line search
    lineSearch_->initialize(x,s,g,obj,bnd);
    //const ROL::Ptr<const StepState<Real> > desc_state = desc_->getStepState();
    //lineSearch_->initialize(x,s,*(desc_state->gradientVec),obj,bnd);
  }

  /** \brief Compute step.

      Computes the unglobalized (descent) step, safeguards it to ensure descent,
      and then globalizes it with the line search.
  */
  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real zero(0), one(1);
    // Compute unglobalized step
    desc_->compute(s,x,obj,bnd,algo_state);

    // Ensure that s is a descent direction
    // ---> If not, then default to steepest descent
    const ROL::Ptr<const StepState<Real> > desc_state = desc_->getStepState();
    Real gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
    if (gs >= zero) {
      s.set((desc_state->gradientVec)->dual());
      s.scale(-one);
      gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
    }

    // Perform line search
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    fval_ = algo_state.value;
    step_state->nfval = 0; step_state->ngrad = 0;
    lineSearch_->setData(algo_state.gnorm,*(desc_state->gradientVec));
    lineSearch_->run(step_state->searchSize,fval_,step_state->nfval,step_state->ngrad,gs,s,x,obj,bnd);

    // Make correction if maximum function evaluations reached
    if (!acceptLastAlpha_) {
      lineSearch_->setMaxitUpdate(step_state->searchSize,fval_,algo_state.value);
    }

    // Compute scaled descent direction; with bounds, s <- P(x + alpha*s) - x
    s.scale(step_state->searchSize);
    if ( bnd.isActivated() ) {
      s.plus(x);
      bnd.project(s);
      s.axpy(static_cast<Real>(-1),x);
    }
  }

  /** \brief Update step, if successful.
  */
  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    algo_state.nfval += step_state->nfval;
    algo_state.ngrad += step_state->ngrad;
    desc_->update(x,s,obj,bnd,algo_state);
    step_state->flag   = desc_->getStepState()->flag;
    step_state->SPiter = desc_->getStepState()->SPiter;
    step_state->SPflag = desc_->getStepState()->SPflag;
    if ( !computeObj_ ) {
      algo_state.value = fval_;
    }
  }

  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::string head = desc_->printHeader();
    head.erase(std::remove(head.end()-3,head.end(),'\n'), head.end());
    std::stringstream hist;
    hist.write(head.c_str(),head.length());
    hist << std::setw(10) << std::left << "ls_#fval";
    hist << std::setw(10) << std::left << "ls_#grad";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::string name = desc_->printName();
    std::stringstream hist;
    hist << name;
    hist << "Line Search: " << lineSearchName_;
    hist << " satisfying " << ECurvatureConditionToString(econd_) << "\n";
    return hist.str();
  }

  /** \brief Print iterate status.
  */
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    const ROL::Ptr<const StepState<Real> > step_state = Step<Real>::getStepState();
    std::string desc = desc_->print(algo_state,false);
    desc.erase(std::remove(desc.end()-3,desc.end(),'\n'), desc.end());
    std::string name = desc_->printName();
    size_t pos = desc.find(name);
    if ( pos != std::string::npos ) {
      desc.erase(pos, name.length());
    }

    std::stringstream hist;
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    hist << desc;
    if ( algo_state.iter == 0 ) {
      hist << "\n";
    }
    else {
      hist << std::setw(10) << std::left << step_state->nfval;
      hist << std::setw(10) << std::left << step_state->ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class LineSearchStep

} // namespace ROL

#endif