ROL_LineSearchStep.hpp
1 // @HEADER
2 // ************************************************************************
3 //
4 // Rapid Optimization Library (ROL) Package
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact lead developers:
38 // Drew Kouri (dpkouri@sandia.gov) and
39 // Denis Ridzal (dridzal@sandia.gov)
40 //
41 // ************************************************************************
42 // @HEADER
43 
44 #ifndef ROL_LINESEARCHSTEP_H
45 #define ROL_LINESEARCHSTEP_H
46 
47 #include "ROL_Types.hpp"
48 #include "ROL_HelperFunctions.hpp"
49 
50 #include "ROL_Step.hpp"
51 #include "ROL_Secant.hpp"
52 #include "ROL_Krylov.hpp"
53 #include "ROL_NonlinearCG.hpp"
54 #include "ROL_LineSearch.hpp"
55 #include "ROL_ProjectedHessian.hpp"
56 #include "ROL_ProjectedPreconditioner.hpp"
57 
58 #include <sstream>
59 #include <iomanip>
60 
61 /** \class ROL::LineSearchStep
62     \brief Provides the interface to compute optimization steps with line search.
63 */
130 namespace ROL {
131 
132 template <class Real>
133 class LineSearchStep : public Step<Real> {
134 private:
135 
136  Teuchos::RCP<Secant<Real> >      secant_;     ///< Secant object (used for quasi-Newton)
137  Teuchos::RCP<Krylov<Real> >      krylov_;     ///< Krylov solver object (used for inexact Newton)
138  Teuchos::RCP<NonlinearCG<Real> > nlcg_;       ///< Nonlinear CG object (used for nonlinear CG)
139  Teuchos::RCP<LineSearch<Real> >  lineSearch_; ///< Line-search object
140 
141  Teuchos::RCP<ProjectedHessian<Real> > hessian_;
142  Teuchos::RCP<ProjectedPreconditioner<Real> > precond_;
143 
144  Teuchos::RCP<Vector<Real> > d_;
145  Teuchos::RCP<Vector<Real> > gp_;
146 
147  int iterKrylov_; ///< Number of Krylov iterations (used for inexact Newton)
148  int flagKrylov_; ///< Termination flag for Krylov method (used for inexact Newton)
149 
150  EDescent            edesc_; ///< enum determines type of descent step
151  ENonlinearCG        enlcg_; ///< enum determines type of nonlinear CG
152  ELineSearch         els_;   ///< enum determines type of line search
153  ECurvatureCondition econd_; ///< enum determines type of curvature condition
154  ESecant             esec_;  ///< enum determines type of secant approximation
155  EKrylov             ekv_;   ///< enum determines type of Krylov solver
156 
157  int ls_nfval_; ///< Number of function evaluations during line search
158  int ls_ngrad_; ///< Number of gradient evaluations during line search
159 
160  bool useSecantHessVec_; ///< Whether or not a secant approximation is used for Hessian-times-a-vector
161  bool useSecantPrecond_; ///< Whether or not a secant approximation is used for preconditioning inexact Newton
162 
163  bool useProjectedGrad_; ///< Whether or not to use the projected gradient criticality measure
164 
165  std::vector<bool> useInexact_; ///< Flags for inexact objective function, gradient, and Hessian evaluation
166 
167  bool softUp_; ///< Whether the objective function changes between iterations ("Variable Objective Function")
168 
169  void LineSearchFactory(Teuchos::ParameterList &parlist) {
170  switch(els_) {
171  case LINESEARCH_ITERATIONSCALING:
172  lineSearch_ = Teuchos::rcp( new IterationScaling<Real>(parlist) ); break;
173  case LINESEARCH_PATHBASEDTARGETLEVEL:
174  lineSearch_ = Teuchos::rcp( new PathBasedTargetLevel<Real>(parlist) ); break;
175  case LINESEARCH_BACKTRACKING:
176  lineSearch_ = Teuchos::rcp( new BackTracking<Real>(parlist) ); break;
177  case LINESEARCH_BISECTION:
178  lineSearch_ = Teuchos::rcp( new Bisection<Real>(parlist) ); break;
179  case LINESEARCH_BRENTS:
180  lineSearch_ = Teuchos::rcp( new Brents<Real>(parlist) ); break;
181  case LINESEARCH_GOLDENSECTION:
182  lineSearch_ = Teuchos::rcp( new GoldenSection<Real>(parlist) ); break;
183  case LINESEARCH_CUBICINTERP:
184  default:
185  lineSearch_ = Teuchos::rcp( new CubicInterp<Real>(parlist) ); break;
186  }
187  }
188 
189  void KrylovFactory(Teuchos::ParameterList &parlist) {
190  Real absTol = parlist.get("Absolute Krylov Tolerance", 1.e-4);
191  Real relTol = parlist.get("Relative Krylov Tolerance", 1.e-2);
192  int maxit = parlist.get("Maximum Number of Krylov Iterations", 20);
193  switch(ekv_) {
194  case KRYLOV_CR:
195  krylov_ = Teuchos::rcp( new ConjugateResiduals<Real>(absTol,relTol,maxit,useInexact_[2]) ); break;
196  case KRYLOV_CG:
197  default:
198  krylov_ = Teuchos::rcp( new ConjugateGradients<Real>(absTol,relTol,maxit,useInexact_[2]) ); break;
199  }
200  }
201 
202 public:
203 
204  virtual ~LineSearchStep() {}
205 
213  LineSearchStep( Teuchos::ParameterList &parlist ) : Step<Real>() {
214  // Enumerations
215  edesc_ = StringToEDescent(parlist.get("Descent Type","Quasi-Newton Method"));
216  enlcg_ = StringToENonlinearCG(parlist.get("Nonlinear CG Type","Hager-Zhang"));
217  els_ = StringToELineSearch(parlist.get("Linesearch Type","Cubic Interpolation"));
218  econd_ = StringToECurvatureCondition(parlist.get("Linesearch Curvature Condition","Strong Wolfe Conditions"));
219  esec_ = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
220  ekv_ = StringToEKrylov(parlist.get("Krylov Type","Conjugate Gradients"));
221 
222  // Inexactness Information
223  useInexact_.clear();
224  useInexact_.push_back(parlist.get("Use Inexact Objective Function", false));
225  useInexact_.push_back(parlist.get("Use Inexact Gradient", false));
226  useInexact_.push_back(parlist.get("Use Inexact Hessian-Times-A-Vector", false));
227 
228  // Initialize Linesearch Object
229  useProjectedGrad_ = parlist.get("Use Projected Gradient Criticality Measure", false);
230  LineSearchFactory(parlist);
231 
232  // Changing Objective Functions
233  softUp_ = parlist.get("Variable Objective Function",false);
234 
235  // Initialize Krylov Object
236  useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
237  useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
238  krylov_ = Teuchos::null;
239  iterKrylov_ = 0;
240  flagKrylov_ = 0;
241  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
242  KrylovFactory(parlist);
243  }
244 
245  // Initialize Secant Object
246  secant_ = Teuchos::null;
247  if ( edesc_ == DESCENT_SECANT || useSecantHessVec_ || useSecantPrecond_ ) {
248  int L = parlist.get("Maximum Secant Storage",10);
249  int BBtype = parlist.get("Barzilai-Borwein Type",1);
250  secant_ = getSecant<Real>(esec_,L,BBtype);
251  }
252  if ( edesc_ == DESCENT_SECANT ) {
253  useSecantHessVec_ = true;
254  }
255 
256  // Initialize Nonlinear CG Object
257  nlcg_ = Teuchos::null;
258  if ( edesc_ == DESCENT_NONLINEARCG ) {
259  nlcg_ = Teuchos::rcp( new NonlinearCG<Real>(enlcg_) );
260  }
261 
262  ls_nfval_ = 0;
263  ls_ngrad_ = 0;
264  }
265 
274  LineSearchStep(Teuchos::RCP<LineSearch<Real> > &lineSearch, Teuchos::ParameterList &parlist)
275  : Step<Real>(), lineSearch_(lineSearch) {
276  // Enumerations
277  edesc_ = StringToEDescent(parlist.get("Descent Type","Quasi-Newton Method"));
278  enlcg_ = StringToENonlinearCG(parlist.get("Nonlinear CG Type","Hager-Zhang"));
279  econd_ = StringToECurvatureCondition(parlist.get("Linesearch Curvature Condition","Strong Wolfe Conditions"));
280  esec_ = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
281  ekv_ = StringToEKrylov(parlist.get("Krylov Type","Conjugate Gradients"));
282 
283  // Inexactness Information
284  useInexact_.clear();
285  useInexact_.push_back(parlist.get("Use Inexact Objective Function", false));
286  useInexact_.push_back(parlist.get("Use Inexact Gradient", false));
287  useInexact_.push_back(parlist.get("Use Inexact Hessian-Times-A-Vector", false));
288 
289  // Initialize Linesearch Object
290  useProjectedGrad_ = parlist.get("Use Projected Gradient Criticality Measure", false);
291  els_ = LINESEARCH_USERDEFINED;
292 
293  // Changing Objective Functions
294  softUp_ = parlist.get("Variable Objective Function",false);
295 
296  // Initialize Krylov Object
297  useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
298  useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
299  krylov_ = Teuchos::null;
300  iterKrylov_ = 0;
301  flagKrylov_ = 0;
302  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
303  KrylovFactory(parlist);
304  }
305 
306  // Initialize Secant Object
307  secant_ = Teuchos::null;
308  if ( edesc_ == DESCENT_SECANT || useSecantHessVec_ || useSecantPrecond_ ) {
309  int L = parlist.get("Maximum Secant Storage",10);
310  int BBtype = parlist.get("Barzilai-Borwein Type",1);
311  secant_ = getSecant<Real>(esec_,L,BBtype);
312  }
313  if ( edesc_ == DESCENT_SECANT ) {
314  useSecantHessVec_ = true;
315  }
316 
317  // Initialize Nonlinear CG Object
318  nlcg_ = Teuchos::null;
319  if ( edesc_ == DESCENT_NONLINEARCG ) {
320  nlcg_ = Teuchos::rcp( new NonlinearCG<Real>(enlcg_) );
321  }
322 
323  ls_nfval_ = 0;
324  ls_ngrad_ = 0;
325  }
326 
336  LineSearchStep( Teuchos::RCP<Secant<Real> > &secant, Teuchos::ParameterList &parlist )
337  : Step<Real>(), secant_(secant) {
338  // Enumerations
339  edesc_ = StringToEDescent(parlist.get("Descent Type","Quasi-Newton Method"));
340  enlcg_ = StringToENonlinearCG(parlist.get("Nonlinear CG Type","Hager-Zhang"));
341  els_ = StringToELineSearch(parlist.get("Linesearch Type","Cubic Interpolation"));
342  econd_ = StringToECurvatureCondition(parlist.get("Linesearch Curvature Condition","Strong Wolfe Conditions"));
343  esec_ = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
344  ekv_ = StringToEKrylov(parlist.get("Krylov Type","Conjugate Gradients"));
345 
346  // Inexactness Information
347  useInexact_.clear();
348  useInexact_.push_back(parlist.get("Use Inexact Objective Function", false));
349  useInexact_.push_back(parlist.get("Use Inexact Gradient", false));
350  useInexact_.push_back(parlist.get("Use Inexact Hessian-Times-A-Vector", false));
351 
352  // Initialize Linesearch Object
353  useProjectedGrad_ = parlist.get("Use Projected Gradient Criticality Measure", false);
354  LineSearchFactory(parlist);
355 
356  // Changing Objective Functions
357  softUp_ = parlist.get("Variable Objective Function",false);
358 
359  // Initialize Krylov Object
360  useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
361  useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
362  krylov_ = Teuchos::null;
363  iterKrylov_ = 0;
364  flagKrylov_ = 0;
365  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
366  KrylovFactory(parlist);
367  }
368 
369  // Secant Information
370  if ( edesc_ == DESCENT_SECANT ) {
371  useSecantHessVec_ = true;
372  }
373 
374  // Initialize Nonlinear CG Object
375  nlcg_ = Teuchos::null;
376  if ( edesc_ == DESCENT_NONLINEARCG ) {
377  nlcg_ = Teuchos::rcp( new NonlinearCG<Real>(enlcg_) );
378  }
379 
380  ls_nfval_ = 0;
381  ls_ngrad_ = 0;
382  }
383 
392  LineSearchStep( Teuchos::RCP<Krylov<Real> > &krylov, Teuchos::ParameterList &parlist )
393  : Step<Real>(), krylov_(krylov) {
394  // Enumerations
395  edesc_ = StringToEDescent(parlist.get("Descent Type","Quasi-Newton Method"));
396  enlcg_ = StringToENonlinearCG(parlist.get("Nonlinear CG Type","Hager-Zhang"));
397  els_ = StringToELineSearch(parlist.get("Linesearch Type","Cubic Interpolation"));
398  econd_ = StringToECurvatureCondition(parlist.get("Linesearch Curvature Condition","Strong Wolfe Conditions"));
399  esec_ = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
400  ekv_ = StringToEKrylov(parlist.get("Krylov Type","Conjugate Gradients"));
401 
402  // Inexactness Information
403  useInexact_.clear();
404  useInexact_.push_back(parlist.get("Use Inexact Objective Function", false));
405  useInexact_.push_back(parlist.get("Use Inexact Gradient", false));
406  useInexact_.push_back(parlist.get("Use Inexact Hessian-Times-A-Vector", false));
407 
408  // Initialize Linesearch Object
409  useProjectedGrad_ = parlist.get("Use Projected Gradient Criticality Measure", false);
410  LineSearchFactory(parlist);
411 
412  // Changing Objective Functions
413  softUp_ = parlist.get("Variable Objective Function",false);
414 
415  // Initialize Krylov Object
416  useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
417  useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
418  iterKrylov_ = 0;
419  flagKrylov_ = 0;
420 
421  // Initialize Secant Object
422  secant_ = Teuchos::null;
423  if ( edesc_ == DESCENT_SECANT || useSecantHessVec_ || useSecantPrecond_ ) {
424  int L = parlist.get("Maximum Secant Storage",10);
425  int BBtype = parlist.get("Barzilai-Borwein Type",1);
426  secant_ = getSecant<Real>(esec_,L,BBtype);
427  }
428  if ( edesc_ == DESCENT_SECANT ) {
429  useSecantHessVec_ = true;
430  }
431 
432  // Initialize Nonlinear CG Object
433  nlcg_ = Teuchos::null;
434  if ( edesc_ == DESCENT_NONLINEARCG ) {
435  nlcg_ = Teuchos::rcp( new NonlinearCG<Real>(enlcg_) );
436  }
437 
438  ls_nfval_ = 0;
439  ls_ngrad_ = 0;
440  }
441 
452  LineSearchStep( Teuchos::RCP<LineSearch<Real> > &lineSearch, Teuchos::RCP<Secant<Real> > &secant,
453  Teuchos::ParameterList &parlist ) : Step<Real>(), lineSearch_(lineSearch), secant_(secant) {
454  // Enumerations
455  edesc_ = StringToEDescent(parlist.get("Descent Type","Quasi-Newton Method"));
456  enlcg_ = StringToENonlinearCG(parlist.get("Nonlinear CG Type","Hager-Zhang"));
457  econd_ = StringToECurvatureCondition(parlist.get("Linesearch Curvature Condition","Strong Wolfe Conditions"));
458  esec_ = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
459  ekv_ = StringToEKrylov(parlist.get("Krylov Type","Conjugate Gradients"));
460 
461  // Inexactness Information
462  useInexact_.clear();
463  useInexact_.push_back(parlist.get("Use Inexact Objective Function", false));
464  useInexact_.push_back(parlist.get("Use Inexact Gradient", false));
465  useInexact_.push_back(parlist.get("Use Inexact Hessian-Times-A-Vector", false));
466 
467  // Initialize Linesearch Object
468  useProjectedGrad_ = parlist.get("Use Projected Gradient Criticality Measure", false);
469  els_ = LINESEARCH_USERDEFINED;
470 
471  // Changing Objective Functions
472  softUp_ = parlist.get("Variable Objective Function",false);
473 
474  // Initialize Krylov Object
475  useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
476  useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
477  krylov_ = Teuchos::null;
478  iterKrylov_ = 0;
479  flagKrylov_ = 0;
480  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
481  KrylovFactory(parlist);
482  }
483 
484  // Secant Information
485  if ( edesc_ == DESCENT_SECANT ) {
486  useSecantHessVec_ = true;
487  }
488 
489  // Initialize Nonlinear CG Object
490  nlcg_ = Teuchos::null;
491  if ( edesc_ == DESCENT_NONLINEARCG ) {
492  nlcg_ = Teuchos::rcp( new NonlinearCG<Real>(enlcg_) );
493  }
494 
495  ls_nfval_ = 0;
496  ls_ngrad_ = 0;
497  }
498 
499  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
500  Objective<Real> &obj, BoundConstraint<Real> &con,
501  AlgorithmState<Real> &algo_state ) {
502  Step<Real>::initialize(x,s,g,obj,con,algo_state);
503  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
504  lineSearch_->initialize(x, s, *(step_state->gradientVec),obj,con);
505  if ( edesc_ == DESCENT_NEWTONKRYLOV || edesc_ == DESCENT_NEWTON || edesc_ == DESCENT_SECANT ) {
506  Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcp(&obj, false);
507  Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
508  hessian_ = Teuchos::rcp(
509  new ProjectedHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,step_state->gradientVec,
510  useSecantHessVec_));
511  precond_ = Teuchos::rcp(
512  new ProjectedPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,
513  step_state->gradientVec,useSecantPrecond_));
514  }
515  if ( con.isActivated() ) {
516  d_ = s.clone();
517  }
518  if ( con.isActivated() || edesc_ == DESCENT_SECANT
519  || useSecantHessVec_ || useSecantPrecond_ ) {
520  gp_ = g.clone();
521  }
522  }
523 
524 
536  /** \brief Compute step. */
537  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj,
538  BoundConstraint<Real> &con,
539  AlgorithmState<Real> &algo_state ) {
540  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
541 
542  Real tol = std::sqrt(ROL_EPSILON);
543 
544  // Set active set parameter
545  Real eps = 0.0;
546  if ( con.isActivated() ) {
547  eps = algo_state.gnorm;
548  }
549  lineSearch_->setData(eps);
550  if ( hessian_ != Teuchos::null ) {
551  hessian_->setData(eps);
552  }
553  if ( precond_ != Teuchos::null ) {
554  precond_->setData(eps);
555  }
556 
557  // Compute step s
558  switch(edesc_) {
559  case DESCENT_NEWTONKRYLOV:
560  flagKrylov_ = 0;
561  krylov_->run(s,*hessian_,*(step_state->gradientVec),*precond_,iterKrylov_,flagKrylov_);
562  break;
563  case DESCENT_NEWTON:
564  case DESCENT_SECANT:
565  hessian_->applyInverse(s,*(step_state->gradientVec),tol);
566  break;
567  case DESCENT_NONLINEARCG:
568  nlcg_->run(s,*(step_state->gradientVec),x,obj);
569  break;
570  case DESCENT_STEEPEST:
571  s.set(step_state->gradientVec->dual());
572  break;
573  default: break;
574  }
575 
576  // Compute g.dot(s)
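  // Note: when bound constraints are active, the directional derivative below is assembled
  // from the eps-inactive components of the step plus a projected-gradient contribution on
  // the eps-active set; for steepest descent it reduces to the projected-gradient step x - P(x - g).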
577  Real gs = 0.0;
578  if ( !con.isActivated() ) {
579  gs = -s.dot((step_state->gradientVec)->dual());
580  }
581  else {
582  if ( edesc_ == DESCENT_STEEPEST ) {
583  d_->set(x);
584  d_->axpy(-1.0,s);
585  con.project(*d_);
586  d_->scale(-1.0);
587  d_->plus(x);
588  //d->set(s);
589  //con.pruneActive(*d,s,x,eps);
590  //con.pruneActive(*d,*(step_state->gradientVec),x,eps);
591  gs = -d_->dot((step_state->gradientVec)->dual());
592  }
593  else {
594  d_->set(s);
595  con.pruneActive(*d_,*(step_state->gradientVec),x,eps);
596  gs = -d_->dot((step_state->gradientVec)->dual());
597  d_->set(x);
598  d_->axpy(-1.0,(step_state->gradientVec)->dual());
599  con.project(*d_);
600  d_->scale(-1.0);
601  d_->plus(x);
602  con.pruneInactive(*d_,*(step_state->gradientVec),x,eps);
603  gs -= d_->dot((step_state->gradientVec)->dual());
604  }
605  }
606 
607  // Check if s is a descent direction i.e., g.dot(s) < 0
608  if ( gs >= 0.0 || (flagKrylov_ == 2 && iterKrylov_ <= 1) ) {
609  s.set((step_state->gradientVec)->dual());
610  if ( con.isActivated() ) {
611  d_->set(s);
612  con.pruneActive(*d_,s,x);
613  gs = -d_->dot((step_state->gradientVec)->dual());
614  }
615  else {
616  gs = -s.dot((step_state->gradientVec)->dual());
617  }
618  }
619  s.scale(-1.0);
620 
621  // Perform line search
622  Real fnew = algo_state.value;
623  ls_nfval_ = 0;
624  ls_ngrad_ = 0;
625  lineSearch_->run(step_state->searchSize,fnew,ls_nfval_,ls_ngrad_,gs,s,x,obj,con);
626  algo_state.nfval += ls_nfval_;
627  algo_state.ngrad += ls_ngrad_;
628 
629  // Compute scaled descent direction
630  s.scale(step_state->searchSize);
631  if ( con.isActivated() ) {
632  s.plus(x);
633  con.project(s);
634  s.axpy(-1.0,x);
635  }
636 
637  // Update step state information
638  (step_state->descentVec)->set(s);
639 
640  // Update algorithm state information
641  algo_state.snorm = s.norm();
642  algo_state.value = fnew;
643  }
644 
654  /** \brief Update step, if successful. */
655  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj,
656  BoundConstraint<Real> &con,
657  AlgorithmState<Real> &algo_state ) {
658  Real tol = std::sqrt(ROL_EPSILON);
659  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
660 
661  // Update iterate
662  algo_state.iter++;
663  x.axpy(1.0, s);
664  if ( softUp_ ) {
665  obj.update(x,true,algo_state.iter);
666  }
667 
668  // Compute new gradient
669  if ( edesc_ == DESCENT_SECANT ||
670  useSecantHessVec_ || useSecantPrecond_ ) {
671  gp_->set(*(step_state->gradientVec));
672  }
673  obj.gradient(*(step_state->gradientVec),x,tol);
674  algo_state.ngrad++;
675 
676  // Update Secant Information
677  if ( edesc_ == DESCENT_SECANT ||
678  useSecantHessVec_ || useSecantPrecond_ ) {
679  secant_->update(*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);
680  }
681 
682  // Update algorithm state
683  (algo_state.iterateVec)->set(x);
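  // The criticality measure reported in gnorm is either the norm of the projected gradient or
  // the norm of x - P(x - g) when bounds are active, and the plain gradient norm otherwise.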
684  if ( con.isActivated() ) {
685  if ( useProjectedGrad_ ) {
686  gp_->set(*(step_state->gradientVec));
687  con.computeProjectedGradient( *gp_, x );
688  algo_state.gnorm = gp_->norm();
689  }
690  else {
691  d_->set(x);
692  d_->axpy(-1.0,(step_state->gradientVec)->dual());
693  con.project(*d_);
694  d_->axpy(-1.0,x);
695  algo_state.gnorm = d_->norm();
696  }
697  }
698  else {
699  algo_state.gnorm = (step_state->gradientVec)->norm();
700  }
701  }
702 
707  std::string printHeader( void ) const {
708  std::stringstream hist;
709  hist << " ";
710  hist << std::setw(6) << std::left << "iter";
711  hist << std::setw(15) << std::left << "value";
712  hist << std::setw(15) << std::left << "gnorm";
713  hist << std::setw(15) << std::left << "snorm";
714  hist << std::setw(10) << std::left << "#fval";
715  hist << std::setw(10) << std::left << "#grad";
716  hist << std::setw(10) << std::left << "ls_#fval";
717  hist << std::setw(10) << std::left << "ls_#grad";
718  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
719  hist << std::setw(10) << std::left << "iterCG";
720  hist << std::setw(10) << std::left << "flagCG";
721  }
722  hist << "\n";
723  return hist.str();
724  }
725 
730  std::string printName( void ) const {
731  std::stringstream hist;
732  hist << "\n" << EDescentToString(edesc_)
733  << " with " << ELineSearchToString(els_)
734  << " Linesearch satisfying "
736  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
737  hist << "Krylov Type: " << EKrylovToString(ekv_) << "\n";
738  }
739  if ( edesc_ == DESCENT_SECANT ||
740  useSecantHessVec_ || useSecantPrecond_ ) {
741  hist << "Secant Type: " << ESecantToString(esec_) << "\n";
742  }
743  if ( edesc_ == DESCENT_NONLINEARCG ) {
744  hist << "Nonlinear CG Type: " << ENonlinearCGToString(enlcg_) << "\n";
745  }
746  return hist.str();
747  }
748 
756  std::string print( AlgorithmState<Real> & algo_state, bool print_header = false ) const {
757  std::stringstream hist;
758  hist << std::scientific << std::setprecision(6);
759  if ( algo_state.iter == 0 ) {
760  hist << printName();
761  }
762  if ( print_header ) {
763  hist << printHeader();
764  }
765  if ( algo_state.iter == 0 ) {
766  hist << " ";
767  hist << std::setw(6) << std::left << algo_state.iter;
768  hist << std::setw(15) << std::left << algo_state.value;
769  hist << std::setw(15) << std::left << algo_state.gnorm;
770  hist << "\n";
771  }
772  else {
773  hist << " ";
774  hist << std::setw(6) << std::left << algo_state.iter;
775  hist << std::setw(15) << std::left << algo_state.value;
776  hist << std::setw(15) << std::left << algo_state.gnorm;
777  hist << std::setw(15) << std::left << algo_state.snorm;
778  hist << std::setw(10) << std::left << algo_state.nfval;
779  hist << std::setw(10) << std::left << algo_state.ngrad;
780  hist << std::setw(10) << std::left << ls_nfval_;
781  hist << std::setw(10) << std::left << ls_ngrad_;
782  if ( edesc_ == DESCENT_NEWTONKRYLOV ) {
783  hist << std::setw(10) << std::left << iterKrylov_;
784  hist << std::setw(10) << std::left << flagKrylov_;
785  }
786  hist << "\n";
787  }
788  return hist.str();
789  }
790 
791  // struct StepState (scalars, vectors) map?
792 
793  // getState
794 
795  // setState
796 
797 }; // class LineSearchStep
798 
799 } // namespace ROL
800 
801 #endif
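
A minimal configuration sketch, not part of the header above: it exercises only the ParameterList constructor defined in this file, and the parameter names and string values are the ones that constructor reads (shown here with its own defaults). The objective, vector, status test, and algorithm driver that would normally surround the step come from the rest of the ROL/Teuchos API and are omitted.

#include "Teuchos_ParameterList.hpp"
#include "ROL_LineSearchStep.hpp"

typedef double RealT;

int main(int argc, char *argv[]) {
  // Options read by LineSearchStep(Teuchos::ParameterList &); the strings below are
  // the defaults that appear in the constructor above.
  Teuchos::ParameterList parlist;
  parlist.set("Descent Type",                   "Quasi-Newton Method");
  parlist.set("Linesearch Type",                "Cubic Interpolation");
  parlist.set("Linesearch Curvature Condition", "Strong Wolfe Conditions");
  parlist.set("Secant Type",                    "Limited-Memory BFGS");
  parlist.set("Maximum Secant Storage",         10);

  // Construct the step.  A full driver would pass this object, together with an
  // Objective, a Vector of optimization variables, and a status test, to ROL's
  // algorithm loop, which repeatedly calls compute() and update() as defined above.
  ROL::LineSearchStep<RealT> step(parlist);

  return 0;
}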