ROL_TypeU_LineSearchAlgorithm_Def.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_TYPEU_LINESEARCHALGORITHM_DEF_H
#define ROL_TYPEU_LINESEARCHALGORITHM_DEF_H

#include "ROL_DescentDirection_U_Factory.hpp"
#include "ROL_LineSearch_U_Factory.hpp"

namespace ROL {
namespace TypeU {

template<typename Real>
LineSearchAlgorithm<Real>::LineSearchAlgorithm( ParameterList &parlist,
                                                const Ptr<Secant<Real>> &secant,
                                                const Ptr<DescentDirection_U<Real>> &descent,
                                                const Ptr<LineSearch_U<Real>> &lineSearch )
  : Algorithm<Real>(), desc_(descent), lineSearch_(lineSearch),
    edesc_(DESCENT_U_USERDEFINED), els_(LINESEARCH_U_USERDEFINED),
    econd_(CURVATURECONDITION_U_WOLFE), verbosity_(0) {
  // Set status test
  status_->reset();
  status_->add(makePtr<StatusTest<Real>>(parlist));

  // Parse parameter list
  ParameterList& Llist = parlist.sublist("Step").sublist("Line Search");
  ParameterList& Glist = parlist.sublist("General");
  econd_ = StringToECurvatureConditionU(Llist.sublist("Curvature Condition").get("Type","Strong Wolfe Conditions"));
  acceptLastAlpha_ = Llist.get("Accept Last Alpha", false);
  verbosity_ = Glist.get("Output Level",0);
  printHeader_ = verbosity_ > 2;
  // Initialize line search
  if (lineSearch_ == nullPtr) {
    lineSearchName_ = Llist.sublist("Line-Search Method").get("Type","Cubic Interpolation");
    els_ = StringToELineSearchU(lineSearchName_);
    lineSearch_ = LineSearchUFactory<Real>(parlist);
  }
  else { // User-defined line search provided
    lineSearchName_ = Llist.sublist("Line-Search Method").get("User Defined Line Search Name","Unspecified User Defined Line Search");
  }
  if (desc_ == nullPtr) {
    ParameterList& dlist = Llist.sublist("Descent Method");
    descentName_ = dlist.get("Type","Quasi-Newton Method");
    edesc_ = StringToEDescentU(descentName_);
    desc_ = DescentDirectionUFactory<Real>(parlist,secant);
  }
  else {
    descentName_ = Llist.sublist("Descent Method").get("User Defined Descent Direction Name","Unspecified User Defined Descent Direction");
  }
}

template<typename Real>
void LineSearchAlgorithm<Real>::initialize(const Vector<Real> &x,
                                           const Vector<Real> &g,
                                           Objective<Real>    &obj,
                                           std::ostream       &outStream) {
  // Initialize data
  Algorithm<Real>::initialize(x,g);
  lineSearch_->initialize(x,g);
  desc_->initialize(x,g);
  // Update approximate gradient and approximate objective function.
  Real ftol = std::sqrt(ROL_EPSILON<Real>());
  obj.update(x,UpdateType::Initial,state_->iter);
  state_->value = obj.value(x,ftol);
  state_->nfval++;
  obj.gradient(*state_->gradientVec,x,ftol);
  state_->ngrad++;
  state_->gnorm = state_->gradientVec->norm();
  state_->snorm = ROL_INF<Real>();
}

template<typename Real>
void LineSearchAlgorithm<Real>::run( Vector<Real>       &x,
                                     const Vector<Real> &g,
                                     Objective<Real>    &obj,
                                     std::ostream       &outStream ) {
  const Real one(1);
  // Initialize line-search data
  Real ftrial(0), gs(0), tol(std::sqrt(ROL_EPSILON<Real>()));
  initialize(x,g,obj,outStream);
  state_->searchSize = one;
  Ptr<Vector<Real>> gprev = g.clone();

  // Output
  if (verbosity_ > 0) writeOutput(outStream, true);

  while (status_->check(*state_)) {
    // Compute descent direction
    desc_->compute(*state_->stepVec,state_->snorm,gs,SPiter_,SPflag_,x,
                   *state_->gradientVec,obj);

    // Perform line search
    ftrial = state_->value;
    ls_nfval_ = 0; ls_ngrad_ = 0;
    lineSearch_->run(state_->searchSize,ftrial,ls_nfval_,ls_ngrad_,gs,*state_->stepVec,x,obj);

    // Make correction if maximum function evaluations reached
    if (!acceptLastAlpha_) {
      lineSearch_->setMaxitUpdate(state_->searchSize,ftrial,state_->value);
    }

    // Compute scaled descent direction
    state_->stepVec->scale(state_->searchSize);
    state_->snorm *= state_->searchSize;

    // Update iterate
    x.plus(*state_->stepVec);

    // Compute new value and gradient
    obj.update(x,UpdateType::Accept,state_->iter);
    state_->value = obj.value(x,tol);
    state_->nfval++;
    gprev->set(*state_->gradientVec);
    obj.gradient(*state_->gradientVec,x,tol);
    state_->ngrad++;
    state_->gnorm = state_->gradientVec->norm();

    // Update algorithm state
    state_->iterateVec->set(x);
    state_->nfval += ls_nfval_;
    state_->ngrad += ls_ngrad_;
    desc_->update(x,*state_->stepVec,*gprev,*state_->gradientVec,state_->snorm,state_->iter);
    state_->iter++;

    // Update output
    if (verbosity_ > 0) writeOutput(outStream,printHeader_);
  }
  if (verbosity_ > 0) Algorithm<Real>::writeExitStatus(outStream);
}

template<typename Real>
void LineSearchAlgorithm<Real>::writeHeader( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  if (verbosity_ > 1) {
    os << std::string(109,'-') << std::endl;
    os << descentName_;
    os << " status output definitions" << std::endl << std::endl;
    os << "  iter     - Number of iterates (steps taken)" << std::endl;
    os << "  value    - Objective function value" << std::endl;
    os << "  gnorm    - Norm of the gradient" << std::endl;
    os << "  snorm    - Norm of the step (update to optimization vector)" << std::endl;
    os << "  alpha    - Line search step length" << std::endl;
    os << "  #fval    - Cumulative number of times the objective function was evaluated" << std::endl;
    os << "  #grad    - Cumulative number of times the gradient was computed" << std::endl;
    os << "  ls_#fval - Number of times the objective function was evaluated during the line search" << std::endl;
    os << "  ls_#grad - Number of times the gradient was evaluated during the line search" << std::endl;
    if (edesc_ == DESCENT_U_NEWTONKRYLOV) {
      os << "  iterCG   - Number of Krylov iterations used to compute search direction" << std::endl;
      os << "  flagCG   - Krylov solver flag" << std::endl;
    }
    os << std::string(109,'-') << std::endl;
  }

  os << "  ";
  os << std::setw(6)  << std::left << "iter";
  os << std::setw(15) << std::left << "value";
  os << std::setw(15) << std::left << "gnorm";
  os << std::setw(15) << std::left << "snorm";
  os << std::setw(15) << std::left << "alpha";
  os << std::setw(10) << std::left << "#fval";
  os << std::setw(10) << std::left << "#grad";
  os << std::setw(10) << std::left << "ls_#fval";
  os << std::setw(10) << std::left << "ls_#grad";
  if (edesc_ == DESCENT_U_NEWTONKRYLOV) {
    os << std::setw(10) << std::left << "iterCG";
    os << std::setw(10) << std::left << "flagCG";
  }
  os << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void LineSearchAlgorithm<Real>::writeName( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::endl << desc_->printName();
  os << std::endl;
  os << "Line Search: " << lineSearchName_;
  os << " satisfying " << ECurvatureConditionUToString(econd_) << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void LineSearchAlgorithm<Real>::writeOutput( std::ostream& os, bool print_header ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) {
    writeName(os);
  }
  if ( print_header ) {
    writeHeader(os);
  }
  if ( state_->iter == 0 ) {
    os << "  ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << "---";
    os << std::setw(15) << std::left << "---";
    os << std::setw(10) << std::left << state_->nfval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << "---";
    os << std::setw(10) << std::left << "---";
    if (edesc_ == DESCENT_U_NEWTONKRYLOV) {
      os << std::setw(10) << std::left << "---";
      os << std::setw(10) << std::left << "---";
    }
    os << std::endl;
  }
  else {
    os << "  ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << state_->snorm;
    os << std::setw(15) << std::left << state_->searchSize;
    os << std::setw(10) << std::left << state_->nfval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << ls_nfval_;
    os << std::setw(10) << std::left << ls_ngrad_;
    if (edesc_ == DESCENT_U_NEWTONKRYLOV) {
      os << std::setw(10) << std::left << SPiter_;
      os << std::setw(10) << std::left << SPflag_;
    }
    os << std::endl;
  }
  os.flags(osFlags);
}
} // namespace TypeU
} // namespace ROL

#endif
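
Usage sketch. The algorithm is configured entirely from a ParameterList in the constructor and driven by run(). The example below is illustrative and not part of the ROL sources: the toy quadratic objective QuadObjective, the chosen parameter values, and the exact set of included headers are assumptions, while the sublist and key names mirror the strings parsed in the constructor above.

// Illustrative usage sketch (not part of the ROL sources): minimize
// f(x) = 0.5*||x||^2 over R^10 with the Type-U line-search algorithm above.
#include <iostream>
#include <vector>

#include "ROL_ParameterList.hpp"
#include "ROL_StdObjective.hpp"
#include "ROL_StdVector.hpp"
#include "ROL_TypeU_LineSearchAlgorithm.hpp"

// Toy objective (assumed name "QuadObjective"): value and gradient of 0.5*||x||^2.
template<typename Real>
class QuadObjective : public ROL::StdObjective<Real> {
public:
  Real value( const std::vector<Real> &x, Real &tol ) override {
    Real val(0);
    for (const auto &xi : x) val += static_cast<Real>(0.5)*xi*xi;
    return val;
  }
  void gradient( std::vector<Real> &g, const std::vector<Real> &x, Real &tol ) override {
    g = x;  // gradient of 0.5*||x||^2 is x
  }
};

int main() {
  using Real = double;

  // Sublist and key names correspond to the strings parsed in the constructor.
  ROL::ParameterList parlist;
  ROL::ParameterList &lslist = parlist.sublist("Step").sublist("Line Search");
  lslist.sublist("Descent Method").set("Type","Quasi-Newton Method");
  lslist.sublist("Line-Search Method").set("Type","Cubic Interpolation");
  lslist.sublist("Curvature Condition").set("Type","Strong Wolfe Conditions");
  parlist.sublist("General").set("Output Level",1);

  // Initial guess x0 = (1,...,1) in R^10 and the toy objective.
  ROL::Ptr<std::vector<Real>> xp = ROL::makePtr<std::vector<Real>>(10, 1.0);
  ROL::StdVector<Real> x(xp);
  QuadObjective<Real> obj;

  // Build the algorithm from the parameter list and run it.
  ROL::TypeU::LineSearchAlgorithm<Real> algo(parlist);
  algo.run(x, x.dual(), obj, std::cout);
  return 0;
}

Passing x.dual() as the gradient-space vector matches the run() overload defined in this file; with "Output Level" set to 1, the iteration table produced by writeOutput is printed to std::cout.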