ROL_TypeP_QuasiNewtonAlgorithm_Def.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_TYPEP_QUASINEWTONALGORITHM_DEF_HPP
#define ROL_TYPEP_QUASINEWTONALGORITHM_DEF_HPP

#include "ROL_TypeP_ProxGradientAlgorithm.hpp"
#include "ROL_TypeP_SpectralGradientAlgorithm.hpp"
#include "ROL_TypeP_iPianoAlgorithm.hpp"
#include "ROL_PQNObjective.hpp"

namespace ROL {
namespace TypeP {

template<typename Real>
QuasiNewtonAlgorithm<Real>::QuasiNewtonAlgorithm(ParameterList &list,
                                                 const Ptr<Secant<Real>> &secant)
  : secant_(secant), esec_(SECANT_USERDEFINED), list_(list), hasLEC_(true) {
  // Set status test
  status_->reset();
  status_->add(makePtr<StatusTest<Real>>(list));

  // Parse parameter list
  ParameterList &lslist = list.sublist("Step").sublist("Line Search");
  t0_       = list.sublist("Status Test").get("Gradient Scale", 1.0);
  initProx_ = lslist.get("Apply Prox to Initial Guess", false);
  maxit_    = lslist.get("Function Evaluation Limit", 20);
  c1_       = lslist.get("Sufficient Decrease Tolerance", 1e-4);
  rhodec_   = lslist.sublist("Line-Search Method").get("Backtracking Rate", 0.5);
  sigma1_   = lslist.sublist("PQN").get("Lower Step Size Safeguard", 0.1);
  sigma2_   = lslist.sublist("PQN").get("Upper Step Size Safeguard", 0.9);
  algoName_ = lslist.sublist("PQN").get("Subproblem Solver", "Spectral Gradient");
  int sp_maxit = lslist.sublist("PQN").get("Subproblem Iteration Limit", 1000);
  sp_tol1_  = lslist.sublist("PQN").get("Subproblem Absolute Tolerance", 1e-4);
  sp_tol2_  = lslist.sublist("PQN").get("Subproblem Relative Tolerance", 1e-2);
  Real opt_tol = lslist.sublist("Status Test").get("Gradient Tolerance", 1e-8);
  sp_tol_min_  = static_cast<Real>(1e-2)*opt_tol;
  verbosity_   = list.sublist("General").get("Output Level", 0);
  writeHeader_ = verbosity_ > 2;

  list_.sublist("Status Test").set("Iteration Limit", sp_maxit);
  list_.sublist("General").set("Output Level", verbosity_ > 0 ? verbosity_-1 : 0);

  if ( secant_ == nullPtr ) {
    secantName_ = list.sublist("General").sublist("Secant").get("Type", "Limited-Memory BFGS");
    esec_       = StringToESecant(secantName_);
    secant_     = SecantFactory<Real>(list);
  }
  else {
    secantName_ = list.sublist("General").sublist("Secant").get("User Defined Secant Name",
                                                                "Unspecified User Defined Secant Method");
  }
}

template<typename Real>
void QuasiNewtonAlgorithm<Real>::initialize(Vector<Real>       &x,
                                            const Vector<Real> &g,
                                            Objective<Real>    &sobj,
                                            Objective<Real>    &nobj,
                                            Vector<Real>       &dg,
                                            std::ostream       &outStream) {
  const Real one(1);
  Real tol(std::sqrt(ROL_EPSILON<Real>()));
  // Initialize data
  TypeP::Algorithm<Real>::initialize(x,g);
  // Update approximate gradient and approximate objective function.
  Real ftol = std::sqrt(ROL_EPSILON<Real>());
  if (initProx_) {
    state_->iterateVec->set(x);
    nobj.prox(x,*state_->iterateVec,one,tol); state_->nprox++;
  }
  sobj.update(x,UpdateType::Initial,state_->iter);
  nobj.update(x,UpdateType::Initial,state_->iter);
  state_->svalue = sobj.value(x,ftol); state_->nsval++;
  state_->nvalue = nobj.value(x,ftol); state_->nnval++;
  state_->value  = state_->svalue + state_->nvalue;
  sobj.gradient(*state_->gradientVec,x,ftol); state_->ngrad++;
  dg.set(state_->gradientVec->dual());
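  // Stationarity measure (descriptive note): take one proximal-gradient step of
  // length t0_, iterateVec = prox_{t0_*n}(x - t0_*dg) and stepVec = iterateVec - x,
  // then report gnorm = ||stepVec|| / t0_.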
  pgstep(*state_->iterateVec,*state_->stepVec,nobj,x,dg,t0_,tol);
  state_->gnorm = state_->stepVec->norm() / t0_;
  state_->snorm = ROL_INF<Real>();
}

template<typename Real>
void QuasiNewtonAlgorithm<Real>::run( Vector<Real>       &x,
                                      const Vector<Real> &g,
                                      Objective<Real>    &sobj,
                                      Objective<Real>    &nobj,
                                      std::ostream       &outStream ) {
  const Real half(0.5), one(1);
  // Initialize line-search data
  Ptr<Vector<Real>> s = x.clone(), gp = x.clone(), gold = g.clone(), xs = x.clone();
  initialize(x,g,sobj,nobj,*gp,outStream);
  Real strial(0), ntrial(0), ftrial(0), gs(0), Qk(0), rhoTmp(0);
  Real tol(std::sqrt(ROL_EPSILON<Real>())), gtol(1);

  Ptr<TypeP::Algorithm<Real>> algo;
  Ptr<PQNObjective<Real>> qobj = makePtr<PQNObjective<Real>>(secant_,x,g);
  //Ptr<Problem<Real>> problem = makePtr<Problem<Real>>(qobj,xs);

  // Output
  if (verbosity_ > 0) writeOutput(outStream,true);

  // Compute steepest descent step
  xs->set(*state_->iterateVec);
  state_->iterateVec->set(x);
  while (status_->check(*state_)) {
    // Compute step
    qobj->setAnchor(x,*state_->gradientVec);
    gtol = std::max(sp_tol_min_,std::min(sp_tol1_,sp_tol2_*state_->gnorm));
    list_.sublist("Status Test").set("Gradient Tolerance",gtol);
    if      (algoName_ == "Line Search") algo = makePtr<TypeP::ProxGradientAlgorithm<Real>>(list_);
    else if (algoName_ == "iPiano")      algo = makePtr<TypeP::iPianoAlgorithm<Real>>(list_);
    else                                 algo = makePtr<TypeP::SpectralGradientAlgorithm<Real>>(list_);
    algo->run(*xs,*qobj,nobj,outStream);
    s->set(*xs); s->axpy(-one,x);
    spgIter_ = algo->getState()->iter;
    state_->nprox += staticPtrCast<const TypeP::AlgorithmState<Real>>(algo->getState())->nprox;

    // Perform backtracking line search
    state_->searchSize = one;
    x.set(*state_->iterateVec);
    x.axpy(state_->searchSize,*s);
    sobj.update(x,UpdateType::Trial);
    nobj.update(x,UpdateType::Trial);
    strial = sobj.value(x,tol);
    ntrial = nobj.value(x,tol);
    ftrial = strial + ntrial;
    ls_nfval_ = 1;
    gs = state_->gradientVec->apply(*s);
    Qk = gs + ntrial - state_->nvalue;
    if (verbosity_ > 1) {
      outStream << "  In TypeP::QuasiNewtonAlgorithm: Line Search" << std::endl;
      outStream << "    Step size:                        " << state_->searchSize   << std::endl;
      outStream << "    Trial objective value:            " << ftrial               << std::endl;
      outStream << "    Computed reduction:               " << state_->value-ftrial << std::endl;
      outStream << "    Dot product of gradient and step: " << gs                   << std::endl;
      outStream << "    Sufficient decrease bound:        " << -Qk*c1_              << std::endl;
      outStream << "    Number of function evaluations:   " << ls_nfval_            << std::endl;
    }
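    // Backtrack until the proximal sufficient decrease condition
    //   (f+n)(x + alpha*s) <= (f+n)(x) + c1_*Qk,  Qk = alpha*<grad f(x), s> + n(x + alpha*s) - n(x),
    // holds or the evaluation limit maxit_ is reached; the trial step size comes
    // from a safeguarded quadratic interpolant, falling back to backtracking at
    // rate rhodec_ when the interpolant lies outside [sigma1_, sigma2_].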
    while ( ftrial > state_->value + c1_*Qk && ls_nfval_ < maxit_ ) {
      rhoTmp = -half * Qk / (strial-state_->svalue-state_->searchSize*gs);
      state_->searchSize = ((sigma1_ <= rhoTmp && rhoTmp <= sigma2_) ? rhoTmp : rhodec_) * state_->searchSize;
      x.set(*state_->iterateVec);
      x.axpy(state_->searchSize,*s);
      sobj.update(x,UpdateType::Trial);
      nobj.update(x,UpdateType::Trial);
      strial = sobj.value(x,tol);
      ntrial = nobj.value(x,tol);
      ftrial = strial + ntrial;
      Qk = state_->searchSize * gs + ntrial - state_->nvalue;
      ls_nfval_++;
      if (verbosity_ > 1) {
        outStream << std::endl;
        outStream << "    Step size:                        " << state_->searchSize   << std::endl;
        outStream << "    Trial objective value:            " << ftrial               << std::endl;
        outStream << "    Computed reduction:               " << state_->value-ftrial << std::endl;
        outStream << "    Dot product of gradient and step: " << gs                   << std::endl;
        outStream << "    Sufficient decrease bound:        " << -Qk*c1_              << std::endl;
        outStream << "    Number of function evaluations:   " << ls_nfval_            << std::endl;
      }
    }
    state_->nsval += ls_nfval_;
    state_->nnval += ls_nfval_;

    // Compute norm of step
    state_->stepVec->set(*s);
    state_->stepVec->scale(state_->searchSize);
    state_->snorm = state_->stepVec->norm();

    // Update iterate
    state_->iterateVec->set(x);

    // Compute new value and gradient
    state_->iter++;
    state_->value  = ftrial;
    state_->svalue = strial;
    state_->nvalue = ntrial;
    sobj.update(x,UpdateType::Accept,state_->iter);
    nobj.update(x,UpdateType::Accept,state_->iter);
    gold->set(*state_->gradientVec);
    sobj.gradient(*state_->gradientVec,x,tol); state_->ngrad++;
    gp->set(state_->gradientVec->dual());

    // Compute projected gradient norm
    pgstep(*xs,*s,nobj,x,*gp,t0_,tol);
    state_->gnorm = s->norm() / t0_;

    // Update secant
    secant_->updateStorage(x,*state_->gradientVec,*gold,*state_->stepVec,state_->snorm,state_->iter);

    // Update Output
    if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
  }
  if (verbosity_ > 0) TypeP::Algorithm<Real>::writeExitStatus(outStream);
}

template<typename Real>
void QuasiNewtonAlgorithm<Real>::writeHeader( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  if (verbosity_ > 1) {
    os << std::string(114,'-') << std::endl;
    os << "Line-Search Proximal Quasi-Newton with " << secantName_ << " Hessian approximation";
    os << " status output definitions" << std::endl << std::endl;
    os << "  iter     - Number of iterates (steps taken)" << std::endl;
    os << "  value    - Objective function value" << std::endl;
    os << "  gnorm    - Norm of the gradient" << std::endl;
    os << "  snorm    - Norm of the step (update to optimization vector)" << std::endl;
    os << "  alpha    - Line search step length" << std::endl;
    os << "  #sval    - Cumulative number of times the smooth objective function was evaluated" << std::endl;
    os << "  #nval    - Cumulative number of times the nonsmooth objective function was evaluated" << std::endl;
    os << "  #grad    - Cumulative number of times the gradient was computed" << std::endl;
    os << "  #prox    - Cumulative number of times the proximal operator was computed" << std::endl;
    os << "  #ls_fval - Number of times the objective function was evaluated during the line search" << std::endl;
    os << "  sp_iter  - Number of iterations used to compute the quasi-Newton step" << std::endl;
    os << std::string(114,'-') << std::endl;
  }

  os << "  ";
  os << std::setw(6)  << std::left << "iter";
  os << std::setw(15) << std::left << "value";
  os << std::setw(15) << std::left << "gnorm";
  os << std::setw(15) << std::left << "snorm";
  os << std::setw(15) << std::left << "alpha";
  os << std::setw(10) << std::left << "#sval";
  os << std::setw(10) << std::left << "#nval";
  os << std::setw(10) << std::left << "#grad";
  os << std::setw(10) << std::left << "#prox";
  os << std::setw(10) << std::left << "#ls_fval";
  os << std::setw(10) << std::left << "sp_iter";
  os << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void QuasiNewtonAlgorithm<Real>::writeName( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::endl << "Line-Search Proximal Quasi-Newton (Type P)" << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void QuasiNewtonAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) writeName(os);
  if ( write_header )      writeHeader(os);
  if ( state_->iter == 0 ) {
    os << "  ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << "---";
    os << std::setw(15) << std::left << "---";
    os << std::setw(10) << std::left << state_->nsval;
    os << std::setw(10) << std::left << state_->nnval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << state_->nprox;
    os << std::setw(10) << std::left << "---";
    os << std::setw(10) << std::left << "---";
    os << std::endl;
  }
  else {
    os << "  ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << state_->snorm;
    os << std::setw(15) << std::left << state_->searchSize;
    os << std::setw(10) << std::left << state_->nsval;
    os << std::setw(10) << std::left << state_->nnval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << state_->nprox;
    os << std::setw(10) << std::left << ls_nfval_;
    os << std::setw(10) << std::left << spgIter_;
    os << std::endl;
  }
  os.flags(osFlags);
}

} // namespace TypeP
} // namespace ROL

#endif
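
For orientation, the sketch below shows how the parameter-list entries read in the constructor above might be populated before constructing the algorithm. It is a minimal, hypothetical configuration and is not part of this file: the sublist and key names are taken from the constructor, the values simply repeat its defaults, and buildPQNParameters is an illustrative helper name.

#include "ROL_ParameterList.hpp"

// Hypothetical helper: assemble the entries parsed by QuasiNewtonAlgorithm's constructor.
inline ROL::ParameterList buildPQNParameters() {
  ROL::ParameterList list;
  list.sublist("General").set("Output Level", 1);
  list.sublist("General").sublist("Secant").set("Type", "Limited-Memory BFGS");
  list.sublist("Status Test").set("Gradient Scale", 1.0);

  ROL::ParameterList &ls = list.sublist("Step").sublist("Line Search");
  ls.set("Apply Prox to Initial Guess",   false);
  ls.set("Function Evaluation Limit",     20);
  ls.set("Sufficient Decrease Tolerance", 1e-4);
  ls.sublist("Line-Search Method").set("Backtracking Rate", 0.5);

  ROL::ParameterList &pqn = ls.sublist("PQN");
  pqn.set("Subproblem Solver",             "Spectral Gradient"); // or "Line Search", "iPiano"
  pqn.set("Subproblem Iteration Limit",    1000);
  pqn.set("Subproblem Absolute Tolerance", 1e-4);
  pqn.set("Subproblem Relative Tolerance", 1e-2);
  pqn.set("Lower Step Size Safeguard",     0.1);
  pqn.set("Upper Step Size Safeguard",     0.9);
  return list;
}

// Possible use (sketch): given a smooth objective sobj, a nonsmooth objective nobj,
// a primal vector x, and a dual vector g, one might then run
//   ROL::ParameterList list = buildPQNParameters();
//   ROL::TypeP::QuasiNewtonAlgorithm<double> algo(list);
//   algo.run(x, g, sobj, nobj, std::cout);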