ROL_TypeP_iPianoAlgorithm_Def.hpp
// @HEADER
// ************************************************************************
//
// Rapid Optimization Library (ROL) Package
// Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
// Drew Kouri (dpkouri@sandia.gov) and
// Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_TYPEP_IPIANOALGORITHM_DEF_HPP
#define ROL_TYPEP_IPIANOALGORITHM_DEF_HPP

namespace ROL {
namespace TypeP {

template<typename Real>
iPianoAlgorithm<Real>::iPianoAlgorithm(ParameterList &list) {
  // Set status test
  status_->reset();
  status_->add(makePtr<StatusTest<Real>>(list));

  // Parse parameter list
  ParameterList &lslist = list.sublist("Step").sublist("iPiano");
  t0_           = list.sublist("Status Test").get("Gradient Scale", 1.0);
  maxit_        = lslist.get("Reduction Iteration Limit", 20);
  useConstBeta_ = lslist.get("Use Constant Beta", false);
  beta_         = lslist.get("Momentum Parameter", 0.25);
  rhodec_       = lslist.get("Backtracking Rate", 0.5);
  rhoinc_       = lslist.get("Increase Rate", 2.0);
  c1_           = lslist.get("Upper Interpolation Factor", 1e-5);
  c2_           = lslist.get("Lower Interpolation Factor", 1e-6);
  L_            = lslist.get("Initial Lipschitz Constant Estimate", 0.5/t0_);
  initProx_     = lslist.get("Apply Prox to Initial Guess", false);
  verbosity_    = list.sublist("General").get("Output Level", 0);
  writeHeader_  = verbosity_ > 2;
}

template<typename Real>
void iPianoAlgorithm<Real>::initialize(Vector<Real>       &x,
                                       const Vector<Real> &g,
                                       Objective<Real>    &sobj,
                                       Objective<Real>    &nobj,
                                       Vector<Real>       &px,
                                       Vector<Real>       &dg,
                                       std::ostream       &outStream) {
  Real ftol = std::sqrt(ROL_EPSILON<Real>());
  // Initialize data
  TypeP::Algorithm<Real>::initialize(x,g);
  // Update approximate gradient and approximate objective function.
  if (initProx_) {
    nobj.prox(*state_->iterateVec,x,t0_,ftol); state_->nprox++;
    x.set(*state_->iterateVec);
  }
  sobj.update(x,UpdateType::Initial,state_->iter);
  state_->svalue = sobj.value(x,ftol); state_->nsval++;
  nobj.update(x,UpdateType::Initial,state_->iter);
  state_->nvalue = nobj.value(x,ftol); state_->nnval++;
  state_->value = state_->svalue + state_->nvalue;
  sobj.gradient(*state_->gradientVec,x,ftol); state_->ngrad++;
  dg.set(state_->gradientVec->dual());
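  // Proximal gradient step with fixed parameter t0_; its norm, scaled by
  // 1/t0_, serves as the initial optimality measure gnorm.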
  pgstep(*state_->iterateVec, *state_->stepVec, nobj, x, dg, t0_, ftol);
  state_->snorm = state_->stepVec->norm();
  state_->gnorm = state_->snorm / t0_;
}

template<typename Real>
void iPianoAlgorithm<Real>::run( Vector<Real>       &x,
                                 const Vector<Real> &g,
                                 Objective<Real>    &sobj,
                                 Objective<Real>    &nobj,
                                 std::ostream       &outStream ) {
  const Real half(0.5), one(1), two(2);
  // Initialize iPiano data
  Ptr<Vector<Real>> sP = x.clone(), xP = x.clone(), dg = x.clone(), xold = x.clone();
  initialize(x,g,sobj,nobj,*sP,*dg,outStream);
  Real strial(0), strialP(0), snormP(0), LP(0), alphaP(0), betaP(0), gs(0), b(0);
  Real tol(std::sqrt(ROL_EPSILON<Real>()));
  bool accept(true);

  xold->set(x);

  // Output
  if (verbosity_ > 0) writeOutput(outStream, true);

  // Iterate the inertial proximal gradient (iPiano) method
  while (status_->check(*state_)) {
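    // iPiano update: with f = sobj (smooth) and phi = nobj (nonsmooth),
    //   y_k     = x_k - alpha*dg_k + beta*(x_k - x_{k-1}),  dg_k = dual of grad f(x_k),
    //   x_{k+1} = prox_{alpha*phi}(y_k),
    // where alpha is the step size and beta the inertial (momentum) parameter.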
    // Compute parameters alpha and beta
    if (!useConstBeta_) {
      b     = (c1_ + half * L_) / (c2_ + half * L_);
      beta_ = (b - one) / (b - half);
    }
    alpha_ = two * (one - beta_) / (two * c2_ + L_);
    // Compute inertial step
    state_->stepVec->set(x);
    state_->stepVec->axpy(-alpha_, *dg);
    state_->stepVec->axpy(beta_, x);
    state_->stepVec->axpy(-beta_, *xold);
    nobj.prox(*state_->iterateVec, *state_->stepVec, alpha_, tol); state_->nprox++;
    state_->stepVec->set(*state_->iterateVec);
    state_->stepVec->axpy(-one,x);
    state_->snorm = state_->stepVec->norm();
    // Compute smooth objective value
    sobj.update(*state_->iterateVec,UpdateType::Trial);
    nobj.update(*state_->iterateVec,UpdateType::Trial);
    strial = sobj.value(*state_->iterateVec,tol); state_->nsval++;
    gs = state_->gradientVec->apply(*state_->stepVec);
    // Estimate Lipschitz constant of sobj
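    // The trial point is tested against the quadratic upper bound
    //   f(x+s) <= f(x) + <grad f(x), s> + (L/2)*||s||^2.
    // If the bound holds, L is tentatively decreased (L /= rhoinc_) for up to
    // maxit_ reductions to allow larger steps; otherwise L is increased
    // (L /= rhodec_, with rhodec_ < 1) until the bound is satisfied.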
    if (strial <= state_->svalue + gs + half * L_ * state_->snorm * state_->snorm) {
      accept = true;
      for (int i = 0; i < maxit_; ++i) {
        // Store previously computed information
        sobj.update(*state_->iterateVec,UpdateType::Accept);
        nobj.update(*state_->iterateVec,UpdateType::Accept);
        LP      = L_;
        alphaP  = alpha_;
        betaP   = beta_;
        strialP = strial;
        snormP  = state_->snorm;
        xP->set(*state_->iterateVec);
        sP->set(*state_->stepVec);
        // Update alpha and beta with new Lipschitz constant estimate
        L_ /= rhoinc_;
        if (!useConstBeta_) {
          b     = (c1_ + half * L_) / (c2_ + half * L_);
          beta_ = (b - one) / (b - half);
        }
        alpha_ = two * (one - beta_) / (two * c2_ + L_);
        // Compute updated inertial step
        state_->stepVec->set(x);
        state_->stepVec->axpy(-alpha_, *dg);
        state_->stepVec->axpy(beta_, x);
        state_->stepVec->axpy(-beta_, *xold);
        nobj.prox(*state_->iterateVec, *state_->stepVec, alpha_, tol); state_->nprox++;
        state_->stepVec->set(*state_->iterateVec);
        state_->stepVec->axpy(-one,x);
        state_->snorm = state_->stepVec->norm();
        // Compute smooth objective value
        sobj.update(*state_->iterateVec,UpdateType::Trial);
        strial = sobj.value(*state_->iterateVec,tol); state_->nsval++;
        gs = state_->gradientVec->apply(*state_->stepVec);
        if (strial > state_->svalue + gs + half * L_ * state_->snorm * state_->snorm) {
          accept = false;
          L_     = LP;
          alpha_ = alphaP;
          beta_  = betaP;
          strial = strialP;
          state_->snorm = snormP;
          state_->iterateVec->set(*xP);
          state_->stepVec->set(*sP);
          break;
        }
      }
      if (accept) {
        sobj.update(*state_->iterateVec,UpdateType::Accept);
        nobj.update(*state_->iterateVec,UpdateType::Accept);
      }
      else {
        sobj.update(*state_->iterateVec,UpdateType::Revert);
        nobj.update(*state_->iterateVec,UpdateType::Revert);
      }
    }
    else {
      while (strial > state_->svalue + gs + half * L_ * state_->snorm * state_->snorm) {
        // Update alpha and beta with new Lipschitz constant estimate
        L_ /= rhodec_;
        if (!useConstBeta_) {
          b     = (c1_ + half * L_) / (c2_ + half * L_);
          beta_ = (b - one) / (b - half);
        }
        alpha_ = two * (one - beta_) / (two * c2_ + L_);
        // Compute updated inertial step
        state_->stepVec->set(x);
        state_->stepVec->axpy(-alpha_, *dg);
        state_->stepVec->axpy(beta_, x);
        state_->stepVec->axpy(-beta_, *xold);
        nobj.prox(*state_->iterateVec, *state_->stepVec, alpha_, tol); state_->nprox++;
        state_->stepVec->set(*state_->iterateVec);
        state_->stepVec->axpy(-one,x);
        state_->snorm = state_->stepVec->norm();
        // Compute smooth objective value
        sobj.update(*state_->iterateVec,UpdateType::Trial);
        strial = sobj.value(*state_->iterateVec,tol); state_->nsval++;
        gs = state_->gradientVec->apply(*state_->stepVec);
      }
      sobj.update(*state_->iterateVec,UpdateType::Accept);
      nobj.update(*state_->iterateVec,UpdateType::Accept);
    }
    // Update iteration
    state_->iter++;
    xold->set(x);
    x.set(*state_->iterateVec);
    state_->svalue = strial;
    state_->nvalue = nobj.value(x,tol); state_->nnval++;
    state_->value = state_->svalue + state_->nvalue;
    sobj.gradient(*state_->gradientVec,x,tol); state_->ngrad++;
    dg->set(state_->gradientVec->dual());
    // Compute proximal gradient for status check
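    // (fixed prox parameter t0_; the reported gnorm is the norm of this
    //  proximal gradient step divided by t0_)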
    pgstep(*xP,*sP,nobj,x,*dg,t0_,tol);
    state_->gnorm = sP->norm() / t0_;

    // Update Output
    if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
  }
  if (verbosity_ > 0) TypeP::Algorithm<Real>::writeExitStatus(outStream);
}

template<typename Real>
void iPianoAlgorithm<Real>::writeHeader( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  if (verbosity_ > 1) {
    os << std::string(109,'-') << std::endl;
    os << "iPiano: Inertial proximal algorithm for nonconvex optimization";
    os << " status output definitions" << std::endl << std::endl;
    os << " iter - Number of iterates (steps taken)" << std::endl;
    os << " value - Objective function value" << std::endl;
    os << " gnorm - Norm of the proximal gradient with parameter lambda" << std::endl;
    os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
    os << " alpha - Inertial gradient parameter" << std::endl;
    os << " beta - Inertial step parameter" << std::endl;
    os << " L - Lipschitz constant estimate" << std::endl;
    os << " #sval - Cumulative number of times the smooth objective function was evaluated" << std::endl;
    os << " #nval - Cumulative number of times the nonsmooth objective function was evaluated" << std::endl;
    os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
    os << " #prox - Cumulative number of times the proximal operator was computed" << std::endl;
    os << std::string(109,'-') << std::endl;
  }

  os << " ";
  os << std::setw(6) << std::left << "iter";
  os << std::setw(15) << std::left << "value";
  os << std::setw(15) << std::left << "gnorm";
  os << std::setw(15) << std::left << "snorm";
  os << std::setw(15) << std::left << "alpha";
  os << std::setw(15) << std::left << "beta";
  os << std::setw(15) << std::left << "L";
  os << std::setw(10) << std::left << "#sval";
  os << std::setw(10) << std::left << "#nval";
  os << std::setw(10) << std::left << "#grad";
  os << std::setw(10) << std::left << "#nprox";
  os << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void iPianoAlgorithm<Real>::writeName( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::endl << "iPiano: Inertial Proximal Algorithm for Nonconvex Optimization (Type P)" << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void iPianoAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) writeName(os);
  if ( write_header )      writeHeader(os);
  if ( state_->iter == 0 ) {
    os << " ";
    os << std::setw(6) << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << "---";
    os << std::setw(15) << std::left << "---";
    os << std::setw(15) << std::left << "---";
    os << std::setw(15) << std::left << L_;
    os << std::setw(10) << std::left << state_->nsval;
    os << std::setw(10) << std::left << state_->nnval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << state_->nprox;
    os << std::endl;
  }
  else {
    os << " ";
    os << std::setw(6) << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->gnorm;
    os << std::setw(15) << std::left << state_->snorm;
    os << std::setw(15) << std::left << alpha_;
    os << std::setw(15) << std::left << beta_;
    os << std::setw(15) << std::left << L_;
    os << std::setw(10) << std::left << state_->nsval;
    os << std::setw(10) << std::left << state_->nnval;
    os << std::setw(10) << std::left << state_->ngrad;
    os << std::setw(10) << std::left << state_->nprox;
    os << std::endl;
  }
  os.flags(osFlags);
}

} // namespace TypeP
} // namespace ROL

#endif
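
A minimal usage sketch follows (not part of the header). It assumes the declaration header ROL_TypeP_iPianoAlgorithm.hpp and the constructor signature shown above; QuadObjective and ZeroProx are hypothetical stand-ins for a user's smooth objective (value/gradient) and nonsmooth objective (value/prox), and the ParameterList entries mirror the sublists parsed in the constructor.

// Sketch: driving iPianoAlgorithm on f(x) = 0.5*||x||^2 + phi(x) with phi = 0.
#include <iostream>
#include <vector>
#include "ROL_StdVector.hpp"
#include "ROL_Objective.hpp"
#include "ROL_TypeP_iPianoAlgorithm.hpp"

// Hypothetical smooth piece: f(x) = 0.5*||x||^2, so grad f(x) = x.
template<typename Real>
class QuadObjective : public ROL::Objective<Real> {
public:
  Real value(const ROL::Vector<Real> &x, Real &tol) override {
    return static_cast<Real>(0.5)*x.dot(x);
  }
  void gradient(ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol) override {
    g.set(x);
  }
};

// Hypothetical nonsmooth piece: phi(x) = 0, so the prox is the identity map.
template<typename Real>
class ZeroProx : public ROL::Objective<Real> {
public:
  Real value(const ROL::Vector<Real> &x, Real &tol) override {
    return static_cast<Real>(0);
  }
  void prox(ROL::Vector<Real> &Pv, const ROL::Vector<Real> &v, Real t, Real &tol) override {
    Pv.set(v);
  }
};

int main() {
  using Real = double;
  // Optimization vector (dual space coincides with the primal for StdVector).
  ROL::StdVector<Real> x(ROL::makePtr<std::vector<Real>>(5, 2.0));

  QuadObjective<Real> sobj;
  ZeroProx<Real>      nobj;

  // Sublists read by the iPianoAlgorithm constructor above.
  ROL::ParameterList list;
  list.sublist("General").set("Output Level", 1);
  list.sublist("Status Test").set("Gradient Scale", 1.0);
  list.sublist("Step").sublist("iPiano").set("Momentum Parameter", 0.25);

  ROL::TypeP::iPianoAlgorithm<Real> algo(list);
  algo.run(x, x.dual(), sobj, nobj, std::cout);  // run signature as declared above
  return 0;
}

With the zero nonsmooth term the prox reduces to the identity, so the run degenerates to an inertial gradient method; substituting an objective with a genuine prox (for example an l1-type penalty) exercises the full iPiano update.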