ROL
ROL_TypeB_MoreauYosidaAlgorithm_Def.hpp
Go to the documentation of this file.
1 // @HEADER
2 // ************************************************************************
3 //
4 // Rapid Optimization Library (ROL) Package
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact lead developers:
38 // Drew Kouri (dpkouri@sandia.gov) and
39 // Denis Ridzal (dridzal@sandia.gov)
40 //
41 // ************************************************************************
42 // @HEADER
43 
44 #ifndef ROL_TYPEB_MOREAUYOSIDAALGORITHM_DEF_HPP
45 #define ROL_TYPEB_MOREAUYOSIDAALGORITHM_DEF_HPP
46 
48 
49 namespace ROL {
50 namespace TypeB {
51 
52 template<typename Real>
53 MoreauYosidaAlgorithm<Real>::MoreauYosidaAlgorithm(ParameterList &list, const Ptr<Secant<Real>> &secant)
54  : TypeB::Algorithm<Real>::Algorithm(), secant_(secant),
55  tau_(10), print_(false), list_(list), subproblemIter_(0) {
56  // Set status test
57  status_->reset();
58  status_->add(makePtr<StatusTest<Real>>(list));
59 
60  // Parse parameters
61  Real ten(10), oem6(1.e-6), oem8(1.e-8), oe8(1e8);
62  ParameterList& steplist = list.sublist("Step").sublist("Moreau-Yosida Penalty");
63  state_->searchSize = steplist.get("Initial Penalty Parameter", ten);
64  maxPenalty_ = steplist.get("Maximum Penalty Parameter", oe8);
65  tau_ = steplist.get("Penalty Parameter Growth Factor", ten);
66  updatePenalty_ = steplist.get("Update Penalty", true);
67  updateMultiplier_ = steplist.get("Update Multiplier", true);
68  print_ = steplist.sublist("Subproblem").get("Print History", false);
69  // Set parameters for step subproblem
70  Real gtol = steplist.sublist("Subproblem").get("Optimality Tolerance", oem8);
71  Real ctol = steplist.sublist("Subproblem").get("Feasibility Tolerance", oem8);
72  int maxit = steplist.sublist("Subproblem").get("Iteration Limit", 1000);
73  bool reltol = steplist.sublist("Subproblem").get("Use Relative Tolerances", true);
74  Real stol = oem6*std::min(gtol,ctol);
75  list_.sublist("Status Test").set("Gradient Tolerance", gtol);
76  list_.sublist("Status Test").set("Constraint Tolerance", ctol);
77  list_.sublist("Status Test").set("Step Tolerance", stol);
78  list_.sublist("Status Test").set("Iteration Limit", maxit);
79  list_.sublist("Status Test").set("Use Relative Tolerances", reltol);
80  // Get step name from parameterlist
81  stepname_ = steplist.sublist("Subproblem").get("Step Type","Trust Region");
82  list_.sublist("Step").set("Type",stepname_);
83 
84  // Output settings
85  verbosity_ = list.sublist("General").get("Output Level", 0);
87  print_ = (verbosity_ > 2 ? true : print_);
88  list_.sublist("General").set("Output Level",(print_ ? verbosity_ : 0));
89 }
90 
91 template<typename Real>
93  const Vector<Real> &g,
96  Vector<Real> &pwa,
97  std::ostream &outStream) {
98  hasEcon_ = true;
99  if (proj_ == nullPtr) {
100  proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
101  hasEcon_ = false;
102  }
103  // Initialize data
105  // Initialize the algorithm state
106  state_->nfval = 0;
107  state_->ngrad = 0;
108  updateState(x,myobj,bnd,pwa,outStream);
109 }
110 
111 
112 template<typename Real>
116  Vector<Real> &pwa,
117  std::ostream &outStream) {
118  const Real one(1);
119  Real zerotol = std::sqrt(ROL_EPSILON<Real>());
120  // Update objective and constraint.
121  if (state_->iter == 0) {
122  myobj.update(x,UpdateType::Initial,state_->iter);
123  }
124  //else {
125  // myobj.update(x,UpdateType::Accept,state_->iter);
126  //}
127  // Compute norm of the gradient of the Lagrangian
128  state_->value = myobj.getObjectiveValue(x, zerotol);
129  myobj.getObjectiveGradient(*state_->gradientVec, x, zerotol);
130  //myobj.gradient(*state_->gradientVec, x, zerotol);
131  //gnorm_ = state_->gradientVec->norm();
132  pwa.set(x);
133  pwa.axpy(-one,state_->gradientVec->dual());
134  proj_->project(pwa,outStream);
135  pwa.axpy(-one,x);
136  gnorm_ = pwa.norm();
137  // Compute constraint violation
138  compViolation_ = myobj.testComplementarity(x);
139  state_->gnorm = std::max(gnorm_,compViolation_);
140  // Update state
141  state_->nfval++;
142  state_->ngrad++;
143 }
144 
145 template<typename Real>
147  const Vector<Real> &g,
148  Objective<Real> &obj,
150  std::ostream &outStream ) {
151  const Real one(1);
152  Ptr<Vector<Real>> pwa = x.clone();
153  // Initialize Moreau-Yosida data
154  MoreauYosidaObjective<Real> myobj(makePtrFromRef(obj),makePtrFromRef(bnd),
155  x,g,state_->searchSize,updateMultiplier_,
156  updatePenalty_);
157  initialize(x,g,myobj,bnd,*pwa,outStream);
158  Ptr<TypeU::Algorithm<Real>> algo;
159 
160  // Output
161  if (verbosity_ > 0) writeOutput(outStream,true);
162 
163  while (status_->check(*state_)) {
164  // Solve augmented Lagrangian subproblem
165  algo = TypeU::AlgorithmFactory<Real>(list_,secant_);
166  if (hasEcon_) algo->run(x,g,myobj,*proj_->getLinearConstraint(),
167  *proj_->getMultiplier(),*proj_->getResidual(),
168  outStream);
169  else algo->run(x,g,myobj,outStream);
170  subproblemIter_ = algo->getState()->iter;
171 
172  // Compute step
173  state_->stepVec->set(x);
174  state_->stepVec->axpy(-one,*state_->iterateVec);
175  state_->snorm = state_->stepVec->norm();
176 
177  // Update iterate and Lagrange multiplier
178  state_->iterateVec->set(x);
179 
180  // Update objective and constraint
181  state_->iter++;
182 
183  // Update state
184  updateState(x,myobj,bnd,*pwa,outStream);
185 
186  // Update multipliers
187  if (updatePenalty_) {
188  state_->searchSize *= tau_;
189  state_->searchSize = std::min(state_->searchSize,maxPenalty_);
190  }
191  myobj.updateMultipliers(state_->searchSize,x);
192 
193  state_->nfval += myobj.getNumberFunctionEvaluations() + algo->getState()->nfval;
194  state_->ngrad += myobj.getNumberGradientEvaluations() + algo->getState()->ngrad;
195 
196  // Update Output
197  if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
198  }
199  if (verbosity_ > 0) TypeB::Algorithm<Real>::writeExitStatus(outStream);
200 }
201 
202 template<typename Real>
203 void MoreauYosidaAlgorithm<Real>::writeHeader( std::ostream& os ) const {
204  std::ios_base::fmtflags osFlags(os.flags());
205  if (verbosity_ > 1) {
206  os << std::string(109,'-') << std::endl;
207  os << "Moreau-Yosida Penalty Solver";
208  os << " status output definitions" << std::endl << std::endl;
209  os << " iter - Number of iterates (steps taken)" << std::endl;
210  os << " fval - Objective function value" << std::endl;
211  os << " gnorm - Norm of the gradient" << std::endl;
212  os << " ifeas - Infeasibility metric" << std::endl;
213  os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
214  os << " penalty - Penalty parameter for bound constraints" << std::endl;
215  os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
216  os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
217  os << " subiter - Number of subproblem iterations" << std::endl;
218  os << std::string(109,'-') << std::endl;
219  }
220 
221  os << " ";
222  os << std::setw(6) << std::left << "iter";
223  os << std::setw(15) << std::left << "fval";
224  os << std::setw(15) << std::left << "gnorm";
225  os << std::setw(15) << std::left << "ifeas";
226  os << std::setw(15) << std::left << "snorm";
227  os << std::setw(10) << std::left << "penalty";
228  os << std::setw(8) << std::left << "#fval";
229  os << std::setw(8) << std::left << "#grad";
230  os << std::setw(8) << std::left << "subIter";
231  os << std::endl;
232  os.flags(osFlags);
233 }
234 
235 template<typename Real>
236 void MoreauYosidaAlgorithm<Real>::writeName( std::ostream& os ) const {
237  std::ios_base::fmtflags osFlags(os.flags());
238  os << std::endl << " Moreau-Yosida Penalty Solver";
239  os << std::endl;
240  os.flags(osFlags);
241 }
242 
243 template<typename Real>
244 void MoreauYosidaAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
245  std::ios_base::fmtflags osFlags(os.flags());
246  os << std::scientific << std::setprecision(6);
247  if ( state_->iter == 0 ) writeName(os);
248  if ( write_header ) writeHeader(os);
249  if ( state_->iter == 0 ) {
250  os << " ";
251  os << std::setw(6) << std::left << state_->iter;
252  os << std::setw(15) << std::left << state_->value;
253  os << std::setw(15) << std::left << gnorm_;
254  os << std::setw(15) << std::left << compViolation_;
255  os << std::setw(15) << std::left << "---";
256  os << std::scientific << std::setprecision(2);
257  os << std::setw(10) << std::left << state_->searchSize;
258  os << std::scientific << std::setprecision(6);
259  os << std::setw(8) << std::left << state_->nfval;
260  os << std::setw(8) << std::left << state_->ngrad;
261  os << std::setw(8) << std::left << "---";
262  os << std::endl;
263  }
264  else {
265  os << " ";
266  os << std::setw(6) << std::left << state_->iter;
267  os << std::setw(15) << std::left << state_->value;
268  os << std::setw(15) << std::left << gnorm_;
269  os << std::setw(15) << std::left << compViolation_;
270  os << std::setw(15) << std::left << state_->snorm;
271  os << std::scientific << std::setprecision(2);
272  os << std::setw(10) << std::left << state_->searchSize;
273  os << std::scientific << std::setprecision(6);
274  os << std::setw(8) << std::left << state_->nfval;
275  os << std::setw(8) << std::left << state_->ngrad;
276  os << std::setw(8) << std::left << subproblemIter_;
277  os << std::endl;
278  }
279  os.flags(osFlags);
280 }
281 
282 } // namespace TypeB
283 } // namespace ROL
284 
285 #endif
Provides the interface to evaluate objective functions.
void writeHeader(std::ostream &os) const override
Print iterate header.
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
void initialize(Vector< Real > &x, const Vector< Real > &g, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Vector< Real > &pwa, std::ostream &outStream=std::cout)
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
virtual void axpy(const Real alpha, const Vector &x)
Compute y <- alpha*x + y, where y = *this (formula lost in extraction).
Definition: ROL_Vector.hpp:153
Real getObjectiveValue(const Vector< Real > &x, Real &tol)
virtual void writeExitStatus(std::ostream &os) const
void writeName(std::ostream &os) const override
Print step name.
Defines the linear algebra or vector space interface.
Definition: ROL_Vector.hpp:80
MoreauYosidaAlgorithm(ParameterList &list, const Ptr< Secant< Real >> &secant=nullPtr)
void updateMultipliers(Real mu, const Vector< Real > &x)
void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update Moreau-Yosida penalty function.
Provides an interface to run bound constrained optimization algorithms.
Provides the interface to evaluate the Moreau-Yosida penalty function.
const Ptr< AlgorithmState< Real > > state_
Provides interface for and implements limited-memory secant operators.
Definition: ROL_Secant.hpp:79
Provides an interface to check status of optimization algorithms.
Provides the interface to apply upper and lower bound constraints.
void updateState(const Vector< Real > &x, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Vector< Real > &pwa, std::ostream &outStream=std::cout)
void initialize(const Vector< Real > &x, const Vector< Real > &g)
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual optimization vector spaces.
virtual void set(const Vector &x)
Set y <- x, where y = *this (formula lost in extraction).
Definition: ROL_Vector.hpp:209
void getObjectiveGradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
virtual Real norm() const =0
Returns the norm ||y||, where y = *this (formula lost in extraction).
Real testComplementarity(const Vector< Real > &x)
const Ptr< CombinedStatusTest< Real > > status_