ROL_TypeG_MoreauYosidaAlgorithm_Def.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_TYPEG_MOREAUYOSIDAALGORITHM_DEF_H
#define ROL_TYPEG_MOREAUYOSIDAALGORITHM_DEF_H

#include "ROL_TypeE_AlgorithmFactory.hpp"

namespace ROL {
namespace TypeG {

template<typename Real>
MoreauYosidaAlgorithm<Real>::MoreauYosidaAlgorithm(ParameterList &list, const Ptr<Secant<Real>> &secant)
  : TypeG::Algorithm<Real>::Algorithm(), secant_(secant),
    tau_(10), print_(false), list_(list), subproblemIter_(0) {
  // Set status test
  status_->reset();
  status_->add(makePtr<ConstraintStatusTest<Real>>(list));

  // Parse parameters
  Real ten(10), oem6(1.e-6), oem8(1.e-8), oe8(1e8);
  ParameterList& steplist = list.sublist("Step").sublist("Moreau-Yosida Penalty");
  state_->searchSize = steplist.get("Initial Penalty Parameter", ten);
  maxPenalty_        = steplist.get("Maximum Penalty Parameter", oe8);
  tau_               = steplist.get("Penalty Parameter Growth Factor", ten);
  updatePenalty_     = steplist.get("Update Penalty", true);
  updateMultiplier_  = steplist.get("Update Multiplier", true);
  print_             = steplist.sublist("Subproblem").get("Print History", false);
  // Set parameters for step subproblem
  Real gtol   = steplist.sublist("Subproblem").get("Optimality Tolerance", oem8);
  Real ctol   = steplist.sublist("Subproblem").get("Feasibility Tolerance", oem8);
  int  maxit  = steplist.sublist("Subproblem").get("Iteration Limit", 1000);
  bool reltol = steplist.sublist("Subproblem").get("Use Relative Tolerances", true);
  Real stol   = oem6*std::min(gtol,ctol);
  list_.sublist("Status Test").set("Gradient Tolerance", gtol);
  list_.sublist("Status Test").set("Constraint Tolerance", ctol);
  list_.sublist("Status Test").set("Step Tolerance", stol);
  list_.sublist("Status Test").set("Iteration Limit", maxit);
  list_.sublist("Status Test").set("Use Relative Tolerances", reltol);
  // Get step name from parameter list
  stepname_ = steplist.sublist("Subproblem").get("Step Type","Augmented Lagrangian");
  list_.sublist("Step").set("Type",stepname_);

  // Output settings
  verbosity_   = list.sublist("General").get("Output Level", 0);
  printHeader_ = verbosity_ > 2;
  print_       = (verbosity_ > 2 ? true : print_);
  list_.sublist("General").set("Output Level",(print_ ? verbosity_ : 0));
}
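
// For reference, an illustrative sketch of the ParameterList layout consumed by
// the constructor above; the sublist and key names are taken verbatim from the
// get() calls, and the values shown are the defaults used when a key is absent.
//
//   Step
//     Moreau-Yosida Penalty
//       Initial Penalty Parameter        : 10
//       Maximum Penalty Parameter        : 1e8
//       Penalty Parameter Growth Factor  : 10
//       Update Penalty                   : true
//       Update Multiplier                : true
//       Subproblem
//         Print History                  : false
//         Optimality Tolerance           : 1e-8
//         Feasibility Tolerance          : 1e-8
//         Iteration Limit                : 1000
//         Use Relative Tolerances        : true
//         Step Type                      : "Augmented Lagrangian"
//   General
//     Output Level                       : 0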

template<typename Real>
void MoreauYosidaAlgorithm<Real>::initialize(Vector<Real>                &x,
                                             const Vector<Real>          &g,
                                             const Vector<Real>          &l,
                                             const Vector<Real>          &c,
                                             MoreauYosidaObjective<Real> &myobj,
                                             BoundConstraint<Real>       &bnd,
                                             Constraint<Real>            &con,
                                             Vector<Real>                &pwa,
                                             Vector<Real>                &dwa,
                                             std::ostream                &outStream) {
  hasPolyProj_ = true;
  if (proj_ == nullPtr) {
    proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
    hasPolyProj_ = false;
  }
  proj_->project(x,outStream);
  // Initialize data
  TypeG::Algorithm<Real>::initialize(x,g,l,c);
  // Initialize the algorithm state
  state_->nfval = 0;
  state_->ngrad = 0;
  state_->ncval = 0;
  updateState(x,l,myobj,bnd,con,pwa,dwa,outStream);
}


template<typename Real>
void MoreauYosidaAlgorithm<Real>::updateState(const Vector<Real>          &x,
                                              const Vector<Real>          &l,
                                              MoreauYosidaObjective<Real> &myobj,
                                              BoundConstraint<Real>       &bnd,
                                              Constraint<Real>            &con,
                                              Vector<Real>                &pwa,
                                              Vector<Real>                &dwa,
                                              std::ostream                &outStream) {
  const Real one(1);
  Real zerotol = std::sqrt(ROL_EPSILON<Real>());
  // Update objective and constraint
  if (state_->iter == 0) {
    myobj.update(x,UpdateType::Initial,state_->iter);
    con.update(x,UpdateType::Initial,state_->iter);
  }
  //else {
  //  myobj.update(x,UpdateType::Accept,state_->iter);
  //  con.update(x,UpdateType::Accept,state_->iter);
  //}
  // Compute norm of the gradient of the Lagrangian
  state_->value = myobj.getObjectiveValue(x, zerotol);
  myobj.getObjectiveGradient(*state_->gradientVec, x, zerotol);
  //myobj.gradient(*state_->gradientVec, x, zerotol);
  con.applyAdjointJacobian(dwa, l, x, zerotol);
  state_->gradientVec->plus(dwa);
  //gnorm_ = state_->gradientVec->norm();
  // Optimality measure: norm of the projected gradient step, P(x - gradL) - x
  pwa.set(x);
  pwa.axpy(-one,state_->gradientVec->dual());
  proj_->project(pwa,outStream);
  pwa.axpy(-one,x);
  gnorm_ = pwa.norm();
  // Compute constraint violation
  con.value(*state_->constraintVec, x, zerotol);
  state_->cnorm = state_->constraintVec->norm();
  compViolation_ = myobj.testComplementarity(x);
  state_->gnorm = std::max(gnorm_,compViolation_);
  // Update state
  state_->nfval++;
  state_->ngrad++;
  state_->ncval++;
}

template<typename Real>
void MoreauYosidaAlgorithm<Real>::run( Vector<Real>          &x,
                                       const Vector<Real>    &g,
                                       Objective<Real>       &obj,
                                       BoundConstraint<Real> &bnd,
                                       Constraint<Real>      &econ,
                                       Vector<Real>          &emul,
                                       const Vector<Real>    &eres,
                                       std::ostream          &outStream ) {
  const Real one(1);
  Ptr<Vector<Real>> pwa = x.clone(), dwa = g.clone();
  // Initialize Moreau-Yosida data
  MoreauYosidaObjective<Real> myobj(makePtrFromRef(obj),makePtrFromRef(bnd),
                                    x,g,state_->searchSize,updateMultiplier_,
                                    updatePenalty_);
  initialize(x,g,emul,eres,myobj,bnd,econ,*pwa,*dwa,outStream);
  Ptr<TypeE::Algorithm<Real>> algo;

  // Output
  if (verbosity_ > 0) writeOutput(outStream,true);

  while (status_->check(*state_)) {
    // Solve augmented Lagrangian subproblem
    algo = TypeE::AlgorithmFactory<Real>(list_,secant_);
    emul.zero();
    if (hasPolyProj_) algo->run(x,g,myobj,econ,emul,eres,
                                *proj_->getLinearConstraint(),
                                *proj_->getMultiplier(),
                                *proj_->getResidual(),outStream);
    else              algo->run(x,g,myobj,econ,emul,eres,outStream);
    subproblemIter_ = algo->getState()->iter;
    state_->nfval += algo->getState()->nfval;
    state_->ngrad += algo->getState()->ngrad;
    state_->ncval += algo->getState()->ncval;

    // Compute step
    state_->stepVec->set(x);
    state_->stepVec->axpy(-one,*state_->iterateVec);
    state_->snorm = state_->stepVec->norm();
    state_->lagmultVec->axpy(-one,emul);
    state_->snorm += state_->lagmultVec->norm();

    // Update iterate and Lagrange multiplier
    state_->iterateVec->set(x);
    state_->lagmultVec->set(emul);

    // Update objective and constraint
    state_->iter++;

    // Update state
    updateState(x,emul,myobj,bnd,econ,*pwa,*dwa);

    // Update multipliers
    if (updatePenalty_)
      state_->searchSize = std::min(tau_*state_->searchSize,maxPenalty_);
    myobj.updateMultipliers(state_->searchSize,x);

    // Update Output
    if (verbosity_ > 0) writeOutput(outStream,printHeader_);
  }
  if (verbosity_ > 0) TypeG::Algorithm<Real>::writeExitStatus(outStream);
}

template<typename Real>
void MoreauYosidaAlgorithm<Real>::writeHeader( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  if (verbosity_ > 1) {
    os << std::string(109,'-') << std::endl;
    os << "Moreau-Yosida Penalty Solver";
    os << " status output definitions" << std::endl << std::endl;
    os << " iter - Number of iterates (steps taken)" << std::endl;
    os << " fval - Objective function value" << std::endl;
    os << " cnorm - Norm of the constraint" << std::endl;
    os << " gLnorm - Norm of the gradient of the Lagrangian" << std::endl;
    os << " ifeas - Infeasibility metric" << std::endl;
    os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
    os << " penalty - Penalty parameter for bound constraints" << std::endl;
    os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
    os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
    os << " #cval - Cumulative number of times the constraint was evaluated" << std::endl;
    os << " subiter - Number of subproblem iterations" << std::endl;
    os << std::string(109,'-') << std::endl;
  }

  os << " ";
  os << std::setw(6)  << std::left << "iter";
  os << std::setw(15) << std::left << "fval";
  os << std::setw(15) << std::left << "cnorm";
  os << std::setw(15) << std::left << "gLnorm";
  os << std::setw(15) << std::left << "ifeas";
  os << std::setw(15) << std::left << "snorm";
  os << std::setw(10) << std::left << "penalty";
  os << std::setw(8)  << std::left << "#fval";
  os << std::setw(8)  << std::left << "#grad";
  os << std::setw(8)  << std::left << "#cval";
  os << std::setw(8)  << std::left << "subIter";
  os << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void MoreauYosidaAlgorithm<Real>::writeName( std::ostream& os ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::endl << "Moreau-Yosida Penalty Solver (Type G, General Constraints)";
  os << std::endl;
  os << "Subproblem Solver: " << stepname_ << std::endl;
  os.flags(osFlags);
}

template<typename Real>
void MoreauYosidaAlgorithm<Real>::writeOutput( std::ostream& os, const bool print_header ) const {
  std::ios_base::fmtflags osFlags(os.flags());
  os << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) writeName(os);
  if ( print_header )      writeHeader(os);
  if ( state_->iter == 0 ) {
    os << " ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->cnorm;
    os << std::setw(15) << std::left << gnorm_;
    os << std::setw(15) << std::left << compViolation_;
    os << std::setw(15) << std::left << "---";
    os << std::scientific << std::setprecision(2);
    os << std::setw(10) << std::left << state_->searchSize;
    os << std::setw(8)  << std::left << state_->nfval;
    os << std::setw(8)  << std::left << state_->ngrad;
    os << std::setw(8)  << std::left << state_->ncval;
    os << std::setw(8)  << std::left << "---";
    os << std::endl;
  }
  else {
    os << " ";
    os << std::setw(6)  << std::left << state_->iter;
    os << std::setw(15) << std::left << state_->value;
    os << std::setw(15) << std::left << state_->cnorm;
    os << std::setw(15) << std::left << gnorm_;
    os << std::setw(15) << std::left << compViolation_;
    os << std::setw(15) << std::left << state_->snorm;
    os << std::scientific << std::setprecision(2);
    os << std::setw(10) << std::left << state_->searchSize;
    os << std::scientific << std::setprecision(6);
    os << std::setw(8)  << std::left << state_->nfval;
    os << std::setw(8)  << std::left << state_->ngrad;
    os << std::setw(8)  << std::left << state_->ncval;
    os << std::setw(8)  << std::left << subproblemIter_;
    os << std::endl;
  }
  os.flags(osFlags);
}

} // namespace TypeG
} // namespace ROL

#endif
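
Minimal usage sketch (illustrative only, not part of the header above). It assumes the application already provides ROL::Ptr objects x, g, obj, bnd, econ, emul, and eres holding concrete Vector, Objective, BoundConstraint, and Constraint implementations, and that the declaration header for this class has been included; only the constructor and run() calls mirror the interface defined in this file, and the parameter keys match those parsed in the constructor.

  // Hypothetical application objects: x, g, obj, bnd, econ, emul, eres.
  ROL::ParameterList list;
  ROL::ParameterList &mylist
    = list.sublist("Step").sublist("Moreau-Yosida Penalty");
  mylist.set("Initial Penalty Parameter",       1.0e1);
  mylist.set("Penalty Parameter Growth Factor", 1.0e1);
  mylist.sublist("Subproblem").set("Iteration Limit", 200);
  list.sublist("General").set("Output Level", 1);

  ROL::TypeG::MoreauYosidaAlgorithm<double> algo(list);
  algo.run(*x, *g, *obj, *bnd, *econ, *emul, *eres, std::cout);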