ROL_MoreauYosidaPenalty.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_MOREAUYOSIDAPENALTY_H
#define ROL_MOREAUYOSIDAPENALTY_H

#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Types.hpp"
#include "ROL_Ptr.hpp"
#include <iostream>

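/** \class ROL::MoreauYosidaPenalty
    \brief Provides the interface to evaluate the Moreau-Yosida penalty function.
*/
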
namespace ROL {

template <class Real>
class MoreauYosidaPenalty : public Objective<Real> {
private:
  const ROL::Ptr<Objective<Real> >       obj_;
  const ROL::Ptr<BoundConstraint<Real> > con_;

  ROL::Ptr<Vector<Real> > g_;
  ROL::Ptr<Vector<Real> > l_;
  ROL::Ptr<Vector<Real> > u_;
  ROL::Ptr<Vector<Real> > l1_;
  ROL::Ptr<Vector<Real> > u1_;
  ROL::Ptr<Vector<Real> > dl1_;
  ROL::Ptr<Vector<Real> > du1_;
  ROL::Ptr<Vector<Real> > xlam_;
  ROL::Ptr<Vector<Real> > v_;
  ROL::Ptr<Vector<Real> > dv_;
  ROL::Ptr<Vector<Real> > dv2_;
  ROL::Ptr<Vector<Real> > lam_;
  ROL::Ptr<Vector<Real> > tmp_;

  Real mu_;
  Real fval_;
  bool isConEvaluated_;
  int  nfval_;
  int  ngval_;
  bool updateMultiplier_;
  bool updatePenalty_;

  void computePenalty(const Vector<Real> &x) {
    if ( con_->isActivated() ) {
      Real one = 1.0;
      if ( !isConEvaluated_ ) {
        xlam_->set(x);
        xlam_->axpy(one/mu_,*lam_);

        if ( con_->isFeasible(*xlam_) ) {
          l1_->zero(); dl1_->zero();
          u1_->zero(); du1_->zero();
        }
        else {
          // Compute lower penalty component
          l1_->set(*l_);
          con_->pruneLowerInactive(*l1_,*xlam_);
          tmp_->set(*xlam_);
          con_->pruneLowerInactive(*tmp_,*xlam_);
          l1_->axpy(-one,*tmp_);

          // Compute upper penalty component
          u1_->set(*xlam_);
          con_->pruneUpperInactive(*u1_,*xlam_);
          tmp_->set(*u_);
          con_->pruneUpperInactive(*tmp_,*xlam_);
          u1_->axpy(-one,*tmp_);

          // Compute derivative of lower penalty component
          dl1_->set(l1_->dual());
          con_->pruneLowerInactive(*dl1_,*xlam_);

          // Compute derivative of upper penalty component
          du1_->set(u1_->dual());
          con_->pruneUpperInactive(*du1_,*xlam_);
        }

        isConEvaluated_ = true;
      }
    }
  }

  void initialize(const ROL::Vector<Real> &x,
                  const ROL::Ptr<ROL::BoundConstraint<Real> > &con) {
    g_    = x.dual().clone();
    l_    = x.clone();
    l1_   = x.clone();
    dl1_  = x.dual().clone();
    u_    = x.clone();
    u1_   = x.clone();
    du1_  = x.dual().clone();
    xlam_ = x.clone();
    v_    = x.clone();
    dv_   = x.dual().clone();
    dv2_  = x.dual().clone();
    lam_  = x.clone();
    tmp_  = x.clone();

    l_->set(*con_->getLowerBound());
    u_->set(*con_->getUpperBound());

    lam_->zero();
    //lam_->set(*u_);
    //lam_->plus(*l_);
    //lam_->scale(0.5);
  }

public:

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &con,
                      const ROL::Vector<Real> &x,
                      const Real mu = 1e1)
    : obj_(obj), con_(con), mu_(mu),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0),
      updateMultiplier_(true), updatePenalty_(true) {
    initialize(x,con);
  }

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &con,
                      const ROL::Vector<Real> &x,
                      ROL::ParameterList &parlist)
    : obj_(obj), con_(con),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0) {
    initialize(x,con);
    ROL::ParameterList &list = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
    updateMultiplier_ = list.get("Update Multiplier",true);
    updatePenalty_    = list.get("Update Penalty",true);
    mu_               = list.get("Initial Penalty Parameter",1e1);
  }

  MoreauYosidaPenalty(const ROL::Ptr<Objective<Real> > &obj,
                      const ROL::Ptr<BoundConstraint<Real> > &con,
                      const ROL::Vector<Real> &x,
                      const ROL::Vector<Real> &lam,
                      ROL::ParameterList &parlist)
    : obj_(obj), con_(con),
      fval_(0), isConEvaluated_(false), nfval_(0), ngval_(0) {
    initialize(x,con);
    lam_->set(lam);
    ROL::ParameterList &list = parlist.sublist("Step").sublist("Moreau-Yosida Penalty");
    updateMultiplier_ = list.get("Update Multiplier",true);
    updatePenalty_    = list.get("Update Penalty",true);
    mu_               = list.get("Initial Penalty Parameter",1e1);
  }

  void updateMultipliers(Real mu, const ROL::Vector<Real> &x) {
    if ( con_->isActivated() ) {
      if ( updateMultiplier_ ) {
        const Real one(1);
        computePenalty(x);
        lam_->set(*u1_);
        lam_->axpy(-one,*l1_);
        lam_->scale(mu_);
      }
      if ( updatePenalty_ ) {
        mu_ = mu;
      }
    }
    nfval_ = 0; ngval_ = 0;
    isConEvaluated_ = false;
  }

  void reset(const Real mu) {
    lam_->zero();
    mu_ = mu;
    nfval_ = 0; ngval_ = 0;
    isConEvaluated_ = false;
  }

  Real testComplementarity(const ROL::Vector<Real> &x) {
    Real val(0);
    if (con_->isActivated()) {
      computePenalty(x);

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *l_);
      Real lower = mu_*std::abs(tmp_->dot(*l1_));

      tmp_->set(x);
      tmp_->axpy(static_cast<Real>(-1), *u_);
      Real upper = mu_*std::abs(tmp_->dot(*u1_));

      tmp_->set(x);
      con_->project(*tmp_);
      tmp_->axpy(static_cast<Real>(-1), x);
      Real xnorm = tmp_->norm();

      val = std::max(xnorm,std::max(lower,upper));
    }
    return val;
  }

  Real getObjectiveValue(void) const {
    return fval_;
  }

  ROL::Ptr<Vector<Real> > getGradient(void) const {
    return g_;
  }

  int getNumberFunctionEvaluations(void) const {
    return nfval_;
  }

  int getNumberGradientEvaluations(void) const {
    return ngval_;
  }

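  /** \brief Update Moreau-Yosida penalty function. */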
  void update( const Vector<Real> &x, bool flag = true, int iter = -1 ) {
    obj_->update(x,flag,iter);
    con_->update(x,flag,iter);
    isConEvaluated_ = false;
  }

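  /** \brief Compute value. */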
  Real value( const Vector<Real> &x, Real &tol ) {
    Real half = 0.5;
    // Compute objective function value
    fval_ = obj_->value(x,tol);
    nfval_++;
    // Add value of the Moreau-Yosida penalty
    Real fval = fval_;
    if ( con_->isActivated() ) {
      computePenalty(x);
      fval += half*mu_*(l1_->dot(*l1_) + u1_->dot(*u1_));
    }
    return fval;
  }

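  /** \brief Compute gradient. */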
  void gradient( Vector<Real> &g, const Vector<Real> &x, Real &tol ) {
    // Compute gradient of objective function
    obj_->gradient(*g_,x,tol);
    ngval_++;
    g.set(*g_);
    // Add gradient of the Moreau-Yosida penalty
    if ( con_->isActivated() ) {
      computePenalty(x);
      g.axpy(-mu_,*dl1_);
      g.axpy(mu_,*du1_);
    }
  }

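  /** \brief Apply Hessian approximation to vector. */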
  void hessVec( Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x, Real &tol ) {
    // Apply objective Hessian to a vector
    obj_->hessVec(hv,v,x,tol);
    // Add Hessian of the Moreau-Yosida penalty
    if ( con_->isActivated() ) {
      Real one = 1.0;
      computePenalty(x);

      v_->set(v);
      con_->pruneLowerActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      con_->pruneLowerActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);

      v_->set(v);
      con_->pruneUpperActive(*v_,*xlam_);
      v_->scale(-one);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      con_->pruneUpperActive(*dv_,*xlam_);
      dv_->scale(-one);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);
    }
  }

// Definitions for parametrized (stochastic) objective functions
public:
  void setParameter(const std::vector<Real> &param) {
    Objective<Real>::setParameter(param);
    obj_->setParameter(param);
  }
}; // class MoreauYosidaPenalty

} // namespace ROL

#endif
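The penalty is normally driven by ROL's Moreau-Yosida penalty step, but it can also be exercised directly through the `Objective` interface shown above. The sketch below is illustrative only and is not part of the header; the function name `evaluateMoreauYosidaPenalty` and the arguments `obj`, `bnd`, and `x` are hypothetical placeholders for an application's concrete objective, bound constraint, and optimization vector.

```cpp
#include <iostream>
#include "ROL_MoreauYosidaPenalty.hpp"

// Hypothetical driver: obj, bnd, and x are supplied by the application.
void evaluateMoreauYosidaPenalty(const ROL::Ptr<ROL::Objective<double>>       &obj,
                                 const ROL::Ptr<ROL::BoundConstraint<double>> &bnd,
                                 const ROL::Ptr<ROL::Vector<double>>          &x) {
  double tol = 1e-8;

  // Wrap the bound-constrained problem in a Moreau-Yosida penalty objective
  // with initial penalty parameter mu = 10 (the constructor default).
  ROL::MoreauYosidaPenalty<double> myp(obj, bnd, *x, 10.0);

  // Evaluate the penalized objective and its gradient at x.
  ROL::Ptr<ROL::Vector<double>> g = x->dual().clone();
  double val = myp.value(*x, tol);
  myp.gradient(*g, *x, tol);

  // After an outer iteration, refresh the multiplier estimate from the
  // current penalty terms and pass in a larger penalty parameter.
  myp.updateMultipliers(100.0, *x);

  std::cout << "penalized value = " << val
            << ", complementarity measure = " << myp.testComplementarity(*x)
            << std::endl;
}
```

Calling `updateMultipliers` with a larger argument mirrors the usual continuation strategy: the multiplier `lam_` is rebuilt from the active penalty components and the penalty parameter `mu_` is increased before the next subproblem solve.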