ROL_ProjectedSecantStep.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_PROJECTEDSECANTSTEP_H
#define ROL_PROJECTEDSECANTSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"
#include "ROL_Secant.hpp"

/** @ingroup step_group
    \class ROL::ProjectedSecantStep
    \brief Provides the interface to compute optimization steps
           with projected secant method using line search.
*/

namespace ROL {

template <class Real>
class ProjectedSecantStep : public Step<Real> {
private:

  ROL::Ptr<Secant<Real> > secant_; ///< Secant object (used for quasi-Newton)
  ESecant esec_;                   ///< Secant type
  ROL::Ptr<Vector<Real> > d_;      ///< Additional vector storage
  ROL::Ptr<Vector<Real> > gp_;     ///< Additional vector storage
  int verbosity_;                  ///< Verbosity level
  const bool computeObj_;          ///< Allows the step to compute the objective value
  bool useProjectedGrad_;          ///< Whether or not to use the projected gradient criticality measure

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.

      Standard constructor to build a ProjectedSecantStep object.
      Algorithmic specifications are passed in through a ROL::ParameterList.

      @param[in] parlist    is a parameter list containing algorithmic specifications
      @param[in] secant     is a user-defined secant object
      @param[in] computeObj if true, the objective is evaluated during update()
  */
  ProjectedSecantStep( ROL::ParameterList &parlist,
                       const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
                       const bool computeObj = true )
    : Step<Real>(), secant_(secant), esec_(SECANT_USERDEFINED),
      d_(ROL::nullPtr), gp_(ROL::nullPtr),
      verbosity_(0), computeObj_(computeObj), useProjectedGrad_(false) {
    // Parse ParameterList
    ROL::ParameterList& Glist = parlist.sublist("General");
    useProjectedGrad_ = Glist.get("Projected Gradient Criticality Measure", false);
    verbosity_ = Glist.get("Print Verbosity",0);
    // Initialize secant object
    if ( secant == ROL::nullPtr ) {
      esec_ = StringToESecant(Glist.sublist("Secant").get("Type","Limited-Memory BFGS"));
      secant_ = SecantFactory<Real>(parlist);
    }
  }

  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &bnd,
                   AlgorithmState<Real> &algo_state ) {
    Step<Real>::initialize(x,s,g,obj,bnd,algo_state);
    d_  = s.clone();
    gp_ = g.clone();
  }

  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real one(1);

    // Compute projected secant step
    // ---> Apply inactive-inactive block of inverse secant to gradient
    gp_->set(*(step_state->gradientVec));
    bnd.pruneActive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    secant_->applyH(s,*gp_);
    bnd.pruneActive(s,*(step_state->gradientVec),x,algo_state.gnorm);
    // ---> Add in active gradient components
    gp_->set(*(step_state->gradientVec));
    bnd.pruneInactive(*gp_,*(step_state->gradientVec),x,algo_state.gnorm);
    s.plus(gp_->dual());
    s.scale(-one);
  }
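
  // In effect, the step assembled above is s = -( P_I H P_I g + P_A g ),
  // where g is the current gradient, H is the inverse secant (quasi-Newton)
  // approximation applied through applyH, and P_I, P_A zero out the
  // eps-active and eps-inactive components, respectively, with
  // eps = algo_state.gnorm.  The secant model acts only on the inactive
  // set; on the active set the step falls back to the (dual of the)
  // gradient, i.e., a projected-gradient direction.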

  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON<Real>()), one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate and store previous step
    algo_state.iter++;
    d_->set(x);
    x.plus(s);
    bnd.project(x);
    (step_state->descentVec)->set(x);
    (step_state->descentVec)->axpy(-one,*d_);
    algo_state.snorm = s.norm();

    // Compute new gradient
    gp_->set(*(step_state->gradientVec));
    obj.update(x,true,algo_state.iter);
    if ( computeObj_ ) {
      algo_state.value = obj.value(x,tol);
      algo_state.nfval++;
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update secant information
    secant_->updateStorage(x,*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( useProjectedGrad_ ) {
      gp_->set(*(step_state->gradientVec));
      bnd.computeProjectedGradient( *gp_, x );
      algo_state.gnorm = gp_->norm();
    }
    else {
      d_->set(x);
      d_->axpy(-one,(step_state->gradientVec)->dual());
      bnd.project(*d_);
      d_->axpy(-one,x);
      algo_state.gnorm = d_->norm();
    }
  }
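
  // In effect, when the projected gradient criticality measure is off, the
  // else-branch above computes gnorm = || P( x - dual(g) ) - x ||, the norm
  // of the projected-gradient displacement, which vanishes exactly at
  // stationary points of the bound-constrained problem.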

  std::string printHeader( void ) const {
    std::stringstream hist;

    if( verbosity_ > 0 ) {
      hist << std::string(109,'-') << "\n";
      hist << EDescentToString(DESCENT_SECANT);
      hist << " status output definitions\n\n";
      hist << "  iter     - Number of iterates (steps taken) \n";
      hist << "  value    - Objective function value \n";
      hist << "  gnorm    - Norm of the gradient\n";
      hist << "  snorm    - Norm of the step (update to optimization vector)\n";
      hist << "  #fval    - Cumulative number of times the objective function was evaluated\n";
      hist << "  #grad    - Number of times the gradient was computed\n";
      hist << std::string(109,'-') << "\n";
    }

    hist << "  ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    hist << "\n";
    return hist.str();
  }
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\n" << EDescentToString(DESCENT_SECANT);
    hist << " with " << ESecantToString(esec_) << "\n";
    return hist.str();
  }
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << "  ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class ProjectedSecantStep

} // namespace ROL

#endif
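
For orientation, here is a minimal usage sketch. The parameter-list keys and the constructor signature come from the header above; the main() scaffolding, the double instantiation, and the ROL_ParameterList.hpp include are illustrative assumptions, and a complete solve would additionally wire the step into a ROL algorithm object together with an Objective, a BoundConstraint, and a StatusTest.

  #include <iostream>
  #include "ROL_ParameterList.hpp"       // assumed header for ROL::ParameterList
  #include "ROL_ProjectedSecantStep.hpp"

  int main() {
    // Options read by the ProjectedSecantStep constructor above.
    ROL::ParameterList parlist;
    parlist.sublist("General").set("Projected Gradient Criticality Measure", true);
    parlist.sublist("General").set("Print Verbosity", 1);
    parlist.sublist("General").sublist("Secant").set("Type", "Limited-Memory BFGS");

    // No user-defined secant is passed, so the step builds a limited-memory
    // BFGS operator through SecantFactory.
    ROL::ProjectedSecantStep<double> step(parlist);

    // Prints the descent and secant type, e.g. a quasi-Newton method
    // with Limited-Memory BFGS.
    std::cout << step.printName();
    return 0;
  }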