ROL_Secant.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_SECANT_H
#define ROL_SECANT_H

/** \class ROL::Secant
    \brief Provides interface for and implements limited-memory secant operators.
*/

#include "ROL_ParameterList.hpp"
#include "ROL_LinearOperator.hpp"
#include "ROL_Types.hpp"

namespace ROL {

enum ESecantMode {
  SECANTMODE_FORWARD,
  SECANTMODE_INVERSE,
  SECANTMODE_BOTH
};

template<class Real>
struct SecantState {
  Ptr<Vector<Real>>              iterate;
  std::vector<Ptr<Vector<Real>>> iterDiff; // Step Storage
  std::vector<Ptr<Vector<Real>>> gradDiff; // Gradient Storage
  std::vector<Real>              product;  // Step-Gradient Inner Product Storage
  std::vector<Real>              product2; // Step-Gradient Inner Product Storage
  int storage;                             // Storage Size
  int current;                             // Current Storage Size
  int iter;                                // Current Optimization Iteration
  ESecantMode mode;                        // Intended application mode

  SecantState(int M, ESecantMode sm) : storage(M), current(-1), iter(0), mode(sm) {}
};

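// A concrete Secant subclass combines the stored differences
//   s_i = x_{i+1} - x_i  (iterDiff)   and   y_i = g_{i+1} - g_i  (gradDiff),
// together with the inner products product[i] = s_i^T y_i, to apply a
// limited-memory quasi-Newton approximation of the Hessian (applyB) or of
// its inverse (applyH), for example via the L-BFGS two-loop recursion.
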
template<class Real>
class Secant : public LinearOperator<Real> {
protected:

  const Ptr<SecantState<Real>> state_; // Secant State
  Ptr<Vector<Real>> y_;
  bool useDefaultScaling_;
  Real Bscaling_;

private:

  bool isInitialized_;

public:

  virtual ~Secant() {}

  // Constructor
  //   M:                 maximum number of stored curvature pairs
  //   useDefaultScaling: if true, scale the initial approximation from the
  //                      stored pairs; otherwise use the supplied Bscaling
  //   Bscaling:          user-provided scaling for the initial Hessian B_0
  //   mode:              whether the approximation is applied forward (B),
  //                      inverse (H), or both
  Secant( int M = 10, bool useDefaultScaling = true, Real Bscaling = Real(1), ESecantMode mode = SECANTMODE_BOTH )
    : state_(makePtr<SecantState<Real>>(M,mode)),
      useDefaultScaling_(useDefaultScaling), Bscaling_(Bscaling),
      isInitialized_(false) {}

  Ptr<SecantState<Real>>& get_state() { return state_; }
  const Ptr<SecantState<Real>>& get_state() const { return state_; }

  // Update Secant Approximation
  virtual void updateStorage( const Vector<Real> &x,  const Vector<Real> &grad,
                              const Vector<Real> &gp, const Vector<Real> &s,
                              const Real snorm,       const int iter ) {
    const Real one(1);
    if ( !isInitialized_ ) {
      state_->iterate = x.clone();
      y_ = grad.clone();
      isInitialized_ = true;
    }
    state_->iterate->set(x);
    state_->iter = iter;
    y_->set(grad);
    y_->axpy(-one,gp);                                   // y = g_{k+1} - g_k

    //Real sy = s.dot(y_->dual());
    Real sy = s.apply(*y_);
    // Store the pair only if the curvature condition s^T y > eps*||s||^2 holds
    if (sy > ROL_EPSILON<Real>()*snorm*snorm) {
      if (state_->current < state_->storage-1) {
        state_->current++;                               // Increment Storage
        state_->iterDiff.push_back(s.clone());           // Create new memory
        state_->gradDiff.push_back(grad.clone());        // Create new memory
      }
      else {
        state_->iterDiff.push_back(state_->iterDiff[0]); // Move first element to the last
        state_->gradDiff.push_back(state_->gradDiff[0]); // Move first element to the last
        state_->iterDiff.erase(state_->iterDiff.begin());// Remove first element of s list
        state_->gradDiff.erase(state_->gradDiff.begin());// Remove first element of y list
        state_->product.erase(state_->product.begin());  // Remove first element of rho list
      }
      state_->iterDiff[state_->current]->set(s);         // s = x_{k+1} - x_k
      state_->gradDiff[state_->current]->set(*y_);       // y = g_{k+1} - g_k
      state_->product.push_back(sy);                     // ys = 1/rho
    }
  }

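  // Note on the initial approximations below: with the default scaling, both
  // applyH0 and applyB0 first map v to its dual representation and then scale
  // by
  //   gamma_k = (s_k^T y_k) / (y_k^T y_k)   in applyH0, and
  //   1/gamma_k                             in applyB0,
  // using the most recently stored pair (when at least one pair is stored).
  // If useDefaultScaling_ is false, the user-supplied Bscaling_ takes the
  // place of 1/gamma_k.
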
  // Apply Secant Approximate Inverse Hessian
  virtual void applyH( Vector<Real> &Hv, const Vector<Real> &v ) const = 0;

  // Apply Initial Secant Approximate Inverse Hessian
  virtual void applyH0( Vector<Real> &Hv, const Vector<Real> &v ) const {
    Hv.set(v.dual()); // map the dual (gradient-like) vector v into the primal space
    if (useDefaultScaling_) {
      if (state_->iter != 0 && state_->current != -1) {
        Real yy = state_->gradDiff[state_->current]->dot(*(state_->gradDiff[state_->current]));
        Hv.scale(state_->product[state_->current]/yy);
      }
    }
    else {
      Hv.scale(static_cast<Real>(1)/Bscaling_);
    }
  }

  // Apply Secant Approximate Hessian
  virtual void applyB( Vector<Real> &Bv, const Vector<Real> &v ) const = 0;

  // Apply Initial Secant Approximate Hessian
  virtual void applyB0( Vector<Real> &Bv, const Vector<Real> &v ) const {
    Bv.set(v.dual());
    if (useDefaultScaling_) {
      if (state_->iter != 0 && state_->current != -1) {
        Real yy = state_->gradDiff[state_->current]->dot(*(state_->gradDiff[state_->current]));
        Bv.scale(yy/state_->product[state_->current]);
      }
    }
    else {
      Bv.scale(Bscaling_);
    }
  }

  // Test Secant Approximations
  void test( std::ostream &stream = std::cout ) const {
    if (isInitialized_) {
      Ptr<Vector<Real>> v  = state_->iterate->clone();
      Ptr<Vector<Real>> Hv = state_->iterate->clone();
      Ptr<Vector<Real>> Bv = state_->iterate->dual().clone();
      const Real one(1);

      // Print ||BHv-v|| -> should be near zero, i.e., BHv should recover v
      v->randomize(-one,one);
      applyH(*Hv,*v);
      applyB(*Bv,*Hv);
      v->axpy(-one,*Bv);
      stream << " ||BHv-v|| = " << v->norm() << std::endl;

      // Print ||HBv-v|| -> should be near zero, i.e., HBv should recover v
      v->randomize(-one,one);
      applyB(*Bv,*v);
      applyH(*Hv,*Bv);
      v->axpy(-one,*Hv);
      stream << " ||HBv-v|| = " << v->norm() << std::endl;
    }
  }

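  // The LinearOperator overrides below allow a Secant object to be used
  // wherever a linear operator is expected (for example, as a Hessian
  // surrogate or preconditioner): apply() forwards to the approximate
  // Hessian B, applyInverse() to the approximate inverse H; the tolerance
  // argument is unused.
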
  void apply(Vector<Real> &Hv, const Vector<Real> &v, Real &tol) const {
    applyB(Hv,v);
  }

  void applyInverse(Vector<Real> &Hv, const Vector<Real> &v, Real &tol) const {
    applyH(Hv,v);
  }

}; // class Secant

} // namespace ROL

#include "ROL_SecantFactory.hpp"

#endif
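
Usage sketch (not part of the header above): the snippet below shows how an
optimization algorithm might drive a Secant object each iteration, first
folding the newest step and gradient pair into storage and then applying the
approximate inverse Hessian to obtain a quasi-Newton search direction. The
helper name quasiNewtonDirection and its argument list are illustrative
assumptions, not part of ROL; concrete Secant instances are obtained
elsewhere, e.g., through the factory header included above.

#include "ROL_Secant.hpp"

// Illustrative helper (hypothetical): compute the quasi-Newton direction
// d = -H_{k+1} g_{k+1} after updating the secant state.
template<class Real>
void quasiNewtonDirection( ROL::Secant<Real>       &secant,
                           const ROL::Vector<Real> &x,      // new iterate x_{k+1}
                           const ROL::Vector<Real> &g,      // gradient at x_{k+1}
                           const ROL::Vector<Real> &gold,   // gradient at x_k
                           const ROL::Vector<Real> &s,      // step s_k = x_{k+1} - x_k
                           const Real               snorm,  // ||s_k||
                           const int                iter,   // iteration counter
                           ROL::Vector<Real>        &d ) {  // output: search direction
  // Fold the newest curvature pair (s_k, y_k = g_{k+1} - g_k) into the
  // limited-memory storage; the pair is kept only if the curvature
  // condition inside updateStorage holds.
  secant.updateStorage(x, g, gold, s, snorm, iter);
  // Apply the approximate inverse Hessian to the current gradient and negate.
  secant.applyH(d, g);
  d.scale(static_cast<Real>(-1));
}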