NLPInterfacePack_ExampleNLPObjGrad.cpp
// @HEADER
// ***********************************************************************
//
// Moocho: Multi-functional Object-Oriented arCHitecture for Optimization
// Copyright (2003) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Roscoe A. Bartlett (rabartl@sandia.gov)
//
// ***********************************************************************
// @HEADER

#include <assert.h>

#include <stdexcept>
#include <limits>

#include "NLPInterfacePack_ExampleNLPObjGrad.hpp"
#include "ExampleNLPDirectRTOps.h"
#include "AbstractLinAlgPack_BasisSystemComposite.hpp"
#include "AbstractLinAlgPack_VectorMutable.hpp"
#include "AbstractLinAlgPack_VectorStdOps.hpp"
#include "AbstractLinAlgPack_VectorAuxiliaryOps.hpp"
#include "AbstractLinAlgPack_LinAlgOpPack.hpp"
#include "RTOpPack_RTOpC.hpp"
#include "Teuchos_dyn_cast.hpp"
#include "Teuchos_Assert.hpp"
#include "Teuchos_AbstractFactoryStd.hpp"

namespace {

// Reduction/transformation operator used to evaluate the constraints c(x)
static RTOpPack::RTOpC explnlp2_c_eval_op;

// Simple class whose constructor initializes the operator object above before
// main() gets underway.
class init_rtop_server_t {
public:
  init_rtop_server_t() {
    // Construct the operator that evaluates c(x)
    TEUCHOS_TEST_FOR_EXCEPT( 0 != RTOp_TOp_explnlp2_c_eval_construct(&explnlp2_c_eval_op.op()) );
  }
};
init_rtop_server_t init_rtop_server;

} // end namespace

namespace NLPInterfacePack {

ExampleNLPObjGrad::ExampleNLPObjGrad(
  const VectorSpace::space_ptr_t&  vec_space
  ,value_type                      xo
  ,bool                            has_bounds
  ,bool                            dep_bounded
  )
  :vec_space_(vec_space), vec_space_comp_(Teuchos::null)
  ,initialized_(false), obj_scale_(1.0)
  ,has_bounds_(has_bounds), force_xinit_in_bounds_(true), n_(2*vec_space->dim())
{
  namespace rcp = MemMngPack;

  // Assert the size of the NLP
  TEUCHOS_TEST_FOR_EXCEPTION(
    vec_space->dim() <= 0, std::logic_error
    ,"ExampleNLPObjGrad::ExampleNLPObjGrad(...) Error!" );

  // Setup the aggregate vector space object
  BasisSystemComposite::initialize_space_x(
    vec_space, vec_space, &var_dep_, &var_indep_, &vec_space_comp_ );

  // Set the initial starting point.
  xinit_ = vec_space_comp_->create_member();
  *xinit_ = xo;

  /*
    Setup the sparse bounds

    xl(i) = 0.01   \
                    }  for i in bounded_rng
    xu(i) = 20     /
  */

  xl_ = vec_space_comp_->create_member();
  xu_ = vec_space_comp_->create_member();

  if(has_bounds) {
    const Range1D
      bounded_rng   = ( dep_bounded ? var_dep_   : var_indep_ ),
      unbounded_rng = ( dep_bounded ? var_indep_ : var_dep_   );
    *xl_->sub_view(bounded_rng)   = 0.01;
    *xl_->sub_view(unbounded_rng) = -NLP::infinite_bound();
    *xu_->sub_view(bounded_rng)   = 20.0;
    *xu_->sub_view(unbounded_rng) = +NLP::infinite_bound();
  }
  else {
    *xl_ = -NLP::infinite_bound();
    *xu_ = +NLP::infinite_bound();
  }
}
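
// In summary, the constructor above assembles an NLP with n = 2*m variables
// x = ( x(var_dep), x(var_indep) ) of the form
//
//   min   f(x) = (obj_scale/2) * sum( x(i)^2, for i = 1..n )
//   s.t.  c(j) = x(j) * (x(m+j) - 1) - 10 * x(m+j) = 0, for j = 1..m
//         0.01 <= x(i) <= 20 for i in the bounded range (when has_bounds == true)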

// Overridden public members from NLP

void ExampleNLPObjGrad::initialize(bool test_setup)
{
  if( initialized_ ) {
    NLPObjGrad::initialize(test_setup);
    return;
  }

  AbstractLinAlgPack::force_in_bounds( *xl_, *xu_, xinit_.get() );

  NLPObjGrad::initialize(test_setup);

  initialized_ = true;
}

bool ExampleNLPObjGrad::is_initialized() const
{
  return initialized_;
}

size_type ExampleNLPObjGrad::n() const
{
  assert_is_initialized();
  return n_;
}

size_type ExampleNLPObjGrad::m() const
{
  assert_is_initialized();
  return n_ / 2;
}

NLP::vec_space_ptr_t ExampleNLPObjGrad::space_x() const
{
  return vec_space_comp_;
}

NLP::vec_space_ptr_t ExampleNLPObjGrad::space_c() const
{
  return vec_space_;
}

size_type ExampleNLPObjGrad::num_bounded_x() const
{
  return has_bounds_ ? n_/2 : 0;
}

void ExampleNLPObjGrad::force_xinit_in_bounds(bool force_xinit_in_bounds)
{
  force_xinit_in_bounds_ = force_xinit_in_bounds;
}

bool ExampleNLPObjGrad::force_xinit_in_bounds() const
{
  return force_xinit_in_bounds_;
}

const Vector& ExampleNLPObjGrad::xinit() const
{
  assert_is_initialized();
  return *xinit_;
}

const Vector& ExampleNLPObjGrad::xl() const
{
  assert_is_initialized();
  return *xl_;
}

const Vector& ExampleNLPObjGrad::xu() const
{
  assert_is_initialized();
  return *xu_;
}

value_type ExampleNLPObjGrad::max_var_bounds_viol() const
{
  return std::numeric_limits<value_type>::max(); // No limits on the bounds
}

void ExampleNLPObjGrad::scale_f( value_type scale_f )
{
  assert_is_initialized();
  obj_scale_ = scale_f;
}

value_type ExampleNLPObjGrad::scale_f() const
{
  assert_is_initialized();
  return obj_scale_;
}

void ExampleNLPObjGrad::report_final_solution(
  const Vector&  x
  ,const Vector* lambda
  ,const Vector* nu
  ,bool          optimal
  )
{
  assert_is_initialized();
  // Do what you want with the solution (or final values) here.
  // For this example we will just ignore it.
}

Range1D ExampleNLPObjGrad::var_dep() const
{
  return var_dep_;
}

Range1D ExampleNLPObjGrad::var_indep() const
{
  return var_indep_;
}

// Overridden protected members from NLP

void ExampleNLPObjGrad::imp_calc_f(const Vector& x, bool newx
  , const ZeroOrderInfo& zero_order_info) const
{
  using AbstractLinAlgPack::dot;
  assert_is_initialized();
  f(); // assert f is set
  TEUCHOS_TEST_FOR_EXCEPTION( n() != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_f(...)" );
  // f(x) = (obj_scale/2) * sum( x(i)^2, for i = 1..n )
  *zero_order_info.f = obj_scale_ / 2.0 * dot(x,x);
}
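
// Concrete check of the objective formula above: with obj_scale = 1 and
// x = (1, 2, 3, 4), f(x) = 0.5 * (1 + 4 + 9 + 16) = 15.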

void ExampleNLPObjGrad::imp_calc_c(const Vector& x, bool newx
  , const ZeroOrderInfo& zero_order_info) const
{
  assert_is_initialized();
  const size_type n = this->n();
  TEUCHOS_TEST_FOR_EXCEPTION( n != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_c(...)" );

  // c(x)(j) = x(j) * (x(m+j) - 1) - 10 * x(m+j) = 0, for j = 1...m

  Vector::vec_ptr_t
    xD = x.sub_view(var_dep()),
    xI = x.sub_view(var_indep());

  const Vector*  vecs[]      = { xD.get(), xI.get() };
  VectorMutable* targ_vecs[] = { zero_order_info.c };
  AbstractLinAlgPack::apply_op(explnlp2_c_eval_op,2,vecs,1,targ_vecs,NULL);

}
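
// Worked example of the constraint formula above: with m = 2 and
// x = (xD, xI) = (1, 2, 3, 4),
//   c(1) = 1*(3 - 1) - 10*3 = -28
//   c(2) = 2*(4 - 1) - 10*4 = -34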

void ExampleNLPObjGrad::imp_calc_h(
  const Vector& x, bool newx, const ZeroOrderInfo& zero_order_info) const
{
  TEUCHOS_TEST_FOR_EXCEPT(true); // Should never be called!
}

// Overridden protected members from NLPObjGrad

void ExampleNLPObjGrad::imp_calc_Gf(const Vector& x, bool newx
  , const ObjGradInfo& obj_grad_info) const
{
  assert_is_initialized();
  TEUCHOS_TEST_FOR_EXCEPTION( n() != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_Gf(...)" );
  // Gf = obj_scale * x
  LinAlgOpPack::V_StV(obj_grad_info.Gf,obj_scale_,x);
}
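
// Derivation of the gradient used above: since f(x) = (obj_scale/2) * sum( x(i)^2 ),
// each component is d(f)/d(x(i)) = obj_scale * x(i), i.e. Gf = obj_scale * x.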

} // end namespace NLPInterfacePack
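
The listing above only defines the example NLP. As a rough usage sketch (not part of the
file), an ExampleNLPObjGrad can be driven directly through the NLP/NLPObjGrad quantity
accessors. Here my_vector_space() is a hypothetical helper returning any concrete
AbstractLinAlgPack::VectorSpace of the desired dimension, and set_f()/set_Gf()/calc_f()/
calc_Gf() are assumed to be the usual set-and-calculate members of the NLP and NLPObjGrad
interfaces:

  using namespace NLPInterfacePack;

  // Hypothetical helper: a concrete vector space of dimension 4 for the dependent variables.
  VectorSpace::space_ptr_t space_d = my_vector_space(4);

  // n = 2*dim(space_d) = 8 variables, m = 4 equality constraints.
  ExampleNLPObjGrad nlp( space_d, 0.1 /*xo*/, true /*has_bounds*/, true /*dep_bounded*/ );
  nlp.initialize();

  value_type                  f;
  VectorSpace::vec_mut_ptr_t  Gf = nlp.space_x()->create_member();
  nlp.set_f(&f);
  nlp.set_Gf(Gf.get());
  nlp.calc_f(nlp.xinit());          // f  = (obj_scale/2) * xinit'*xinit
  nlp.calc_Gf(nlp.xinit(), false);  // Gf = obj_scale * xinit (x unchanged since calc_f)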