44 #include "MoochoPack_ReducedHessianSecantUpdateStd_Step.hpp"
45 #include "MoochoPack_moocho_algo_conversion.hpp"
46 #include "MoochoPack_Exceptions.hpp"
47 #include "IterationPack_print_algorithm_step.hpp"
48 #include "AbstractLinAlgPack_VectorSpace.hpp"
49 #include "AbstractLinAlgPack_VectorStdOps.hpp"
50 #include "AbstractLinAlgPack_VectorMutable.hpp"
51 #include "AbstractLinAlgPack_VectorOut.hpp"
52 #include "AbstractLinAlgPack_MatrixSymOpNonsing.hpp"
53 #include "AbstractLinAlgPack_MatrixSymInitDiag.hpp"
54 #include "AbstractLinAlgPack_MatrixOpOut.hpp"
55 #include "AbstractLinAlgPack_LinAlgOpPack.hpp"
56 #include "Teuchos_dyn_cast.hpp"

namespace MoochoPack {

ReducedHessianSecantUpdateStd_Step::ReducedHessianSecantUpdateStd_Step(
  const secant_update_ptr_t& secant_update
  )
  :secant_update_(secant_update)
  ,num_basis_(NO_BASIS_UPDATED_YET)
  ,iter_k_rHL_init_ident_(-1)  // -1 marks that rHL has not yet been (re)initialized to the identity
{}

bool ReducedHessianSecantUpdateStd_Step::do_step(
  Algorithm& _algo, poss_type step_poss, IterationPack::EDoStepType type
  ,poss_type assoc_step_poss
  )
{
  using Teuchos::dyn_cast;
  using AbstractLinAlgPack::Vp_StV;
  using LinAlgOpPack::V_StV;
  using LinAlgOpPack::V_VmV;
  using LinAlgOpPack::V_MtV;

  NLPAlgo       &algo = rsqp_algo(_algo);
  NLPAlgoState  &s    = algo.rsqp_state();

  EJournalOutputLevel olevel    = algo.algo_cntr().journal_output_level();
  EJournalOutputLevel ns_olevel = algo.algo_cntr().null_space_journal_output_level();
  std::ostream        &out      = algo.track().journal_out();

  // Print the step header
  if( static_cast<int>(olevel) >= static_cast<int>(PRINT_ALGORITHM_STEPS) ) {
    using IterationPack::print_algorithm_step;
    print_algorithm_step( algo, step_poss, type, assoc_step_poss, out );
  }

  bool return_val = true;

  // Get iteration quantities
  IterQuantityAccess<index_type>
    &num_basis_iq = s.num_basis();
  IterQuantityAccess<VectorMutable>
    &pz_iq  = s.pz(),
    &rGf_iq = s.rGf(),
    &w_iq   = s.w();
  IterQuantityAccess<MatrixOp>
    &Z_iq = s.Z();
  IterQuantityAccess<MatrixSymOp>
    &rHL_iq = s.rHL();

  // Problem dimensions
  const NLP &nlp = algo.nlp();
  const size_type
    m    = nlp.m(),
    nind = m ? Z_iq.get_k(Z_iq.last_updated()).cols() : 0;  // nind = n - r, the null-space dimension

  // See if a new basis has been selected
  bool new_basis = false;
  {
    const int last_updated_k = num_basis_iq.last_updated();
    if( last_updated_k != IterQuantity::NONE_UPDATED ) {
      const index_type num_basis_last = num_basis_iq.get_k(last_updated_k);
      if( num_basis_ == NO_BASIS_UPDATED_YET )
        num_basis_ = num_basis_last;
      else if( num_basis_ != num_basis_last )
        new_basis = true;
      num_basis_ = num_basis_last;
    }
  }
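
  // NOTE: rHL is the reduced Hessian expressed in the coordinates of the current
  // null-space basis Z.  When the basis selection changes (num_basis is incremented)
  // the old rHL no longer corresponds to the new Z, so the step below simply starts
  // over with rHL_k = eye(n-r) rather than trying to transform the old matrix.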

  // If rHL has already been updated for this iteration then just leave it alone.
  if( !rHL_iq.updated_k(0) ) {

    if( new_basis ) {

      // Transition rHL to the new basis by just starting over.
      if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS ) {
        out << "\nBasis changed. Reinitializing rHL_k = eye(n-r) ...\n";
      }
      dyn_cast<MatrixSymInitDiag>(rHL_iq.set_k(0)).init_identity(
        Z_iq.get_k(0).space_rows()
        );
      if( static_cast<int>(ns_olevel) >= static_cast<int>(PRINT_ALGORITHM_STEPS) )
        if( algo.algo_cntr().calc_matrix_norms() )
          out << "\n||rHL_k||inf = " << rHL_iq.get_k(0).calc_norm(MatrixOp::MAT_NORM_INF).value << std::endl;
      if( (int)ns_olevel >= (int)PRINT_ITERATION_QUANTITIES )
        out << "\nrHL_k = \n" << rHL_iq.get_k(0);
      quasi_newton_stats_(s).set_k(0).set_updated_stats(
        QuasiNewtonStats::REINITIALIZED );
      iter_k_rHL_init_ident_ = s.k();  // remember the iteration rHL was reset to the identity

    }
    else {
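
      // No new basis was selected, so try a quasi-Newton (secant) update of rHL
      // using information from the previous iteration.  The flag
      // iter_k_rHL_init_ident_ == s.k() - 1 passed to perform_update() below tells
      // the secant update object whether this is the first update after rHL was
      // reinitialized to the identity.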

      if( rHL_iq.updated_k(-1) && rGf_iq.updated_k(-1) ) {

        // We have the information to perform a BFGS (secant) update.

        if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS )
          out << "\nPerforming Secant update ...\n";

        const Vector
          &rGf_k   = rGf_iq.get_k(0),
          &rGf_km1 = rGf_iq.get_k(-1),
          &pz_km1  = pz_iq.get_k(-1);
        const value_type
          alpha_km1 = s.alpha().get_k(-1);
        VectorSpace::vec_mut_ptr_t
          y_bfgs = rGf_k.space().create_member(),
          s_bfgs = pz_km1.space().create_member();
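
        // Form the BFGS secant pair for the reduced Hessian:
        //   s_bfgs = alpha_km1 * pz_km1                    (null-space step actually taken)
        //   y_bfgs = rGf_k - rGf_km1 - alpha_km1 * w_km1   (change in the reduced gradient,
        //                                                   corrected by the cross term w)
        // A BFGS-type secant update then (approximately) enforces rHL_k * s_bfgs = y_bfgs.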

        // y_bfgs = rGf_k - rGf_km1
        V_VmV( y_bfgs.get(), rGf_k, rGf_km1 );

        if( w_iq.updated_k(-1) )
          // y_bfgs += - alpha_km1 * w_km1
          Vp_StV( y_bfgs.get(), - alpha_km1, w_iq.get_k(-1) );

        // s_bfgs = alpha_km1 * pz_km1
        V_StV( s_bfgs.get(), alpha_km1, pz_iq.get_k(-1) );

        if( static_cast<int>(ns_olevel) >= static_cast<int>(PRINT_ALGORITHM_STEPS) ) {
          out << "\n||y_bfgs||inf = " << y_bfgs->norm_inf() << std::endl;
          out << "\n||s_bfgs||inf = " << s_bfgs->norm_inf() << std::endl;
        }

        if( static_cast<int>(ns_olevel) >= static_cast<int>(PRINT_VECTORS) ) {
          out << "\ny_bfgs =\n" << *y_bfgs;
          out << "\ns_bfgs =\n" << *s_bfgs;
        }

        // Update rHL_k starting from rHL_km1
        MatrixSymOp
          &rHL_k = rHL_iq.set_k(0,-1);

        // Perform the secant update
        if(!secant_update().perform_update(
             s_bfgs.get(), y_bfgs.get()
             ,iter_k_rHL_init_ident_ == s.k() - 1  // first_update
             ,out, ns_olevel, &algo, &s, &rHL_k
             ))
        {
          return_val = false;  // redirect control of the algorithm
        }

      }
      else {

        // We do not have the information to perform the update, so either reuse
        // the last updated rHL (if it has the right dimension) or reinitialize
        // rHL_k = eye(n-r).

        int  k_last_offset = rHL_iq.last_updated();
        bool set_current   = false;
        if( k_last_offset != IterQuantity::NONE_UPDATED && k_last_offset < 0 ) {
          const MatrixSymOp &rHL_k_last = rHL_iq.get_k(k_last_offset);
          const size_type   nind_last   = rHL_k_last.rows();
          if( nind_last != nind ) {
            if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS ) {
              out
                << "No new basis was selected.\n"
                << "The previous matrix rHL_k(" << k_last_offset << ") was found but its dimension\n"
                << "rHL_k(" << k_last_offset << ").rows() = " << nind_last << " != n-r = " << nind << std::endl;
            }
          }
          else {
            if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS ) {
              out
                << "No new basis was selected so using the previously updated ...\n"
                << "rHL_k = rHL_k(" << k_last_offset << ")\n";
            }
            rHL_iq.set_k(0) = rHL_k_last;
            quasi_newton_stats_(s).set_k(0).set_updated_stats(
              QuasiNewtonStats::SKIPED );
            if( (int)ns_olevel >= (int)PRINT_ITERATION_QUANTITIES ) {
              rHL_iq.get_k(0).output( out << "\nrHL_k = \n" );
            }
            set_current = true;
          }
        }
        if( !set_current ) {
          if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS ) {
            out
              << "\nInitializing rHL = eye(n-r) "
              << "(k = " << algo.state().k() << ") ...\n";
          }
          dyn_cast<MatrixSymInitDiag>(rHL_iq.set_k(0)).init_identity(
            Z_iq.get_k(0).space_rows() );
          iter_k_rHL_init_ident_ = s.k();
          quasi_newton_stats_(s).set_k(0).set_updated_stats(
            QuasiNewtonStats::REINITIALIZED );
        }

      }
    }

    // Print the final updated matrix
    MatrixOp::EMatNormType mat_nrm_inf = MatrixOp::MAT_NORM_INF;

    if( (int)ns_olevel >= (int)PRINT_ALGORITHM_STEPS ) {
      if( algo.algo_cntr().calc_matrix_norms() )
        out << "\n||rHL_k||inf = " << rHL_iq.get_k(0).calc_norm(mat_nrm_inf).value << std::endl;
      if( algo.algo_cntr().calc_conditioning() ) {
        const MatrixSymOpNonsing
          *rHL_ns_k = dynamic_cast<const MatrixSymOpNonsing*>(&rHL_iq.get_k(0));
        if( rHL_ns_k )  // the condition number can only be estimated if rHL supports solves
          out << "\ncond_inf(rHL_k) = " << rHL_ns_k->calc_cond_num(mat_nrm_inf).value << std::endl;
      }
    }
    if( (int)ns_olevel >= (int)PRINT_ITERATION_QUANTITIES ) {
      out << "\nrHL_k = \n" << rHL_iq.get_k(0);
    }

  }
  else {
    if( (int)olevel >= (int)PRINT_ALGORITHM_STEPS ) {
      out << "\nThe matrix rHL_k has already been updated so leave it\n";
    }
  }

  return return_val;
}
299 ,std::ostream& out,
const std::string& L
303 << L <<
"*** Calculate the reduced hessian of the Lagrangian rHL = Z' * HL * Z\n"
304 << L <<
"default: num_basis_remembered = NO_BASIS_UPDATED_YET\n"
305 << L <<
" iter_k_rHL_init_ident = -1\n"
306 << L <<
"if num_basis_remembered = NO_BASIS_UPDATED_YET then\n"
307 << L <<
" num_basis_remembered = num_basis\n"
309 << L <<
"if num_basis_remembered != num_basis then\n"
310 << L <<
" num_basis_remembered = num_basis\n"
311 << L <<
" new_basis = true\n"
313 << L <<
"if rHL_k is not updated then\n"
314 << L <<
" if new_basis == true then\n"
315 << L <<
" *** Transition rHL to the new basis by just starting over.\n"
316 << L <<
" rHL_k = eye(n-r) *** must support MatrixSymInitDiag interface\n"
317 << L <<
" iter_k_rHL_init_ident = k\n"
318 << L <<
" goto next step\n"
320 << L <<
" if rHL_km1 and rGf_km1 are updated then\n"
321 << L <<
" *** We should have the information to perform a BFGS update\n"
322 << L <<
" y = rGf_k - rGf_km1\n"
323 << L <<
" s = alpha_km1 * pz_km1\n"
324 << L <<
" if k - 1 == iter_k_rHL_init_ident then\n"
325 << L <<
" first_update = true\n"
327 << L <<
" first_update = false\n"
329 << L <<
" rHL_k = rHL_km1\n"
330 << L <<
" begin secant update\n"
331 << L <<
" (" <<
typeName(secant_update()) <<
")\n"
333 secant_update().print_step( out, L+
" " );
335 << L <<
" end secant update\n"
337 << L <<
" *** We have no information for which to preform a BFGS update.\n"
338 << L <<
" k_last_offset = last iteration rHL was updated for\n"
339 << L <<
" if k_last_offset does not exist then\n"
340 << L <<
" *** We are left with no choise but to initialize rHL\n"
341 << L <<
" rHL_k = eye(n-r) *** must support MatrixSymInitDiag interface\n"
342 << L <<
" iter_k_rHL_init_ident = k\n"
344 << L <<
" *** No new basis has been selected so we may as well\n"
345 << L <<
" *** just use the last rHL that was updated\n"
346 << L <<
" rHL_k = rHL_k(k_last_offset)\n"