ROL_PrimalDualActiveSetStep.hpp
// @HEADER
// *****************************************************************************
//               Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER
#ifndef ROL_PRIMALDUALACTIVESETSTEP_H
#define ROL_PRIMALDUALACTIVESETSTEP_H

#include "ROL_Step.hpp"
#include "ROL_Vector.hpp"
#include "ROL_KrylovFactory.hpp"
#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Types.hpp"
#include "ROL_Secant.hpp"
#include "ROL_ParameterList.hpp"
/** \class ROL::PrimalDualActiveSetStep
    \brief Implements the computation of optimization steps with the Newton
           primal-dual active set method.
*/

namespace ROL {

template <class Real>
class PrimalDualActiveSetStep : public Step<Real> {
private:

  ROL::Ptr<Krylov<Real> > krylov_;

  // Krylov Parameters
  int  iterCR_;
  int  flagCR_;
  Real itol_;

  // PDAS Parameters
  int  maxit_;     ///< Maximum number of PDAS iterations
  int  iter_;
  int  flag_;
  Real stol_;      ///< PDAS minimum step size stopping tolerance
  Real gtol_;      ///< PDAS gradient stopping tolerance
  Real scale_;     ///< Scale for dual variables in the active set
  Real neps_;
  bool feasible_;  ///< Flag whether the current iterate is feasible or not

  // Dual Variable
  ROL::Ptr<Vector<Real> > lambda_;  ///< Container for dual variables
  ROL::Ptr<Vector<Real> > xlam_;    ///< Container for primal plus dual variables
  ROL::Ptr<Vector<Real> > x0_;      ///< Container for initial primal variables
  ROL::Ptr<Vector<Real> > xbnd_;    ///< Container for primal variable bounds
  ROL::Ptr<Vector<Real> > As_;      ///< Container for step projected onto active set
  ROL::Ptr<Vector<Real> > xtmp_;    ///< Container for temporary primal storage
  ROL::Ptr<Vector<Real> > res_;     ///< Container for optimality system residual for quadratic model
  ROL::Ptr<Vector<Real> > Ag_;      ///< Container for gradient projected onto active set
  ROL::Ptr<Vector<Real> > rtmp_;    ///< Container for temporary right hand side storage
  ROL::Ptr<Vector<Real> > gtmp_;    ///< Container for temporary gradient storage

  // Secant Information
  ESecant esec_;                    ///< Enum for secant type
  ROL::Ptr<Secant<Real> > secant_;  ///< Secant object
  bool useSecantPrecond_;           ///< Flag whether to use a secant approximation as the preconditioner
  bool useSecantHessVec_;           ///< Flag whether to use a secant approximation of the Hessian
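  /** \brief Hessian operator restricted to the \f$\epsilon\f$-inactive set.

      A brief description of the operator defined below: the
      \f$\epsilon\f$-active components of the input vector are pruned before,
      and those of the output vector after, applying either the objective
      Hessian or the secant approximation.
  */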
  class HessianPD : public LinearOperator<Real> {
  private:
    const ROL::Ptr<Objective<Real> > obj_;
    const ROL::Ptr<BoundConstraint<Real> > bnd_;
    const ROL::Ptr<Vector<Real> > x_;
    const ROL::Ptr<Vector<Real> > xlam_;
    ROL::Ptr<Vector<Real> > v_;
    Real eps_;
    const ROL::Ptr<Secant<Real> > secant_;
    bool useSecant_;
  public:
    HessianPD(const ROL::Ptr<Objective<Real> > &obj,
              const ROL::Ptr<BoundConstraint<Real> > &bnd,
              const ROL::Ptr<Vector<Real> > &x,
              const ROL::Ptr<Vector<Real> > &xlam,
              const Real eps = 0,
              const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
              const bool useSecant = false )
      : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
        eps_(eps), secant_(secant), useSecant_(useSecant) {
      v_ = x_->clone();
      if ( !useSecant || secant == ROL::nullPtr ) {
        useSecant_ = false;
      }
    }
    void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      v_->set(v);
      bnd_->pruneActive(*v_,*xlam_,eps_);
      if ( useSecant_ ) {
        secant_->applyB(Hv,*v_);
      }
      else {
        obj_->hessVec(Hv,*v_,*x_,tol);
      }
      bnd_->pruneActive(Hv,*xlam_,eps_);
    }
  };
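  /** \brief Preconditioner restricted to the \f$\epsilon\f$-inactive set.

      A brief description of the operator defined below: applyInverse() prunes
      the \f$\epsilon\f$-active components of the input and output and applies
      either the objective preconditioner or the secant approximation of the
      inverse Hessian; apply() simply returns the dual of the input vector.
  */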
  class PrecondPD : public LinearOperator<Real> {
  private:
    const ROL::Ptr<Objective<Real> > obj_;
    const ROL::Ptr<BoundConstraint<Real> > bnd_;
    const ROL::Ptr<Vector<Real> > x_;
    const ROL::Ptr<Vector<Real> > xlam_;
    ROL::Ptr<Vector<Real> > v_;
    Real eps_;
    const ROL::Ptr<Secant<Real> > secant_;
    bool useSecant_;
  public:
    PrecondPD(const ROL::Ptr<Objective<Real> > &obj,
              const ROL::Ptr<BoundConstraint<Real> > &bnd,
              const ROL::Ptr<Vector<Real> > &x,
              const ROL::Ptr<Vector<Real> > &xlam,
              const Real eps = 0,
              const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
              const bool useSecant = false )
      : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
        eps_(eps), secant_(secant), useSecant_(useSecant) {
      v_ = x_->dual().clone();
      if ( !useSecant || secant == ROL::nullPtr ) {
        useSecant_ = false;
      }
    }
    void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      Hv.set(v.dual());
    }
    void applyInverse( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      v_->set(v);
      bnd_->pruneActive(*v_,*xlam_,eps_);
      if ( useSecant_ ) {
        secant_->applyH(Hv,*v_);
      }
      else {
        obj_->precond(Hv,*v_,*x_,tol);
      }
      bnd_->pruneActive(Hv,*xlam_,eps_);
    }
  };
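  /** \brief Compute the gradient-based criticality measure.

      A sketch of the quantity computed below (projection notation assumed):
      \f[
        \|x_k - P_{[a,b]}\bigl(x_k - \nabla f(x_k)\bigr)\|,
      \f]
      where \f$P_{[a,b]}\f$ denotes the projection onto the bound constraints.
  */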
  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj,
                                 BoundConstraint<Real> &con, Real tol) {
    Real one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    obj.gradient(*(step_state->gradientVec),x,tol);
    xtmp_->set(x);
    xtmp_->axpy(-one,(step_state->gradientVec)->dual());
    con.project(*xtmp_);
    xtmp_->axpy(-one,x);
    return xtmp_->norm();
  }
public:
  /** \brief Constructor.
  */
  PrimalDualActiveSetStep( ROL::ParameterList &parlist )
    : Step<Real>::Step(), krylov_(ROL::nullPtr),
      iterCR_(0), flagCR_(0), itol_(0),
      maxit_(0), iter_(0), flag_(0), stol_(0), gtol_(0), scale_(0),
      neps_(-ROL_EPSILON<Real>()), feasible_(false),
      lambda_(ROL::nullPtr), xlam_(ROL::nullPtr), x0_(ROL::nullPtr),
      xbnd_(ROL::nullPtr), As_(ROL::nullPtr), xtmp_(ROL::nullPtr),
      res_(ROL::nullPtr), Ag_(ROL::nullPtr), rtmp_(ROL::nullPtr),
      gtmp_(ROL::nullPtr),
      esec_(SECANT_LBFGS), secant_(ROL::nullPtr), useSecantPrecond_(false),
      useSecantHessVec_(false) {
    Real one(1), oem6(1.e-6), oem8(1.e-8);
    // Algorithmic parameters
    maxit_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Iteration Limit",10);
    stol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Step Tolerance",oem8);
    gtol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Gradient Tolerance",oem6);
    scale_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Dual Scaling", one);
    // Build secant object
    std::string secantType = parlist.sublist("General").sublist("Secant").get("Type","Limited-Memory BFGS");
    esec_ = StringToESecant(secantType);
    useSecantHessVec_ = parlist.sublist("General").sublist("Secant").get("Use as Hessian", false);
    useSecantPrecond_ = parlist.sublist("General").sublist("Secant").get("Use as Preconditioner", false);
    if ( useSecantHessVec_ || useSecantPrecond_ ) {
      secant_ = SecantFactory<Real>(parlist);
    }
    // Build Krylov object
    krylov_ = KrylovFactory<Real>(parlist);
  }
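  /* Example (illustrative sketch only): a parameter list exercising the
     options read by the constructor above.  Sublist and key names are taken
     from the constructor; the values shown are arbitrary.

       ROL::ParameterList parlist;
       ROL::ParameterList &pdas
         = parlist.sublist("Step").sublist("Primal Dual Active Set");
       pdas.set("Iteration Limit",             10);
       pdas.set("Relative Step Tolerance",     1.e-8);
       pdas.set("Relative Gradient Tolerance", 1.e-6);
       pdas.set("Dual Scaling",                1.0);
       ROL::ParameterList &secant = parlist.sublist("General").sublist("Secant");
       secant.set("Type",                  "Limited-Memory BFGS");
       secant.set("Use as Hessian",        false);
       secant.set("Use as Preconditioner", false);
       PrimalDualActiveSetStep<double> step(parlist);
  */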
  /** \brief Initialize step with bound constraint.
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &con,
                   AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real zero(0), one(1);
    // Initialize state descent direction and gradient storage
    step_state->descentVec  = s.clone();
    step_state->gradientVec = g.clone();
    step_state->searchSize  = zero;
    // Initialize additional storage
    xlam_ = x.clone();
    x0_   = x.clone();
    xbnd_ = x.clone();
    As_   = s.clone();
    xtmp_ = x.clone();
    res_  = g.clone();
    Ag_   = g.clone();
    rtmp_ = g.clone();
    gtmp_ = g.clone();
    // Project x onto constraint set
    con.project(x);
    // Update objective function, get value, and get gradient
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;
    // Initialize dual variable
    lambda_ = s.clone();
    lambda_->set((step_state->gradientVec)->dual());
    lambda_->scale(-one);
  }
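  /** \brief Compute step.

      A sketch of one pass of the loop below (set notation assumed:
      \f$\mathcal{A}_k\f$ and \f$\mathcal{I}_k\f$ denote the active and
      inactive sets): given \f$(x_k,\lambda_k)\f$, the active and inactive
      sets are estimated from \f$x_k + c\lambda_k\f$, where \f$c\f$ is the
      "Dual Scaling" parameter; the active components of the step are fixed by
      the bounds, and the inactive components are obtained from the reduced
      Newton system
      \f[
        \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}\, s
          = -\nabla f(x_k)_{\mathcal{I}_k}
            -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k}\,(s_k)_{\mathcal{A}_k},
      \f]
      which is solved inexactly with the Krylov solver built in the
      constructor.  The multiplier update is then taken from the active
      components of \f$-(\nabla f(x_k) + \nabla^2 f(x_k)s_k)\f$.
  */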
  using Step<Real>::compute;
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj,
                BoundConstraint<Real> &con, AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real zero(0), one(1);
    s.zero();
    x0_->set(x);
    res_->set(*(step_state->gradientVec));
    for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
      /********************************************************************/
      // MODIFY ITERATE VECTOR TO CHECK ACTIVE SET
      /********************************************************************/
      xlam_->set(*x0_);                          // xlam = x0
      xlam_->axpy(scale_,*(lambda_));            // xlam = x0 + c*lambda
      /********************************************************************/
      // PROJECT x ONTO PRIMAL DUAL FEASIBLE SET
      /********************************************************************/
      As_->zero();                               // As = 0

      xbnd_->set(*con.getUpperBound());          // xbnd = u
      xbnd_->axpy(-one,x);                       // xbnd = u - x
      xtmp_->set(*xbnd_);                        // tmp  = u - x
      con.pruneUpperActive(*xtmp_,*xlam_,neps_); // tmp  = I(u - x)
      xbnd_->axpy(-one,*xtmp_);                  // xbnd = A(u - x)
      As_->plus(*xbnd_);                         // As  += A(u - x)

      xbnd_->set(*con.getLowerBound());          // xbnd = l
      xbnd_->axpy(-one,x);                       // xbnd = l - x
      xtmp_->set(*xbnd_);                        // tmp  = l - x
      con.pruneLowerActive(*xtmp_,*xlam_,neps_); // tmp  = I(l - x)
      xbnd_->axpy(-one,*xtmp_);                  // xbnd = A(l - x)
      As_->plus(*xbnd_);                         // As  += A(l - x)
      /********************************************************************/
      // APPLY HESSIAN TO ACTIVE COMPONENTS OF s AND REMOVE INACTIVE
      /********************************************************************/
      itol_ = std::sqrt(ROL_EPSILON<Real>());
      if ( useSecantHessVec_ && secant_ != ROL::nullPtr ) { // IHAs = H*As
        secant_->applyB(*gtmp_,*As_);
      }
      else {
        obj.hessVec(*gtmp_,*As_,x,itol_);
      }
      con.pruneActive(*gtmp_,*xlam_,neps_);      // IHAs = I(H*As)
      /********************************************************************/
      // SEPARATE ACTIVE AND INACTIVE COMPONENTS OF THE GRADIENT
      /********************************************************************/
      rtmp_->set(*(step_state->gradientVec));    // Inactive components
      con.pruneActive(*rtmp_,*xlam_,neps_);

      Ag_->set(*(step_state->gradientVec));      // Active components
      Ag_->axpy(-one,*rtmp_);
      /********************************************************************/
      // SOLVE REDUCED NEWTON SYSTEM
      /********************************************************************/
      rtmp_->plus(*gtmp_);
      rtmp_->scale(-one);                        // rhs = -Ig - I(H*As)
      s.zero();
      if ( rtmp_->norm() > zero ) {
        // Initialize Hessian and preconditioner
        ROL::Ptr<Objective<Real> >       obj_ptr = ROL::makePtrFromRef(obj);
        ROL::Ptr<BoundConstraint<Real> > con_ptr = ROL::makePtrFromRef(con);
        ROL::Ptr<LinearOperator<Real> > hessian
          = ROL::makePtr<HessianPD>(obj_ptr,con_ptr,
              algo_state.iterateVec,xlam_,neps_,secant_,useSecantHessVec_);
        ROL::Ptr<LinearOperator<Real> > precond
          = ROL::makePtr<PrecondPD>(obj_ptr,con_ptr,
              algo_state.iterateVec,xlam_,neps_,secant_,useSecantPrecond_);
        //solve(s,*rtmp_,*xlam_,x,obj,con);      // Call conjugate residuals
        krylov_->run(s,*hessian,*rtmp_,*precond,iterCR_,flagCR_);
        con.pruneActive(s,*xlam_,neps_);         // s <- Is
      }
      s.plus(*As_);                              // s = Is + As
      /********************************************************************/
      // UPDATE MULTIPLIER
      /********************************************************************/
      if ( useSecantHessVec_ && secant_ != ROL::nullPtr ) {
        secant_->applyB(*rtmp_,s);
      }
      else {
        obj.hessVec(*rtmp_,s,x,itol_);
      }
      gtmp_->set(*rtmp_);
      con.pruneActive(*gtmp_,*xlam_,neps_);
      lambda_->set(*rtmp_);
      lambda_->axpy(-one,*gtmp_);
      lambda_->plus(*Ag_);
      lambda_->scale(-one);
      /********************************************************************/
      // UPDATE STEP
      /********************************************************************/
      x0_->set(x);
      x0_->plus(s);
      res_->set(*(step_state->gradientVec));
      res_->plus(*rtmp_);
      // Compute criticality measure
      xtmp_->set(*x0_);
      xtmp_->axpy(-one,res_->dual());
      con.project(*xtmp_);
      xtmp_->axpy(-one,*x0_);
//      std::cout << s.norm()        << " "
//                << tmp->norm()     << " "
//                << res_->norm()    << " "
//                << lambda_->norm() << " "
//                << flagCR_         << " "
//                << iterCR_         << "\n";
      if ( xtmp_->norm() < gtol_*algo_state.gnorm ) {
        flag_ = 0;
        break;
      }
      if ( s.norm() < stol_*x.norm() ) {
        flag_ = 2;
        break;
      }
    }
    if ( iter_ == maxit_ ) {
      flag_ = 1;
    }
    else {
      iter_++;
    }
  }
  /** \brief Update step, if successful.
  */
  using Step<Real>::update;
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj,
               BoundConstraint<Real> &con, AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    step_state->SPiter = (maxit_ > 1) ? iter_ : iterCR_;
    step_state->SPflag = (maxit_ > 1) ? flag_ : flagCR_;

    x.plus(s);
    feasible_ = con.isFeasible(x);
    algo_state.snorm = s.norm();
    algo_state.iter++;
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;

    if ( secant_ != ROL::nullPtr ) {
      gtmp_->set(*(step_state->gradientVec));
    }
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;

    if ( secant_ != ROL::nullPtr ) {
      secant_->updateStorage(x,*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
    }
    (algo_state.iterateVec)->set(x);
  }
  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::stringstream hist;
    hist << " ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    if ( maxit_ > 1 ) {
      hist << std::setw(10) << std::left << "iterPDAS";
      hist << std::setw(10) << std::left << "flagPDAS";
    }
    else {
      hist << std::setw(10) << std::left << "iterCR";
      hist << std::setw(10) << std::left << "flagCR";
    }
    hist << std::setw(10) << std::left << "feasible";
    hist << "\n";
    return hist.str();
  }
  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\nPrimal Dual Active Set Newton's Method\n";
    return hist.str();
  }
  /** \brief Print iterate status.
  */
  virtual std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      if ( maxit_ > 1 ) {
        hist << std::setw(10) << std::left << iter_;
        hist << std::setw(10) << std::left << flag_;
      }
      else {
        hist << std::setw(10) << std::left << iterCR_;
        hist << std::setw(10) << std::left << flagCR_;
      }
      if ( feasible_ ) {
        hist << std::setw(10) << std::left << "YES";
      }
      else {
        hist << std::setw(10) << std::left << "NO";
      }
      hist << "\n";
    }
    return hist.str();
  }
}; // class PrimalDualActiveSetStep

} // namespace ROL

#endif
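// Example driver loop (illustrative sketch only; construction of the
// objective obj, bound constraint bnd, iterate x, step s, gradient g,
// parameter list parlist, stopping tolerances, and an algorithm state with an
// allocated iterateVec is assumed):
//
//   ROL::PrimalDualActiveSetStep<double> step(parlist);
//   step.initialize(x, s, g, obj, bnd, algo_state);
//   while ( algo_state.gnorm > gtol && algo_state.iter < maxiter ) {
//     step.compute(s, x, obj, bnd, algo_state);
//     step.update(x, s, obj, bnd, algo_state);
//   }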
//  void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//             Objective<Real> &obj, BoundConstraint<Real> &con) {
//    Real rnorm = rhs.norm();
//    Real rtol  = std::min(tol1_,tol2_*rnorm);
//    itol_ = std::sqrt(ROL_EPSILON<Real>());
//    sol.zero();
//
//    ROL::Ptr<Vector<Real> > res = rhs.clone();
//    res->set(rhs);
//
//    ROL::Ptr<Vector<Real> > v = x.clone();
//    con.pruneActive(*res,xlam,neps_);
//    obj.precond(*v,*res,x,itol_);
//    con.pruneActive(*v,xlam,neps_);
//
//    ROL::Ptr<Vector<Real> > p = x.clone();
//    p->set(*v);
//
//    ROL::Ptr<Vector<Real> > Hp = x.clone();
//
//    iterCR_ = 0;
//    flagCR_ = 0;
//
//    Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rv = v->dot(*res);
//
//    for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//      if ( false ) {
//        itol_ = rtol/(maxitCR_*rnorm);
//      }
//      con.pruneActive(*p,xlam,neps_);
//      if ( secant_ == ROL::nullPtr ) {
//        obj.hessVec(*Hp, *p, x, itol_);
//      }
//      else {
//        secant_->applyB( *Hp, *p, x );
//      }
//      con.pruneActive(*Hp,xlam,neps_);
//
//      kappa = p->dot(*Hp);
//      if ( kappa <= 0.0 ) { flagCR_ = 2; break; }
//      alpha = rv/kappa;
//      sol.axpy(alpha,*p);
//
//      res->axpy(-alpha,*Hp);
//      rnorm = res->norm();
//      if ( rnorm < rtol ) { break; }
//
//      con.pruneActive(*res,xlam,neps_);
//      obj.precond(*v,*res,x,itol_);
//      con.pruneActive(*v,xlam,neps_);
//      tmp  = rv;
//      rv   = v->dot(*res);
//      beta = rv/tmp;
//
//      p->scale(beta);
//      p->axpy(1.0,*v);
//    }
//    if ( iterCR_ == maxitCR_ ) {
//      flagCR_ = 1;
//    }
//    else {
//      iterCR_++;
//    }
//  }


//  /** \brief Apply the inactive components of the Hessian operator.
//
//             I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//             @param[out]  hv    is the result of applying the Hessian at @b x to
//                                @b v
//             @param[in]   v     is the direction in which we apply the Hessian
//             @param[in]   x     is the current iteration vector \f$x_k\f$
//             @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//             @param[in]   obj   is the objective function
//             @param[in]   con   are the bound constraints
//  */
//  void applyInactiveHessian(Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x,
//                            const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//    ROL::Ptr<Vector<Real> > tmp = v.clone();
//    tmp->set(v);
//    con.pruneActive(*tmp,xlam,neps_);
//    if ( secant_ == ROL::nullPtr ) {
//      obj.hessVec(hv,*tmp,x,itol_);
//    }
//    else {
//      secant_->applyB(hv,*tmp,x);
//    }
//    con.pruneActive(hv,xlam,neps_);
//  }
//
//  /** \brief Apply the inactive components of the preconditioner operator.
//
//             I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//             @param[out]  hv    is the result of applying the preconditioner at @b x to
//                                @b v
//             @param[in]   v     is the direction in which we apply the preconditioner
//             @param[in]   x     is the current iteration vector \f$x_k\f$
//             @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//             @param[in]   obj   is the objective function
//             @param[in]   con   are the bound constraints
//  */
//  void applyInactivePrecond(Vector<Real> &pv, const Vector<Real> &v, const Vector<Real> &x,
//                            const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//    ROL::Ptr<Vector<Real> > tmp = v.clone();
//    tmp->set(v);
//    con.pruneActive(*tmp,xlam,neps_);
//    obj.precond(pv,*tmp,x,itol_);
//    con.pruneActive(pv,xlam,neps_);
//  }
//
//  /** \brief Solve the inactive part of the PDAS optimality system.
//
//             The inactive PDAS optimality system is
//             \f[
//                 \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}s =
//                     -\nabla f(x_k)_{\mathcal{I}_k}
//                     -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k} (s_k)_{\mathcal{A}_k}.
//             \f]
//             Since the inactive part of the Hessian may not be positive definite, we solve
//             using CR.
//
//             @param[out]  sol   is the vector containing the solution
//             @param[in]   rhs   is the right-hand side vector
//             @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//             @param[in]   x     is the current iteration vector \f$x_k\f$
//             @param[in]   obj   is the objective function
//             @param[in]   con   are the bound constraints
//  */
//  // Solve the inactive part of the optimality system using conjugate residuals
//  void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//             Objective<Real> &obj, BoundConstraint<Real> &con) {
//    // Initialize Residual
//    ROL::Ptr<Vector<Real> > res = rhs.clone();
//    res->set(rhs);
//    Real rnorm = res->norm();
//    Real rtol  = std::min(tol1_,tol2_*rnorm);
//    if ( false ) { itol_ = rtol/(maxitCR_*rnorm); }
//    sol.zero();
//
//    // Apply preconditioner to residual r = Mres
//    ROL::Ptr<Vector<Real> > r = x.clone();
//    applyInactivePrecond(*r,*res,x,xlam,obj,con);
//
//    // Initialize direction p = v
//    ROL::Ptr<Vector<Real> > p = x.clone();
//    p->set(*r);
//
//    // Apply Hessian to v
//    ROL::Ptr<Vector<Real> > Hr = x.clone();
//    applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//    // Apply Hessian to p
//    ROL::Ptr<Vector<Real> > Hp  = x.clone();
//    ROL::Ptr<Vector<Real> > MHp = x.clone();
//    Hp->set(*Hr);
//
//    iterCR_ = 0;
//    flagCR_ = 0;
//
//    Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rHr = Hr->dot(*r);
//
//    for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//      // Precondition Hp
//      applyInactivePrecond(*MHp,*Hp,x,xlam,obj,con);
//
//      kappa = Hp->dot(*MHp);  // p' H M H p
//      alpha = rHr/kappa;      // r' M H M r
//      sol.axpy(alpha,*p);     // update step
//      res->axpy(-alpha,*Hp);  // residual
//      r->axpy(-alpha,*MHp);   // preconditioned residual
//
//      // recompute rnorm and decide whether or not to exit
//      rnorm = res->norm();
//      if ( rnorm < rtol ) { break; }
//
//      // Apply Hessian to v
//      itol_ = rtol/(maxitCR_*rnorm);
//      applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//      tmp  = rHr;
//      rHr  = Hr->dot(*r);
//      beta = rHr/tmp;
//      p->scale(beta);
//      p->axpy(1.0,*r);
//      Hp->scale(beta);
//      Hp->axpy(1.0,*Hr);
//    }
//    if ( iterCR_ == maxitCR_ ) {
//      flagCR_ = 1;
//    }
//    else {
//      iterCR_++;
//    }
//  }