ROL_PrimalDualActiveSetStep.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
//    contributors may be used to endorse or promote products derived from
//    this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_PRIMALDUALACTIVESETSTEP_H
#define ROL_PRIMALDUALACTIVESETSTEP_H

#include "ROL_Step.hpp"
#include "ROL_Vector.hpp"
#include "ROL_KrylovFactory.hpp"
#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Types.hpp"
#include "ROL_Secant.hpp"
#include "ROL_ParameterList.hpp"

#include <sstream>  // std::stringstream in the print routines
#include <iomanip>  // std::setw, std::setprecision, std::left

namespace ROL {

/** \class ROL::PrimalDualActiveSetStep
    \brief Implements the computation of optimization steps with the
           Newton primal-dual active set method.
*/
template <class Real>
class PrimalDualActiveSetStep : public Step<Real> {
private:

  ROL::Ptr<Krylov<Real> > krylov_;  ///< Krylov solver for the reduced Newton system

  // Krylov Parameters
  int iterCR_;  ///< Number of Krylov (CR) iterations in the most recent solve
  int flagCR_;  ///< Krylov (CR) termination flag
  Real itol_;   ///< Inexact computation tolerance

  // PDAS Parameters
  int maxit_;      ///< Maximum number of PDAS iterations
  int iter_;       ///< PDAS iteration counter
  int flag_;       ///< PDAS termination flag
  Real stol_;      ///< PDAS minimum step size stopping tolerance
  Real gtol_;      ///< PDAS gradient stopping tolerance
  Real scale_;     ///< Scale for dual variables in the active set
  Real neps_;      ///< \f$\epsilon\f$-active set tolerance
  bool feasible_;  ///< Flag whether the current iterate is feasible or not

  // Dual Variable
  ROL::Ptr<Vector<Real> > lambda_;  ///< Container for dual variables
  ROL::Ptr<Vector<Real> > xlam_;    ///< Container for primal plus dual variables
  ROL::Ptr<Vector<Real> > x0_;      ///< Container for initial primal variables
  ROL::Ptr<Vector<Real> > xbnd_;    ///< Container for primal variable bounds
  ROL::Ptr<Vector<Real> > As_;      ///< Container for step projected onto active set
  ROL::Ptr<Vector<Real> > xtmp_;    ///< Container for temporary primal storage
  ROL::Ptr<Vector<Real> > res_;     ///< Container for optimality system residual for quadratic model
  ROL::Ptr<Vector<Real> > Ag_;      ///< Container for gradient projected onto active set
  ROL::Ptr<Vector<Real> > rtmp_;    ///< Container for temporary right hand side storage
  ROL::Ptr<Vector<Real> > gtmp_;    ///< Container for temporary gradient storage

  // Secant Information
  ESecant esec_;                    ///< Enum for secant type
  ROL::Ptr<Secant<Real> > secant_;  ///< Secant object
  bool useSecantPrecond_;           ///< Flag whether to use a secant approximation as preconditioner
  bool useSecantHessVec_;           ///< Flag whether to use a secant approximation of the Hessian

  /** \brief Apply the Hessian restricted to the inactive set.

      Input and output vectors are pruned on the \f$\epsilon\f$-active set,
      so the operator acts only on inactive components.
  */
  class HessianPD : public LinearOperator<Real> {
  private:
    const ROL::Ptr<Objective<Real> > obj_;
    const ROL::Ptr<BoundConstraint<Real> > bnd_;
    const ROL::Ptr<Vector<Real> > x_;
    const ROL::Ptr<Vector<Real> > xlam_;
    ROL::Ptr<Vector<Real> > v_;
    Real eps_;
    const ROL::Ptr<Secant<Real> > secant_;
    bool useSecant_;
  public:
    HessianPD(const ROL::Ptr<Objective<Real> > &obj,
              const ROL::Ptr<BoundConstraint<Real> > &bnd,
              const ROL::Ptr<Vector<Real> > &x,
              const ROL::Ptr<Vector<Real> > &xlam,
              const Real eps = 0,
              const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
              const bool useSecant = false )
      : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
        eps_(eps), secant_(secant), useSecant_(useSecant) {
      v_ = x_->clone();
      if ( !useSecant || secant == ROL::nullPtr ) {
        useSecant_ = false;
      }
    }
    void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      v_->set(v);
      bnd_->pruneActive(*v_,*xlam_,eps_);
      if ( useSecant_ ) {
        secant_->applyB(Hv,*v_);
      }
      else {
        obj_->hessVec(Hv,*v_,*x_,tol);
      }
      bnd_->pruneActive(Hv,*xlam_,eps_);
    }
  };

  /** \brief Apply a preconditioner restricted to the inactive set.

      applyInverse() prunes input and output on the \f$\epsilon\f$-active set
      and applies either the objective preconditioner or the inverse secant
      approximation.
  */
  class PrecondPD : public LinearOperator<Real> {
  private:
    const ROL::Ptr<Objective<Real> > obj_;
    const ROL::Ptr<BoundConstraint<Real> > bnd_;
    const ROL::Ptr<Vector<Real> > x_;
    const ROL::Ptr<Vector<Real> > xlam_;
    ROL::Ptr<Vector<Real> > v_;
    Real eps_;
    const ROL::Ptr<Secant<Real> > secant_;
    bool useSecant_;
  public:
    PrecondPD(const ROL::Ptr<Objective<Real> > &obj,
              const ROL::Ptr<BoundConstraint<Real> > &bnd,
              const ROL::Ptr<Vector<Real> > &x,
              const ROL::Ptr<Vector<Real> > &xlam,
              const Real eps = 0,
              const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
              const bool useSecant = false )
      : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
        eps_(eps), secant_(secant), useSecant_(useSecant) {
      v_ = x_->dual().clone();
      if ( !useSecant || secant == ROL::nullPtr ) {
        useSecant_ = false;
      }
    }
    void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      Hv.set(v.dual());
    }
    void applyInverse( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
      v_->set(v);
      bnd_->pruneActive(*v_,*xlam_,eps_);
      if ( useSecant_ ) {
        secant_->applyH(Hv,*v_);
      }
      else {
        obj_->precond(Hv,*v_,*x_,tol);
      }
      bnd_->pruneActive(Hv,*xlam_,eps_);
    }
  };

  /** \brief Compute the gradient-based criticality measure
             \f$\|x_k - P_{[a,b]}(x_k - \nabla f(x_k))\|\f$,
             i.e., the distance between the current iterate and the
             projection of one gradient step onto the bound constraints.
  */
  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj,
                                 BoundConstraint<Real> &con, Real tol) {
    Real one(1);
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    obj.gradient(*(step_state->gradientVec),x,tol);
    xtmp_->set(x);
    xtmp_->axpy(-one,(step_state->gradientVec)->dual());
    con.project(*xtmp_);
    xtmp_->axpy(-one,x);
    return xtmp_->norm();
  }

public:
  /** \brief Constructor.

      @param[in]  parlist  parameter list containing the algorithmic options
  */
  PrimalDualActiveSetStep( ROL::ParameterList &parlist )
    : Step<Real>::Step(), krylov_(ROL::nullPtr),
      iterCR_(0), flagCR_(0), itol_(0),
      maxit_(0), iter_(0), flag_(0), stol_(0), gtol_(0), scale_(0),
      neps_(-ROL_EPSILON<Real>()), feasible_(false),
      lambda_(ROL::nullPtr), xlam_(ROL::nullPtr), x0_(ROL::nullPtr),
      xbnd_(ROL::nullPtr), As_(ROL::nullPtr), xtmp_(ROL::nullPtr),
      res_(ROL::nullPtr), Ag_(ROL::nullPtr), rtmp_(ROL::nullPtr),
      gtmp_(ROL::nullPtr),
      esec_(SECANT_LBFGS), secant_(ROL::nullPtr), useSecantPrecond_(false),
      useSecantHessVec_(false) {
    Real one(1), oem6(1.e-6), oem8(1.e-8);
    // Algorithmic parameters
    maxit_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Iteration Limit",10);
    stol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Step Tolerance",oem8);
    gtol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Gradient Tolerance",oem6);
    scale_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Dual Scaling", one);
    // Build secant object
    esec_ = StringToESecant(parlist.sublist("General").sublist("Secant").get("Type","Limited-Memory BFGS"));
    useSecantHessVec_ = parlist.sublist("General").sublist("Secant").get("Use as Hessian", false);
    useSecantPrecond_ = parlist.sublist("General").sublist("Secant").get("Use as Preconditioner", false);
    if ( useSecantHessVec_ || useSecantPrecond_ ) {
      secant_ = SecantFactory<Real>(parlist);
    }
    // Build Krylov object
    krylov_ = KrylovFactory<Real>(parlist);
  }

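  // Example (illustrative sketch, not part of the original header): the
  // options read above live in the "Step" -> "Primal Dual Active Set" and
  // "General" -> "Secant" sublists, e.g.
  //
  //   ROL::ParameterList parlist;
  //   ROL::ParameterList &pdas = parlist.sublist("Step").sublist("Primal Dual Active Set");
  //   pdas.set("Iteration Limit",             10);
  //   pdas.set("Relative Step Tolerance",     1.e-8);
  //   pdas.set("Relative Gradient Tolerance", 1.e-6);
  //   pdas.set("Dual Scaling",                1.0);
  //   parlist.sublist("General").sublist("Secant").set("Use as Hessian", false);
  //   PrimalDualActiveSetStep<double> step(parlist);
  //
  // Omitted entries fall back to the defaults shown in the constructor above.
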
  /** \brief Initialize step.

      Projects the initial guess onto the feasible set, evaluates the
      objective and its gradient, and initializes the dual variable to the
      negative dual gradient.
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &con,
                   AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real zero(0), one(1);
    // Initialize state descent direction and gradient storage
    step_state->descentVec  = s.clone();
    step_state->gradientVec = g.clone();
    step_state->searchSize  = zero;
    // Initialize additional storage
    xlam_ = x.clone();
    x0_   = x.clone();
    xbnd_ = x.clone();
    As_   = s.clone();
    xtmp_ = x.clone();
    res_  = g.clone();
    Ag_   = g.clone();
    rtmp_ = g.clone();
    gtmp_ = g.clone();
    // Project x onto constraint set
    con.project(x);
    // Update objective function, get value, and get gradient
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;
    // Initialize dual variable
    lambda_ = s.clone();
    lambda_->set((step_state->gradientVec)->dual());
    lambda_->scale(-one);
  }

  /** \brief Compute step.

      Runs at most maxit_ primal-dual active set iterations.  Each iteration
      estimates the active sets from \f$x_k + c\lambda_k\f$, fixes the step
      on the active set to the distance to the corresponding bound, and
      solves a reduced Newton system for the inactive components.
  */
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj,
                BoundConstraint<Real> &con,
                AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    Real zero(0), one(1);
    s.zero();
    x0_->set(x);
    res_->set(*(step_state->gradientVec));
    for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
      /********************************************************************/
      // MODIFY ITERATE VECTOR TO CHECK ACTIVE SET
      /********************************************************************/
      xlam_->set(*x0_);                          // xlam = x0
      xlam_->axpy(scale_,*(lambda_));            // xlam = x0 + c*lambda
      /********************************************************************/
      // PROJECT x ONTO PRIMAL DUAL FEASIBLE SET
      /********************************************************************/
      As_->zero();                               // As = 0

      xbnd_->set(*con.getUpperBound());          // xbnd = u
      xbnd_->axpy(-one,x);                       // xbnd = u - x
      xtmp_->set(*xbnd_);                        // tmp  = u - x
      con.pruneUpperActive(*xtmp_,*xlam_,neps_); // tmp  = I(u - x)
      xbnd_->axpy(-one,*xtmp_);                  // xbnd = A(u - x)
      As_->plus(*xbnd_);                         // As  += A(u - x)

      xbnd_->set(*con.getLowerBound());          // xbnd = l
      xbnd_->axpy(-one,x);                       // xbnd = l - x
      xtmp_->set(*xbnd_);                        // tmp  = l - x
      con.pruneLowerActive(*xtmp_,*xlam_,neps_); // tmp  = I(l - x)
      xbnd_->axpy(-one,*xtmp_);                  // xbnd = A(l - x)
      As_->plus(*xbnd_);                         // As  += A(l - x)
      /********************************************************************/
      // APPLY HESSIAN TO ACTIVE COMPONENTS OF s AND REMOVE INACTIVE
      /********************************************************************/
      itol_ = std::sqrt(ROL_EPSILON<Real>());
      if ( useSecantHessVec_ && secant_ != ROL::nullPtr ) { // IHAs = H*As
        secant_->applyB(*gtmp_,*As_);
      }
      else {
        obj.hessVec(*gtmp_,*As_,x,itol_);
      }
      con.pruneActive(*gtmp_,*xlam_,neps_);      // IHAs = I(H*As)
      /********************************************************************/
      // SEPARATE ACTIVE AND INACTIVE COMPONENTS OF THE GRADIENT
      /********************************************************************/
      rtmp_->set(*(step_state->gradientVec));    // Inactive components
      con.pruneActive(*rtmp_,*xlam_,neps_);

      Ag_->set(*(step_state->gradientVec));      // Active components
      Ag_->axpy(-one,*rtmp_);
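      // Note (explanatory comment, not in the original source): the reduced
      // system assembled below is the inactive block of the PDAS optimality
      // system,
      //   H_{I,I} s_I = -g_I - H_{I,A} s_A,
      // where I and A are the inactive and active index sets, g is the
      // gradient, H the Hessian (or secant approximation), and s_A = As_.
      // It is solved with the Krylov method built in the constructor, using
      // operators that prune active components before and after application.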
      /********************************************************************/
      // SOLVE REDUCED NEWTON SYSTEM
      /********************************************************************/
      rtmp_->plus(*gtmp_);
      rtmp_->scale(-one);                        // rhs = -Ig - I(H*As)
      s.zero();
      if ( rtmp_->norm() > zero ) {
        // Initialize Hessian and preconditioner
        ROL::Ptr<Objective<Real> >       obj_ptr = ROL::makePtrFromRef(obj);
        ROL::Ptr<BoundConstraint<Real> > con_ptr = ROL::makePtrFromRef(con);
        ROL::Ptr<LinearOperator<Real> > hessian
          = ROL::makePtr<HessianPD>(obj_ptr,con_ptr,
              algo_state.iterateVec,xlam_,neps_,secant_,useSecantHessVec_);
        ROL::Ptr<LinearOperator<Real> > precond
          = ROL::makePtr<PrecondPD>(obj_ptr,con_ptr,
              algo_state.iterateVec,xlam_,neps_,secant_,useSecantPrecond_);
        //solve(s,*rtmp_,*xlam_,x,obj,con);      // Call conjugate residuals
        krylov_->run(s,*hessian,*rtmp_,*precond,iterCR_,flagCR_);
        con.pruneActive(s,*xlam_,neps_);         // s <- Is
      }
      s.plus(*As_);                              // s = Is + As
      /********************************************************************/
      // UPDATE MULTIPLIER
      /********************************************************************/
      if ( useSecantHessVec_ && secant_ != ROL::nullPtr ) {
        secant_->applyB(*rtmp_,s);
      }
      else {
        obj.hessVec(*rtmp_,s,x,itol_);
      }
      gtmp_->set(*rtmp_);                        // gtmp   = H*s
      con.pruneActive(*gtmp_,*xlam_,neps_);      // gtmp   = I(H*s)
      lambda_->set(*rtmp_);                      // lambda = H*s
      lambda_->axpy(-one,*gtmp_);                // lambda = A(H*s)
      lambda_->plus(*Ag_);                       // lambda = A(H*s) + A(g)
      lambda_->scale(-one);                      // lambda = -A(g + H*s)
      /********************************************************************/
      // UPDATE STEP
      /********************************************************************/
      x0_->set(x);
      x0_->plus(s);
      res_->set(*(step_state->gradientVec));
      res_->plus(*rtmp_);
      // Compute criticality measure
      xtmp_->set(*x0_);
      xtmp_->axpy(-one,res_->dual());
      con.project(*xtmp_);
      xtmp_->axpy(-one,*x0_);
//      std::cout << s.norm()        << "  "
//                << tmp->norm()     << "  "
//                << res_->norm()    << "  "
//                << lambda_->norm() << "  "
//                << flagCR_ << "  "
//                << iterCR_ << "\n";
      if ( xtmp_->norm() < gtol_*algo_state.gnorm ) {
        flag_ = 0;  // converged: criticality measure sufficiently reduced
        break;
      }
      if ( s.norm() < stol_*x.norm() ) {
        flag_ = 2;  // converged: step is small relative to the iterate
        break;
      }
    }
    if ( iter_ == maxit_ ) {
      flag_ = 1;    // hit the PDAS iteration limit
    }
    else {
      iter_++;
    }
  }

  /** \brief Update step, if successful.

      Applies the step, updates the objective, the secant storage (if any),
      and the algorithm state.
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj,
               BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    step_state->SPiter = (maxit_ > 1) ? iter_ : iterCR_;
    step_state->SPflag = (maxit_ > 1) ? flag_ : flagCR_;

    x.plus(s);
    feasible_ = con.isFeasible(x);
    algo_state.snorm = s.norm();
    algo_state.iter++;
    Real tol = std::sqrt(ROL_EPSILON<Real>());
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;

    if ( secant_ != ROL::nullPtr ) {
      gtmp_->set(*(step_state->gradientVec)); // store previous gradient for the secant update
    }
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;

    if ( secant_ != ROL::nullPtr ) {
      secant_->updateStorage(x,*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
    }
    (algo_state.iterateVec)->set(x);
  }

  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::stringstream hist;
    hist << " ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    if ( maxit_ > 1 ) {
      hist << std::setw(10) << std::left << "iterPDAS";
      hist << std::setw(10) << std::left << "flagPDAS";
    }
    else {
      hist << std::setw(10) << std::left << "iterCR";
      hist << std::setw(10) << std::left << "flagCR";
    }
    hist << std::setw(10) << std::left << "feasible";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\nPrimal Dual Active Set Newton's Method\n";
    return hist.str();
  }

  /** \brief Print iterate status.
  */
  virtual std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      if ( maxit_ > 1 ) {
        hist << std::setw(10) << std::left << iter_;
        hist << std::setw(10) << std::left << flag_;
      }
      else {
        hist << std::setw(10) << std::left << iterCR_;
        hist << std::setw(10) << std::left << flagCR_;
      }
      if ( feasible_ ) {
        hist << std::setw(10) << std::left << "YES";
      }
      else {
        hist << std::setw(10) << std::left << "NO";
      }
      hist << "\n";
    }
    return hist.str();
  }

}; // class PrimalDualActiveSetStep

} // namespace ROL

#endif

// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Real rnorm = rhs.norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   itol_ = std::sqrt(ROL_EPSILON<Real>());
//   sol.zero();
//
//   ROL::Ptr<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//
//   ROL::Ptr<Vector<Real> > v = x.clone();
//   con.pruneActive(*res,xlam,neps_);
//   obj.precond(*v,*res,x,itol_);
//   con.pruneActive(*v,xlam,neps_);
//
//   ROL::Ptr<Vector<Real> > p = x.clone();
//   p->set(*v);
//
//   ROL::Ptr<Vector<Real> > Hp = x.clone();
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rv = v->dot(*res);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     if ( false ) {
//       itol_ = rtol/(maxitCR_*rnorm);
//     }
//     con.pruneActive(*p,xlam,neps_);
//     if ( secant_ == ROL::nullPtr ) {
//       obj.hessVec(*Hp, *p, x, itol_);
//     }
//     else {
//       secant_->applyB( *Hp, *p, x );
//     }
//     con.pruneActive(*Hp,xlam,neps_);
//
//     kappa = p->dot(*Hp);
//     if ( kappa <= 0.0 ) { flagCR_ = 2; break; }
//     alpha = rv/kappa;
//     sol.axpy(alpha,*p);
//
//     res->axpy(-alpha,*Hp);
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     con.pruneActive(*res,xlam,neps_);
//     obj.precond(*v,*res,x,itol_);
//     con.pruneActive(*v,xlam,neps_);
//     tmp  = rv;
//     rv   = v->dot(*res);
//     beta = rv/tmp;
//
//     p->scale(beta);
//     p->axpy(1.0,*v);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }


// /** \brief Apply the inactive components of the Hessian operator.
//
//            I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out]  hv    is the result of applying the Hessian at @b x to
//                        @b v
//     @param[in]   v     is the direction in which we apply the Hessian
//     @param[in]   x     is the current iteration vector \f$x_k\f$
//     @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   obj   is the objective function
//     @param[in]   con   are the bound constraints
// */
// void applyInactiveHessian(Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   ROL::Ptr<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   if ( secant_ == ROL::nullPtr ) {
//     obj.hessVec(hv,*tmp,x,itol_);
//   }
//   else {
//     secant_->applyB(hv,*tmp,x);
//   }
//   con.pruneActive(hv,xlam,neps_);
// }
//
// /** \brief Apply the inactive components of the preconditioner operator.
//
//            I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out]  pv    is the result of applying the preconditioner at @b x to
//                        @b v
//     @param[in]   v     is the direction in which we apply the preconditioner
//     @param[in]   x     is the current iteration vector \f$x_k\f$
//     @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   obj   is the objective function
//     @param[in]   con   are the bound constraints
// */
// void applyInactivePrecond(Vector<Real> &pv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   ROL::Ptr<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   obj.precond(pv,*tmp,x,itol_);
//   con.pruneActive(pv,xlam,neps_);
// }
//
// /** \brief Solve the inactive part of the PDAS optimality system.
//
//            The inactive PDAS optimality system is
//            \f[
//                \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}s =
//                    -\nabla f(x_k)_{\mathcal{I}_k}
//                    -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k} (s_k)_{\mathcal{A}_k}.
//            \f]
//            Since the inactive part of the Hessian may not be positive definite, we solve
//            using CR.
//
//     @param[out]  sol   is the vector containing the solution
//     @param[in]   rhs   is the right-hand side vector
//     @param[in]   xlam  is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   x     is the current iteration vector \f$x_k\f$
//     @param[in]   obj   is the objective function
//     @param[in]   con   are the bound constraints
// */
// // Solve the inactive part of the optimality system using conjugate residuals
// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   // Initialize Residual
//   ROL::Ptr<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//   Real rnorm = res->norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   if ( false ) { itol_ = rtol/(maxitCR_*rnorm); }
//   sol.zero();
//
//   // Apply preconditioner to residual r = Mres
//   ROL::Ptr<Vector<Real> > r = x.clone();
//   applyInactivePrecond(*r,*res,x,xlam,obj,con);
//
//   // Initialize direction p = v
//   ROL::Ptr<Vector<Real> > p = x.clone();
//   p->set(*r);
//
//   // Apply Hessian to v
//   ROL::Ptr<Vector<Real> > Hr = x.clone();
//   applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//   // Apply Hessian to p
//   ROL::Ptr<Vector<Real> > Hp  = x.clone();
//   ROL::Ptr<Vector<Real> > MHp = x.clone();
//   Hp->set(*Hr);
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rHr = Hr->dot(*r);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     // Precondition Hp
//     applyInactivePrecond(*MHp,*Hp,x,xlam,obj,con);
//
//     kappa = Hp->dot(*MHp);  // p' H M H p
//     alpha = rHr/kappa;      // r' M H M r
//     sol.axpy(alpha,*p);     // update step
//     res->axpy(-alpha,*Hp);  // residual
//     r->axpy(-alpha,*MHp);   // preconditioned residual
//
//     // recompute rnorm and decide whether or not to exit
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     // Apply Hessian to v
//     itol_ = rtol/(maxitCR_*rnorm);
//     applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//     tmp  = rHr;
//     rHr  = Hr->dot(*r);
//     beta = rHr/tmp;
//     p->scale(beta);
//     p->axpy(1.0,*r);
//     Hp->scale(beta);
//     Hp->axpy(1.0,*Hr);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }
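
// Usage sketch (illustrative only, not part of the original header).  It
// assumes user-defined classes MyObjective and MyBounds implementing
// ROL::Objective<double> and ROL::BoundConstraint<double>, and vectors
// x, s, g of type ROL::Ptr<ROL::Vector<double> >; a driver such as
// ROL::Algorithm normally performs these calls together with a status test.
//
//   ROL::ParameterList parlist;                  // algorithmic options (see constructor)
//   MyObjective obj;
//   MyBounds    bnd;
//   ROL::PrimalDualActiveSetStep<double> step(parlist);
//   ROL::AlgorithmState<double> state;
//   state.iterateVec = x->clone();               // compute() reads the current iterate from here
//   state.iterateVec->set(*x);
//   step.initialize(*x, *s, *g, obj, bnd, state);
//   for (int k = 0; k < 100 && state.gnorm > 1.e-6; ++k) {
//     step.compute(*s, *x, obj, bnd, state);     // form the PDAS step s
//     step.update(*x, *s, obj, bnd, state);      // accept the step and refresh the state
//   }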