ROL_PrimalDualActiveSetStep.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_PRIMALDUALACTIVESETSTEP_H
#define ROL_PRIMALDUALACTIVESETSTEP_H

#include "ROL_Step.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Krylov.hpp"
#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Types.hpp"
#include "ROL_Secant.hpp"
#include "ROL_ConjugateGradients.hpp"   // assumed include: provides ConjugateGradients<Real> used below
#include "ROL_ConjugateResiduals.hpp"   // assumed include: provides ConjugateResiduals<Real> used below
#include "Teuchos_ParameterList.hpp"

namespace ROL {

/** \class ROL::PrimalDualActiveSetStep
    \brief Implements the computation of optimization steps with the
           Newton primal-dual active set method.
*/

template <class Real>
class PrimalDualActiveSetStep : public Step<Real> {
private:

  Teuchos::RCP<PrimalDualHessian<Real> >        hessian_;  ///< Hessian operator for the inactive-set system.
  Teuchos::RCP<PrimalDualPreconditioner<Real> > precond_;  ///< Preconditioner for the inactive-set system.
  Teuchos::RCP<Krylov<Real> >                   krylov_;   ///< Krylov solver for the reduced Newton system.

  // Krylov Parameters
  int  iterCR_;  ///< Krylov iteration counter.
  int  flagCR_;  ///< Krylov termination flag.
  Real itol_;    ///< Inexact Hessian-times-a-vector tolerance.

  // PDAS Parameters
  int  maxit_;     ///< Maximum number of PDAS iterations.
  int  iter_;      ///< PDAS iteration counter.
  int  flag_;      ///< PDAS termination flag.
  Real stol_;      ///< PDAS minimum step size stopping tolerance.
  Real gtol_;      ///< PDAS gradient stopping tolerance.
  Real scale_;     ///< Scale for dual variables in the active set, \f$c\f$.
  Real neps_;      ///< Epsilon used in the active-set tests (initialized to -ROL_EPSILON).
  bool feasible_;  ///< Flag whether the current iterate is feasible or not.

  // Dual Variable
  Teuchos::RCP<Vector<Real> > lambda_;  ///< Container for dual variables.
  Teuchos::RCP<Vector<Real> > xlam_;    ///< Container for primal plus dual variables.
  Teuchos::RCP<Vector<Real> > x0_;      ///< Container for initial primal variables.
  Teuchos::RCP<Vector<Real> > xbnd_;    ///< Container for primal variable bounds.
  Teuchos::RCP<Vector<Real> > As_;      ///< Container for step projected onto active set.
  Teuchos::RCP<Vector<Real> > xtmp_;    ///< Container for temporary primal storage.
  Teuchos::RCP<Vector<Real> > res_;     ///< Container for optimality system residual for quadratic model.
  Teuchos::RCP<Vector<Real> > Ag_;      ///< Container for gradient projected onto active set.
  Teuchos::RCP<Vector<Real> > rtmp_;    ///< Container for temporary right-hand-side storage.
  Teuchos::RCP<Vector<Real> > gtmp_;    ///< Container for temporary gradient storage.

  // Secant Information
  ESecant esec_;                        ///< Enum for secant type.
  Teuchos::RCP<Secant<Real> > secant_;  ///< Secant object.
  bool useSecantHessVec_;               ///< Whether to use a secant Hessian-times-a-vector approximation.
  bool useSecantPrecond_;               ///< Whether to use a secant approximation as preconditioner.

  /** \brief Compute the gradient-based criticality measure.
  */
  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, Real tol) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    obj.gradient(*(step_state->gradientVec),x,tol);
    xtmp_->set(x);
    xtmp_->axpy(-1.0,(step_state->gradientVec)->dual());
    con.project(*xtmp_);
    xtmp_->axpy(-1.0,x);
    return xtmp_->norm();
  }
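  // A worked form of the quantity returned above (a sketch in standard notation):
  // the criticality measure is the norm of the projected-gradient residual,
  //   \f[ \| P_{[\ell,u]}\bigl(x_k - \nabla f(x_k)\bigr) - x_k \|, \f]
  // where \f$P_{[\ell,u]}\f$ denotes the projection onto the bound constraints;
  // it vanishes exactly at stationary points of the bound-constrained problem.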

  void KrylovFactory(Teuchos::ParameterList &parlist) {
    EKrylov ekv  = StringToEKrylov(parlist.get("Krylov Method","Conjugate Residuals"));
    Real absTol  = parlist.get("Absolute Krylov Tolerance", 1.e-4);
    Real relTol  = parlist.get("Relative Krylov Tolerance", 1.e-2);
    int  maxit   = parlist.get("Maximum Number of Krylov Iterations", 20);
    bool inexact = parlist.get("Use Inexact Hessian-Times-A-Vector",false);
    switch(ekv) {
      case KRYLOV_CR:
        krylov_ = Teuchos::rcp( new ConjugateResiduals<Real>(absTol,relTol,maxit,inexact) ); break;
      case KRYLOV_CG:
      default:
        krylov_ = Teuchos::rcp( new ConjugateGradients<Real>(absTol,relTol,maxit,inexact) ); break;
    }
  }

public:
  /** \brief Constructor.
  */
  PrimalDualActiveSetStep( Teuchos::ParameterList &parlist )
    : Step<Real>::Step(), iterCR_(0), flagCR_(0), iter_(0), flag_(0),
      neps_(-ROL_EPSILON), feasible_(false) {
    // Algorithmic parameters
    esec_  = StringToESecant(parlist.get("Secant Type","Limited-Memory BFGS"));
    maxit_ = parlist.get("PDAS Maximum Number of Iterations",10);
    stol_  = parlist.get("PDAS Relative Step Tolerance",1.e-8);
    gtol_  = parlist.get("PDAS Relative Gradient Tolerance",1.e-6);
    scale_ = parlist.get("PDAS Dual Scaling", 1.0);
    // Build secant object
    useSecantHessVec_ = parlist.get("Use Secant Hessian-Times-A-Vector", false);
    useSecantPrecond_ = parlist.get("Use Secant Preconditioning", false);
    secant_ = Teuchos::null;
    if ( useSecantHessVec_ || useSecantPrecond_ ) {
      int L  = parlist.get("Maximum Secant Storage",10);
      int BB = parlist.get("Barzilai-Borwein",1);
      secant_ = getSecant<Real>(esec_,L,BB);
    }
    // Build Krylov object
    KrylovFactory(parlist);
  }
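  /* Example (a minimal usage sketch): a Teuchos::ParameterList populated with the
     entries read by the constructor and by KrylovFactory, shown with the default
     values that appear above, followed by construction of the step.

       Teuchos::ParameterList parlist;
       parlist.set("Secant Type",                         "Limited-Memory BFGS");
       parlist.set("PDAS Maximum Number of Iterations",   10);
       parlist.set("PDAS Relative Step Tolerance",        1.e-8);
       parlist.set("PDAS Relative Gradient Tolerance",    1.e-6);
       parlist.set("PDAS Dual Scaling",                   1.0);
       parlist.set("Use Secant Hessian-Times-A-Vector",   false);
       parlist.set("Use Secant Preconditioning",          false);
       parlist.set("Krylov Method",                       "Conjugate Residuals");
       parlist.set("Absolute Krylov Tolerance",           1.e-4);
       parlist.set("Relative Krylov Tolerance",           1.e-2);
       parlist.set("Maximum Number of Krylov Iterations", 20);
       ROL::PrimalDualActiveSetStep<double> step(parlist);
  */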

  /** \brief Initialize step.
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &con,
                   AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    // Initialize state descent direction and gradient storage
    step_state->descentVec  = s.clone();
    step_state->gradientVec = g.clone();
    step_state->searchSize  = 0.0;
    // Initialize additional storage
    xlam_ = x.clone();
    x0_   = x.clone();
    xbnd_ = x.clone();
    As_   = s.clone();
    xtmp_ = x.clone();
    res_  = g.clone();
    Ag_   = g.clone();
    rtmp_ = g.clone();
    gtmp_ = g.clone();
    // Project x onto constraint set
    con.project(x);
    // Update objective function, get value, and get gradient
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;
    // Initialize dual variable
    lambda_ = s.clone();
    lambda_->set((step_state->gradientVec)->dual());
    lambda_->scale(-1.0);
    //con.setVectorToLowerBound(*lambda_);
    // Initialize Hessian and preconditioner
    Teuchos::RCP<Objective<Real> >       obj_ptr = Teuchos::rcp(&obj, false);
    Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
    hessian_ = Teuchos::rcp(
      new PrimalDualHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,useSecantHessVec_) );
    precond_ = Teuchos::rcp(
      new PrimalDualPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,
                                         useSecantPrecond_) );
  }

  /** \brief Compute step.
  */
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con,
                AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    s.zero();
    x0_->set(x);
    res_->set(*(step_state->gradientVec));
    for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
      /********************************************************************/
      // MODIFY ITERATE VECTOR TO CHECK ACTIVE SET
      /********************************************************************/
      xlam_->set(*x0_);                          // xlam = x0
      xlam_->axpy(scale_,*(lambda_));            // xlam = x0 + c*lambda
      /********************************************************************/
      // PROJECT x ONTO PRIMAL DUAL FEASIBLE SET
      /********************************************************************/
      As_->zero();                               // As = 0

      con.setVectorToUpperBound(*xbnd_);         // xbnd = u
      xbnd_->axpy(-1.0,x);                       // xbnd = u - x
      xtmp_->set(*xbnd_);                        // tmp  = u - x
      con.pruneUpperActive(*xtmp_,*xlam_,neps_); // tmp  = I(u - x)
      xbnd_->axpy(-1.0,*xtmp_);                  // xbnd = A(u - x)
      As_->plus(*xbnd_);                         // As  += A(u - x)

      con.setVectorToLowerBound(*xbnd_);         // xbnd = l
      xbnd_->axpy(-1.0,x);                       // xbnd = l - x
      xtmp_->set(*xbnd_);                        // tmp  = l - x
      con.pruneLowerActive(*xtmp_,*xlam_,neps_); // tmp  = I(l - x)
      xbnd_->axpy(-1.0,*xtmp_);                  // xbnd = A(l - x)
      As_->plus(*xbnd_);                         // As  += A(l - x)
      /********************************************************************/
      // APPLY HESSIAN TO ACTIVE COMPONENTS OF s AND REMOVE INACTIVE
      /********************************************************************/
      itol_ = std::sqrt(ROL_EPSILON);
      if ( useSecantHessVec_ && secant_ != Teuchos::null ) { // IHAs = H*As
        secant_->applyB(*gtmp_,*As_,x);
      }
      else {
        obj.hessVec(*gtmp_,*As_,x,itol_);
      }
      con.pruneActive(*gtmp_,*xlam_,neps_);      // IHAs = I(H*As)
      /********************************************************************/
      // SEPARATE ACTIVE AND INACTIVE COMPONENTS OF THE GRADIENT
      /********************************************************************/
      rtmp_->set(*(step_state->gradientVec));    // Inactive components
      con.pruneActive(*rtmp_,*xlam_,neps_);

      Ag_->set(*(step_state->gradientVec));      // Active components
      Ag_->axpy(-1.0,*rtmp_);
      /********************************************************************/
      // SOLVE REDUCED NEWTON SYSTEM
      /********************************************************************/
      rtmp_->plus(*gtmp_);
      rtmp_->scale(-1.0);                        // rhs = -Ig - I(H*As)
      s.zero();
      if ( rtmp_->norm() > 0.0 ) {
        //solve(s,*rtmp_,*xlam_,x,obj,con);      // Call conjugate residuals
        con.pruneActive(s,*xlam_,neps_);         // s <- Is
      }
      s.plus(*As_);                              // s = Is + As
      /********************************************************************/
      // UPDATE MULTIPLIER
      /********************************************************************/
      if ( useSecantHessVec_ && secant_ != Teuchos::null ) {
        secant_->applyB(*rtmp_,s,x);
      }
      else {
        obj.hessVec(*rtmp_,s,x,itol_);
      }
      gtmp_->set(*rtmp_);
      con.pruneActive(*gtmp_,*xlam_,neps_);
      lambda_->set(*rtmp_);
      lambda_->axpy(-1.0,*gtmp_);
      lambda_->plus(*Ag_);
      lambda_->scale(-1.0);
      /********************************************************************/
      // UPDATE STEP
      /********************************************************************/
      x0_->set(x);
      x0_->plus(s);
      res_->set(*(step_state->gradientVec));
      res_->plus(*rtmp_);
      // Compute criticality measure
      xtmp_->set(*x0_);
      xtmp_->axpy(-1.0,res_->dual());
      con.project(*xtmp_);
      xtmp_->axpy(-1.0,*x0_);
//    std::cout << s.norm()        << "  "
//              << tmp->norm()     << "  "
//              << res_->norm()    << "  "
//              << lambda_->norm() << "  "
//              << flagCR_         << "  "
//              << iterCR_         << "\n";
      if ( xtmp_->norm() < gtol_*algo_state.gnorm ) {
        flag_ = 0;
        break;
      }
      if ( s.norm() < stol_*x.norm() ) {
        flag_ = 2;
        break;
      }
    }
    if ( iter_ == maxit_ ) {
      flag_ = 1;
    }
    else {
      iter_++;
    }
  }
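  /* A sketch of the subproblem solved by each pass of the loop above (the standard
     primal-dual active set update, written to match the code): with \f$\lambda_k\f$
     the current multiplier and \f$c\f$ the dual scaling, the active sets are
     estimated from \f$x_k + c\lambda_k\f$,
     \f[
        \mathcal{A}^+_k = \{i : (x_k + c\lambda_k)_i > u_i\},\qquad
        \mathcal{A}^-_k = \{i : (x_k + c\lambda_k)_i < \ell_i\},\qquad
        \mathcal{I}_k   = \bigl(\mathcal{A}^+_k\cup\mathcal{A}^-_k\bigr)^c,
     \f]
     the active components of the step are fixed to \f$(u-x_k)\f$ or \f$(\ell-x_k)\f$,
     the inactive components solve the reduced Newton system
     \f[
        \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}\, s_{\mathcal{I}_k}
          = -\nabla f(x_k)_{\mathcal{I}_k}
            -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k}\, s_{\mathcal{A}_k},
     \f]
     and the multiplier is updated as
     \f$\lambda_{k+1} = -\bigl(\nabla f(x_k) + \nabla^2 f(x_k)\, s_k\bigr)_{\mathcal{A}_k}\f$,
     with zero inactive components.
  */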

  /** \brief Update step, if successful.
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    x.plus(s);
    feasible_ = con.isFeasible(x);
    algo_state.snorm = s.norm();
    algo_state.iter++;
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;

    if ( secant_ != Teuchos::null ) {
      gtmp_->set(*(step_state->gradientVec));
    }
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;

    if ( secant_ != Teuchos::null ) {
      secant_->update(*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
    }
    (algo_state.iterateVec)->set(x);
  }

  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::stringstream hist;
    hist << " ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    if ( maxit_ > 1 ) {
      hist << std::setw(10) << std::left << "iterPDAS";
      hist << std::setw(10) << std::left << "flagPDAS";
    }
    else {
      hist << std::setw(10) << std::left << "iterCR";
      hist << std::setw(10) << std::left << "flagCR";
    }
    hist << std::setw(10) << std::left << "feasible";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\nPrimal Dual Active Set Newton's Method\n";
    return hist.str();
  }

  /** \brief Print iterate status.
  */
  virtual std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      if ( maxit_ > 1 ) {
        hist << std::setw(10) << std::left << iter_;
        hist << std::setw(10) << std::left << flag_;
      }
      else {
        hist << std::setw(10) << std::left << iterCR_;
        hist << std::setw(10) << std::left << flagCR_;
      }
      if ( feasible_ ) {
        hist << std::setw(10) << std::left << "YES";
      }
      else {
        hist << std::setw(10) << std::left << "NO";
      }
      hist << "\n";
    }
    return hist.str();
  }

}; // class PrimalDualActiveSetStep

} // namespace ROL

#endif

// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Real rnorm = rhs.norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   itol_ = std::sqrt(ROL_EPSILON);
//   sol.zero();
//
//   Teuchos::RCP<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//
//   Teuchos::RCP<Vector<Real> > v = x.clone();
//   con.pruneActive(*res,xlam,neps_);
//   obj.precond(*v,*res,x,itol_);
//   con.pruneActive(*v,xlam,neps_);
//
//   Teuchos::RCP<Vector<Real> > p = x.clone();
//   p->set(*v);
//
//   Teuchos::RCP<Vector<Real> > Hp = x.clone();
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rv = v->dot(*res);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     if ( false ) {
//       itol_ = rtol/(maxitCR_*rnorm);
//     }
//     con.pruneActive(*p,xlam,neps_);
//     if ( secant_ == Teuchos::null ) {
//       obj.hessVec(*Hp, *p, x, itol_);
//     }
//     else {
//       secant_->applyB( *Hp, *p, x );
//     }
//     con.pruneActive(*Hp,xlam,neps_);
//
//     kappa = p->dot(*Hp);
//     if ( kappa <= 0.0 ) { flagCR_ = 2; break; }
//     alpha = rv/kappa;
//     sol.axpy(alpha,*p);
//
//     res->axpy(-alpha,*Hp);
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     con.pruneActive(*res,xlam,neps_);
//     obj.precond(*v,*res,x,itol_);
//     con.pruneActive(*v,xlam,neps_);
//     tmp  = rv;
//     rv   = v->dot(*res);
//     beta = rv/tmp;
//
//     p->scale(beta);
//     p->axpy(1.0,*v);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }


// /** \brief Apply the inactive components of the Hessian operator.
//
//     I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out]  hv   is the result of applying the Hessian at @b x to @b v
//     @param[in]   v    is the direction in which we apply the Hessian
//     @param[in]   x    is the current iteration vector \f$x_k\f$
//     @param[in]   xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   obj  is the objective function
//     @param[in]   con  are the bound constraints
// */
// void applyInactiveHessian(Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Teuchos::RCP<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   if ( secant_ == Teuchos::null ) {
//     obj.hessVec(hv,*tmp,x,itol_);
//   }
//   else {
//     secant_->applyB(hv,*tmp,x);
//   }
//   con.pruneActive(hv,xlam,neps_);
// }
//
// /** \brief Apply the inactive components of the preconditioner operator.
//
//     I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out]  pv   is the result of applying the preconditioner at @b x to @b v
//     @param[in]   v    is the direction in which we apply the preconditioner
//     @param[in]   x    is the current iteration vector \f$x_k\f$
//     @param[in]   xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   obj  is the objective function
//     @param[in]   con  are the bound constraints
// */
// void applyInactivePrecond(Vector<Real> &pv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Teuchos::RCP<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   obj.precond(pv,*tmp,x,itol_);
//   con.pruneActive(pv,xlam,neps_);
// }
//
// /** \brief Solve the inactive part of the PDAS optimality system.
//
//     The inactive PDAS optimality system is
//     \f[
//        \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k} s =
//          -\nabla f(x_k)_{\mathcal{I}_k}
//          -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k} (s_k)_{\mathcal{A}_k}.
//     \f]
//     Since the inactive part of the Hessian may not be positive definite, we solve
//     using CR.
//
//     @param[out]  sol  is the vector containing the solution
//     @param[in]   rhs  is the right-hand side vector
//     @param[in]   xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]   x    is the current iteration vector \f$x_k\f$
//     @param[in]   obj  is the objective function
//     @param[in]   con  are the bound constraints
// */
// // Solve the inactive part of the optimality system using conjugate residuals
// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   // Initialize Residual
//   Teuchos::RCP<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//   Real rnorm = res->norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   if ( false ) { itol_ = rtol/(maxitCR_*rnorm); }
//   sol.zero();
//
//   // Apply preconditioner to residual r = Mres
//   Teuchos::RCP<Vector<Real> > r = x.clone();
//   applyInactivePrecond(*r,*res,x,xlam,obj,con);
//
//   // Initialize direction p = v
//   Teuchos::RCP<Vector<Real> > p = x.clone();
//   p->set(*r);
//
//   // Apply Hessian to v
//   Teuchos::RCP<Vector<Real> > Hr = x.clone();
//   applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//   // Apply Hessian to p
//   Teuchos::RCP<Vector<Real> > Hp  = x.clone();
//   Teuchos::RCP<Vector<Real> > MHp = x.clone();
//   Hp->set(*Hr);
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rHr = Hr->dot(*r);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     // Precondition Hp
//     applyInactivePrecond(*MHp,*Hp,x,xlam,obj,con);
//
//     kappa = Hp->dot(*MHp);   // p' H M H p
//     alpha = rHr/kappa;       // r' M H M r
//     sol.axpy(alpha,*p);      // update step
//     res->axpy(-alpha,*Hp);   // residual
//     r->axpy(-alpha,*MHp);    // preconditioned residual
//
//     // recompute rnorm and decide whether or not to exit
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     // Apply Hessian to v
//     itol_ = rtol/(maxitCR_*rnorm);
//     applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//     tmp  = rHr;
//     rHr  = Hr->dot(*r);
//     beta = rHr/tmp;
//     p->scale(beta);
//     p->axpy(1.0,*r);
//     Hp->scale(beta);
//     Hp->axpy(1.0,*Hr);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }