44 #ifndef ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP
45 #define ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP
// NOTE(review): garbled source listing -- the original file's line numbers are
// fused into the code text and the constructor signature (original lines
// ~51-56, including the status-test setup) is missing from this view; the
// code below is left byte-identical. TODO: restore from the pristine file.
// Constructor fragment: parses the "Step" -> "Line Search" sublist of the
// input ParameterList into the algorithm's line-search member fields.
50 template<
typename Real>
// Line-search parameter sublist.
57 ParameterList &lslist = list.sublist(
"Step").sublist(
"Line Search");
// Maximum objective evaluations allowed per line search.
58 maxit_ = lslist.get(
"Function Evaluation Limit", 20);
// Initial trial step size.
59 alpha0_ = lslist.get(
"Initial Step Size", 1.0);
// If true, the initial step is divided by the gradient norm (see initialize()).
60 normAlpha_ = lslist.get(
"Normalize Initial Step Size",
false);
// Floor used by initialize() when estimating the initial step size.
61 alpha0bnd_ = lslist.get(
"Lower Bound for Initial Step Size", 1e-4);
// If true, keep the user-supplied alpha0_ instead of estimating one.
62 useralpha_ = lslist.get(
"User Defined Initial Step Size",
false);
// If true, each line search starts from the previously accepted step length.
63 usePrevAlpha_ = lslist.get(
"Use Previous Step Length as Initial Guess",
false);
// Armijo sufficient-decrease constant.
64 c1_ = lslist.get(
"Sufficient Decrease Tolerance", 1e-4);
// Cap on the step size; defaults to the initial step size.
65 maxAlpha_ = lslist.get(
"Maximum Step Size", alpha0_);
// Enables the step-expansion phase of the line search in run().
66 useAdapt_ = lslist.get(
"Use Adaptive Step Size Selection",
true);
// Backtracking (decrease) factor applied when sufficient decrease fails.
67 rhodec_ = lslist.sublist(
"Line-Search Method").get(
"Backtracking Rate", 0.5);
// Expansion (increase) factor used by the adaptive phase.
68 rhoinc_ = lslist.sublist(
"Line-Search Method").get(
"Increase Rate" , 2.0);
// Output verbosity; the header is re-printed every iteration when > 2.
69 verbosity_ = list.sublist(
"General").get(
"Output Level", 0);
70 writeHeader_ = verbosity_ > 2;
// NOTE(review): garbled listing -- the initialize() signature (original lines
// ~74-77) and several statements (e.g. closing brace of the nullPtr branch,
// counter updates) are missing from this view; code left byte-identical.
// initialize() fragment: projects the initial guess onto the feasible set,
// evaluates objective and gradient, and estimates an initial step size.
73 template<
typename Real>
78 std::ostream &outStream) {
// Default to projection onto the bound constraint when none was supplied.
80 if (proj_ == nullPtr) {
81 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
86 Real ftol = std::sqrt(ROL_EPSILON<Real>());
// Make the initial iterate feasible before any evaluation.
87 proj_->project(x,outStream);
89 state_->value = obj.
value(x,ftol);
91 obj.
gradient(*state_->gradientVec,x,ftol);
// Projected gradient point P(x - grad), used for the stationarity measure.
93 state_->stepVec->set(x);
94 state_->stepVec->axpy(-one,state_->gradientVec->dual());
95 proj_->project(*state_->stepVec,outStream);
96 Real fnew = state_->value;
// Objective at the projected gradient point (feeds the quadratic fit below).
100 fnew = obj.
value(*state_->stepVec,ftol);
// stepVec <- P(x - grad) - x; its norm is the projected-gradient norm.
104 state_->stepVec->axpy(-one,x);
105 state_->gnorm = state_->stepVec->norm();
106 state_->snorm = ROL_INF<Real>();
108 const Real half(0.5);
// Estimate alpha0_ as the minimizer of the quadratic interpolant of the
// objective along the projected-gradient direction.
111 Real gs = state_->stepVec->apply(*state_->gradientVec);
112 Real denom = (fnew - state_->value - gs);
// Remember whether maxAlpha_ equaled alpha0_ so it can track the new estimate.
113 bool flag = maxAlpha_ == alpha0_;
114 alpha0_ = ((denom > ROL_EPSILON<Real>()) ? -half*gs/denom : alpha0bnd_);
115 alpha0_ = ((alpha0_ > alpha0bnd_) ? alpha0_ : one);
116 if (flag) maxAlpha_ = alpha0_;
// Optional normalization by the gradient norm ("Normalize Initial Step Size").
120 alpha0_ /= state_->gradientVec->norm();
122 state_->searchSize = alpha0_;
// NOTE(review): garbled listing -- the run() signature and several statements
// are missing from this view, including the declaration/reset of ls_nfval and
// (presumably) an axpy forming the step difference s <- iterateVec - x before
// each gs = s->dot(...); code left byte-identical. TODO: restore from the
// pristine file before compiling.
// run() fragment: projected gradient descent with a backtracking line search
// and an optional adaptive step-expansion phase.
125 template<
typename Real>
130 std::ostream &outStream ) {
133 initialize(x,g,obj,bnd,outStream);
// Scratch vector for trial steps and the stationarity measure.
134 Ptr<Vector<Real>> s = x.
clone();
135 Real ftrial(0), gs(0), ftrialP(0), alphaP(0), tol(std::sqrt(ROL_EPSILON<Real>()));
137 bool incAlpha =
false, accept =
true;
// Output initial iterate row (with header).
140 if (verbosity_ > 0) writeOutput(outStream,
true);
// Search direction: dual of the gradient (steepest descent).
143 state_->stepVec->set(state_->gradientVec->dual());
144 while (status_->check(*state_)) {
// Fixed step-length policy when neither previous-alpha reuse nor the
// adaptive expansion is enabled.
147 if (!usePrevAlpha_ && !useAdapt_) state_->searchSize = alpha0_;
// Trial iterate: P(x - alpha * step).
148 state_->iterateVec->set(x);
149 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
150 proj_->project(*state_->iterateVec,outStream);
152 ftrial = obj.
value(*state_->iterateVec,tol);
154 s->set(*state_->iterateVec);
156 gs = s->dot(*state_->stepVec);
// Sufficient decrease already holds -> candidate for step expansion.
157 incAlpha = (state_->value - ftrial >= -c1_*gs);
158 if (verbosity_ > 1) {
159 outStream <<
" In TypeB::GradientAlgorithm: Line Search" << std::endl;
160 outStream <<
" Step size: " << state_->searchSize << std::endl;
161 outStream <<
" Trial objective value: " << ftrial << std::endl;
162 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
163 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
164 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
165 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
166 outStream <<
" Increase alpha?: " << incAlpha << std::endl;
// Expansion phase: grow alpha by rhoinc_ while sufficient decrease holds,
// capped at maxAlpha_ and the evaluation budget.
168 if (incAlpha && useAdapt_) {
169 ftrialP = ROL_INF<Real>();
170 while ( state_->value - ftrial >= -c1_*gs
172 && state_->searchSize < maxAlpha_
173 && ls_nfval < maxit_ ) {
// Remember the last accepted alpha in case the next expansion fails.
176 alphaP = state_->searchSize;
178 state_->searchSize *= rhoinc_;
179 state_->searchSize = std::min(state_->searchSize,maxAlpha_);
180 state_->iterateVec->set(x);
181 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
182 proj_->project(*state_->iterateVec,outStream);
184 ftrial = obj.
value(*state_->iterateVec,tol);
186 s->set(*state_->iterateVec);
188 gs = s->dot(*state_->stepVec);
189 if (verbosity_ > 1) {
190 outStream << std::endl;
191 outStream <<
" Step size: " << state_->searchSize << std::endl;
192 outStream <<
" Trial objective value: " << ftrial << std::endl;
193 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
194 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
195 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
196 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
// Expansion overshot (decrease lost or objective increased): fall back
// to the last good step length and recompute the trial iterate.
199 if (state_->value - ftrial < -c1_*gs || ftrial > ftrialP) {
201 state_->searchSize = alphaP;
202 state_->iterateVec->set(x);
203 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
204 proj_->project(*state_->iterateVec,outStream);
205 s->set(*state_->iterateVec);
// Backtracking phase: shrink alpha by rhodec_ until sufficient decrease
// holds or the evaluation budget is exhausted.
211 while ( state_->value - ftrial < -c1_*gs && ls_nfval < maxit_ ) {
212 state_->searchSize *= rhodec_;
213 state_->iterateVec->set(x);
214 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
215 proj_->project(*state_->iterateVec,outStream);
217 ftrial = obj.
value(*state_->iterateVec,tol);
219 s->set(*state_->iterateVec);
221 gs = s->dot(*state_->stepVec);
222 if (verbosity_ > 1) {
223 outStream << std::endl;
224 outStream <<
" Step size: " << state_->searchSize << std::endl;
225 outStream <<
" Trial objective value: " << ftrial << std::endl;
226 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
227 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
228 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
229 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
// Fold line-search evaluation count into the cumulative counter.
233 state_->nfval += ls_nfval;
// Record the accepted step and its norm.
236 state_->stepVec->set(*s);
237 state_->snorm = state_->stepVec->norm();
// Accept the trial iterate and its objective value.
240 x.
set(*state_->iterateVec);
244 state_->value = ftrial;
247 obj.
gradient(*state_->gradientVec,x,tol);
// New steepest-descent direction for the next iteration.
251 state_->stepVec->set(state_->gradientVec->dual());
// Stationarity measure: || P(x - grad) - x ||.
254 s->set(x); s->axpy(-one,*state_->stepVec);
255 proj_->project(*s,outStream);
257 state_->gnorm = s->norm();
260 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
// NOTE(review): garbled listing -- the writeHeader() signature, closing braces,
// and (presumably) the trailing os.flags(osFlags) restore are missing from
// this view; code left byte-identical.
// writeHeader() fragment: prints the column legend (verbosity > 1) followed by
// the column header row. Stream flags are saved on entry so formatting
// changes do not leak to the caller.
265 template<
typename Real>
267 std::ios_base::fmtflags osFlags(os.flags());
268 if (verbosity_ > 1) {
// Legend block, framed by 109-character separator lines.
269 os << std::string(109,
'-') << std::endl;
270 os <<
"Projected gradient descent";
271 os <<
" status output definitions" << std::endl << std::endl;
272 os <<
"  iter     - Number of iterates (steps taken)" << std::endl;
273 os <<
"  value    - Objective function value" << std::endl;
274 os <<
"  gnorm    - Norm of the gradient" << std::endl;
275 os <<
"  snorm    - Norm of the step (update to optimization vector)" << std::endl;
276 os <<
"  alpha    - Line search step length" << std::endl;
277 os <<
"  #fval    - Cumulative number of times the objective function was evaluated" << std::endl;
278 os <<
"  #grad    - Cumulative number of times the gradient was computed" << std::endl;
279 os << std::string(109,
'-') << std::endl;
// Column header row (widths match the rows written by writeOutput()).
283 os << std::setw(6) << std::left <<
"iter";
284 os << std::setw(15) << std::left <<
"value";
285 os << std::setw(15) << std::left <<
"gnorm";
286 os << std::setw(15) << std::left <<
"snorm";
287 os << std::setw(15) << std::left <<
"alpha";
288 os << std::setw(10) << std::left <<
"#fval";
289 os << std::setw(10) << std::left <<
"#grad";
// NOTE(review): garbled listing -- signature, closing brace, and the
// os.flags(osFlags) restore are missing from this view; code left
// byte-identical.
// writeName() fragment: prints the algorithm's display name.
294 template<
typename Real>
296 std::ios_base::fmtflags osFlags(os.flags());
297 os << std::endl <<
"Projected Gradient Descent with Backtracking Line Search (Type B, Bound Constraints)" << std::endl;
// NOTE(review): garbled listing -- the writeOutput() signature, the else
// keyword between the two branches, trailing std::endl/flags-restore lines,
// and closing braces are missing from this view; code left byte-identical.
// writeOutput() fragment: prints one status row in scientific notation.
// On the initial row (iter == 0) the snorm and alpha columns are filled
// with "---" since no step has been taken yet.
301 template<
typename Real>
303 std::ios_base::fmtflags osFlags(os.flags());
304 os << std::scientific << std::setprecision(6);
305 if ( state_->iter == 0 ) writeName(os);
306 if ( write_header ) writeHeader(os);
307 if ( state_->iter == 0 ) {
309 os << std::setw(6) << std::left << state_->iter;
310 os << std::setw(15) << std::left << state_->value;
311 os << std::setw(15) << std::left << state_->gnorm;
312 os << std::setw(15) << std::left <<
"---";
313 os << std::setw(15) << std::left <<
"---";
314 os << std::setw(10) << std::left << state_->nfval;
315 os << std::setw(10) << std::left << state_->ngrad;
// Regular iteration row: all columns populated.
320 os << std::setw(6) << std::left << state_->iter;
321 os << std::setw(15) << std::left << state_->value;
322 os << std::setw(15) << std::left << state_->gnorm;
323 os << std::setw(15) << std::left << state_->snorm;
324 os << std::setw(15) << std::left << state_->searchSize;
325 os << std::setw(10) << std::left << state_->nfval;
326 os << std::setw(10) << std::left << state_->ngrad;
Provides the interface to evaluate objective functions.
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
void writeName(std::ostream &os) const override
Print step name.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
virtual void writeExitStatus(std::ostream &os) const
Defines the linear algebra or vector space interface.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout)
Provides an interface to check status of optimization algorithms.
Provides the interface to apply upper and lower bound constraints.
void writeHeader(std::ostream &os) const override
Print iterate header.
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual optimization vector spaces.
void initialize(const Vector< Real > &x, const Vector< Real > &g)
virtual void set(const Vector &x)
Set y &larr; x, where y = *this.
GradientAlgorithm(ParameterList &list)