10#ifndef ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP
11#define ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP
16template<
typename Real>
23 ParameterList &lslist = list.sublist(
"Step").sublist(
"Line Search");
24 maxit_ = lslist.get(
"Function Evaluation Limit", 20);
25 alpha0_ = lslist.get(
"Initial Step Size", 1.0);
26 normAlpha_ = lslist.get(
"Normalize Initial Step Size",
false);
27 alpha0bnd_ = lslist.get(
"Lower Bound for Initial Step Size", 1e-4);
28 useralpha_ = lslist.get(
"User Defined Initial Step Size",
false);
29 usePrevAlpha_ = lslist.get(
"Use Previous Step Length as Initial Guess",
false);
30 c1_ = lslist.get(
"Sufficient Decrease Tolerance", 1e-4);
32 useAdapt_ = lslist.get(
"Use Adaptive Step Size Selection",
true);
33 rhodec_ = lslist.sublist(
"Line-Search Method").get(
"Backtracking Rate", 0.5);
34 rhoinc_ = lslist.sublist(
"Line-Search Method").get(
"Increase Rate" , 2.0);
35 verbosity_ = list.sublist(
"General").get(
"Output Level", 0);
39template<
typename Real>
44 std::ostream &outStream) {
46 if (
proj_ == nullPtr) {
47 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
53 proj_->project(x,outStream);
70 state_->stepVec->axpy(-one,x);
78 Real denom = (fnew -
state_->value - gs);
91template<
typename Real>
96 std::ostream &outStream ) {
100 Ptr<Vector<Real>> s = x.
clone();
101 Real ftrial(0), gs(0), ftrialP(0), alphaP(0), tol(std::sqrt(
ROL_EPSILON<Real>()));
103 bool incAlpha =
false, accept =
true;
114 state_->iterateVec->set(x);
120 s->set(*
state_->iterateVec);
122 gs = s->dot(*
state_->stepVec);
123 incAlpha = (
state_->value - ftrial >= -
c1_*gs);
125 outStream <<
" In TypeB::GradientAlgorithm: Line Search" << std::endl;
126 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
127 outStream <<
" Trial objective value: " << ftrial << std::endl;
128 outStream <<
" Computed reduction: " <<
state_->value-ftrial << std::endl;
129 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
130 outStream <<
" Sufficient decrease bound: " << -gs*
c1_ << std::endl;
131 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
132 outStream <<
" Increase alpha?: " << incAlpha << std::endl;
136 while (
state_->value - ftrial >= -
c1_*gs
142 alphaP =
state_->searchSize;
146 state_->iterateVec->set(x);
152 s->set(*
state_->iterateVec);
154 gs = s->dot(*
state_->stepVec);
156 outStream << std::endl;
157 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
158 outStream <<
" Trial objective value: " << ftrial << std::endl;
159 outStream <<
" Computed reduction: " <<
state_->value-ftrial << std::endl;
160 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
161 outStream <<
" Sufficient decrease bound: " << -gs*
c1_ << std::endl;
162 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
165 if (
state_->value - ftrial < -c1_*gs || ftrial > ftrialP) {
167 state_->searchSize = alphaP;
168 state_->iterateVec->set(x);
171 s->set(*
state_->iterateVec);
179 state_->iterateVec->set(x);
185 s->set(*
state_->iterateVec);
187 gs = s->dot(*
state_->stepVec);
189 outStream << std::endl;
190 outStream <<
" Step size: " <<
state_->searchSize << std::endl;
191 outStream <<
" Trial objective value: " << ftrial << std::endl;
192 outStream <<
" Computed reduction: " <<
state_->value-ftrial << std::endl;
193 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
194 outStream <<
" Sufficient decrease bound: " << -gs*
c1_ << std::endl;
195 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
199 state_->nfval += ls_nfval;
220 s->set(x); s->axpy(-one,*
state_->stepVec);
221 proj_->project(*s,outStream);
223 state_->gnorm = s->norm();
231template<
typename Real>
233 std::ios_base::fmtflags osFlags(os.flags());
235 os << std::string(109,
'-') << std::endl;
236 os <<
"Projected gradient descent";
237 os <<
" status output definitions" << std::endl << std::endl;
238 os <<
" iter - Number of iterates (steps taken)" << std::endl;
239 os <<
" value - Objective function value" << std::endl;
240 os <<
" gnorm - Norm of the gradient" << std::endl;
241 os <<
" snorm - Norm of the step (update to optimization vector)" << std::endl;
242 os <<
" alpha - Line search step length" << std::endl;
243 os <<
" #fval - Cumulative number of times the objective function was evaluated" << std::endl;
244 os <<
" #grad - Cumulative number of times the gradient was computed" << std::endl;
245 os << std::string(109,
'-') << std::endl;
249 os << std::setw(6) << std::left <<
"iter";
250 os << std::setw(15) << std::left <<
"value";
251 os << std::setw(15) << std::left <<
"gnorm";
252 os << std::setw(15) << std::left <<
"snorm";
253 os << std::setw(15) << std::left <<
"alpha";
254 os << std::setw(10) << std::left <<
"#fval";
255 os << std::setw(10) << std::left <<
"#grad";
260template<
typename Real>
262 std::ios_base::fmtflags osFlags(os.flags());
263 os << std::endl <<
"Projected Gradient Descent with Backtracking Line Search (Type B, Bound Constraints)" << std::endl;
267template<
typename Real>
269 std::ios_base::fmtflags osFlags(os.flags());
270 os << std::scientific << std::setprecision(6);
273 if (
state_->iter == 0 ) {
275 os << std::setw(6) << std::left <<
state_->iter;
276 os << std::setw(15) << std::left <<
state_->value;
277 os << std::setw(15) << std::left <<
state_->gnorm;
278 os << std::setw(15) << std::left <<
"---";
279 os << std::setw(15) << std::left <<
"---";
280 os << std::setw(10) << std::left <<
state_->nfval;
281 os << std::setw(10) << std::left <<
state_->ngrad;
286 os << std::setw(6) << std::left <<
state_->iter;
287 os << std::setw(15) << std::left <<
state_->value;
288 os << std::setw(15) << std::left <<
state_->gnorm;
289 os << std::setw(15) << std::left <<
state_->snorm;
290 os << std::setw(15) << std::left <<
state_->searchSize;
291 os << std::setw(10) << std::left <<
state_->nfval;
292 os << std::setw(10) << std::left <<
state_->ngrad;
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to apply upper and lower bound constraints.
Provides the interface to evaluate objective functions.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
Provides an interface to check status of optimization algorithms.
Ptr< PolyhedralProjection< Real > > proj_
void initialize(const Vector< Real > &x, const Vector< Real > &g)
virtual void writeExitStatus(std::ostream &os) const
const Ptr< AlgorithmState< Real > > state_
const Ptr< CombinedStatusTest< Real > > status_
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual optimization vector spaces, where the user does not define the dual() method.
void writeHeader(std::ostream &os) const override
Print iterate header.
void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout)
void writeName(std::ostream &os) const override
Print step name.
GradientAlgorithm(ParameterList &list)
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
Defines the linear algebra or vector space interface.
virtual void set(const Vector &x)
Set \f$y \leftarrow x\f$ where \f$y = \mathtt{*this}\f$.
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.