ROL
ROL_TypeB_GradientAlgorithm_Def.hpp
Go to the documentation of this file.
// @HEADER
// *****************************************************************************
// Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP
#define ROL_TYPEB_GRADIENTALGORITHM_DEF_HPP

namespace ROL {
namespace TypeB {
16template<typename Real>
18 // Set status test
19 status_->reset();
20 status_->add(makePtr<StatusTest<Real>>(list));
21
22 // Parse parameter list
23 ParameterList &lslist = list.sublist("Step").sublist("Line Search");
24 maxit_ = lslist.get("Function Evaluation Limit", 20);
25 alpha0_ = lslist.get("Initial Step Size", 1.0);
26 normAlpha_ = lslist.get("Normalize Initial Step Size", false);
27 alpha0bnd_ = lslist.get("Lower Bound for Initial Step Size", 1e-4);
28 useralpha_ = lslist.get("User Defined Initial Step Size", false);
29 usePrevAlpha_ = lslist.get("Use Previous Step Length as Initial Guess", false);
30 c1_ = lslist.get("Sufficient Decrease Tolerance", 1e-4);
31 maxAlpha_ = lslist.get("Maximum Step Size", alpha0_);
32 useAdapt_ = lslist.get("Use Adaptive Step Size Selection", true);
33 rhodec_ = lslist.sublist("Line-Search Method").get("Backtracking Rate", 0.5);
34 rhoinc_ = lslist.sublist("Line-Search Method").get("Increase Rate" , 2.0);
35 verbosity_ = list.sublist("General").get("Output Level", 0);
37}
38
39template<typename Real>
41 const Vector<Real> &g,
42 Objective<Real> &obj,
44 std::ostream &outStream) {
45 const Real one(1);
46 if (proj_ == nullPtr) {
47 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
48 }
49 // Initialize data
51 // Update approximate gradient and approximate objective function.
52 Real ftol = std::sqrt(ROL_EPSILON<Real>());
53 proj_->project(x,outStream);
55 state_->value = obj.value(x,ftol);
56 state_->nfval++;
57 obj.gradient(*state_->gradientVec,x,ftol);
58 state_->ngrad++;
59 state_->stepVec->set(x);
60 state_->stepVec->axpy(-one,state_->gradientVec->dual());
61 proj_->project(*state_->stepVec,outStream);
62 Real fnew = state_->value;
63 if (!useralpha_) {
64 // Evaluate objective at P(x - g)
65 obj.update(*state_->stepVec,UpdateType::Trial);
66 fnew = obj.value(*state_->stepVec,ftol);
68 state_->nfval++;
69 }
70 state_->stepVec->axpy(-one,x);
71 state_->gnorm = state_->stepVec->norm();
72 state_->snorm = ROL_INF<Real>();
73 if (!useralpha_) {
74 const Real half(0.5);
75 // Minimize quadratic interpolate to compute new alpha
76 //Real gs = state_->stepVec->dot(state_->gradientVec->dual());
77 Real gs = state_->stepVec->apply(*state_->gradientVec);
78 Real denom = (fnew - state_->value - gs);
79 bool flag = maxAlpha_ == alpha0_;
80 alpha0_ = ((denom > ROL_EPSILON<Real>()) ? -half*gs/denom : alpha0bnd_);
81 alpha0_ = ((alpha0_ > alpha0bnd_) ? alpha0_ : one);
82 if (flag) maxAlpha_ = alpha0_;
83 }
84 // Normalize initial CP step length
85 if (normAlpha_) {
86 alpha0_ /= state_->gradientVec->norm();
87 }
88 state_->searchSize = alpha0_;
89}
90
91template<typename Real>
93 const Vector<Real> &g,
94 Objective<Real> &obj,
96 std::ostream &outStream ) {
97 const Real one(1);
98 // Initialize trust-region data
99 initialize(x,g,obj,bnd,outStream);
100 Ptr<Vector<Real>> s = x.clone();
101 Real ftrial(0), gs(0), ftrialP(0), alphaP(0), tol(std::sqrt(ROL_EPSILON<Real>()));
102 int ls_nfval = 0;
103 bool incAlpha = false, accept = true;
104
105 // Output
106 if (verbosity_ > 0) writeOutput(outStream,true);
107
108 // Compute steepest descent step
109 state_->stepVec->set(state_->gradientVec->dual());
110 while (status_->check(*state_)) {
111 accept = true;
112 // Perform backtracking line search
113 if (!usePrevAlpha_ && !useAdapt_) state_->searchSize = alpha0_;
114 state_->iterateVec->set(x);
115 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
116 proj_->project(*state_->iterateVec,outStream);
117 obj.update(*state_->iterateVec,UpdateType::Trial);
118 ftrial = obj.value(*state_->iterateVec,tol);
119 ls_nfval = 1;
120 s->set(*state_->iterateVec);
121 s->axpy(-one,x);
122 gs = s->dot(*state_->stepVec);
123 incAlpha = (state_->value - ftrial >= -c1_*gs);
124 if (verbosity_ > 1) {
125 outStream << " In TypeB::GradientAlgorithm: Line Search" << std::endl;
126 outStream << " Step size: " << state_->searchSize << std::endl;
127 outStream << " Trial objective value: " << ftrial << std::endl;
128 outStream << " Computed reduction: " << state_->value-ftrial << std::endl;
129 outStream << " Dot product of gradient and step: " << gs << std::endl;
130 outStream << " Sufficient decrease bound: " << -gs*c1_ << std::endl;
131 outStream << " Number of function evaluations: " << ls_nfval << std::endl;
132 outStream << " Increase alpha?: " << incAlpha << std::endl;
133 }
134 if (incAlpha && useAdapt_) {
135 ftrialP = ROL_INF<Real>();
136 while ( state_->value - ftrial >= -c1_*gs
137 && ftrial <= ftrialP
138 && state_->searchSize < maxAlpha_
139 && ls_nfval < maxit_ ) {
140 // Previous value was acceptable
141 obj.update(*state_->iterateVec,UpdateType::Accept);
142 alphaP = state_->searchSize;
143 ftrialP = ftrial;
144 state_->searchSize *= rhoinc_;
145 state_->searchSize = std::min(state_->searchSize,maxAlpha_);
146 state_->iterateVec->set(x);
147 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
148 proj_->project(*state_->iterateVec,outStream);
149 obj.update(*state_->iterateVec,UpdateType::Trial);
150 ftrial = obj.value(*state_->iterateVec,tol);
151 ls_nfval++;
152 s->set(*state_->iterateVec);
153 s->axpy(-one,x);
154 gs = s->dot(*state_->stepVec);
155 if (verbosity_ > 1) {
156 outStream << std::endl;
157 outStream << " Step size: " << state_->searchSize << std::endl;
158 outStream << " Trial objective value: " << ftrial << std::endl;
159 outStream << " Computed reduction: " << state_->value-ftrial << std::endl;
160 outStream << " Dot product of gradient and step: " << gs << std::endl;
161 outStream << " Sufficient decrease bound: " << -gs*c1_ << std::endl;
162 outStream << " Number of function evaluations: " << ls_nfval << std::endl;
163 }
164 }
165 if (state_->value - ftrial < -c1_*gs || ftrial > ftrialP) {
166 ftrial = ftrialP;
167 state_->searchSize = alphaP;
168 state_->iterateVec->set(x);
169 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
170 proj_->project(*state_->iterateVec,outStream);
171 s->set(*state_->iterateVec);
172 s->axpy(-one,x);
173 accept = false;
174 }
175 }
176 else {
177 while ( state_->value - ftrial < -c1_*gs && ls_nfval < maxit_ ) {
178 state_->searchSize *= rhodec_;
179 state_->iterateVec->set(x);
180 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
181 proj_->project(*state_->iterateVec,outStream);
182 obj.update(*state_->iterateVec,UpdateType::Trial);
183 ftrial = obj.value(*state_->iterateVec,tol);
184 ls_nfval++;
185 s->set(*state_->iterateVec);
186 s->axpy(-one,x);
187 gs = s->dot(*state_->stepVec);
188 if (verbosity_ > 1) {
189 outStream << std::endl;
190 outStream << " Step size: " << state_->searchSize << std::endl;
191 outStream << " Trial objective value: " << ftrial << std::endl;
192 outStream << " Computed reduction: " << state_->value-ftrial << std::endl;
193 outStream << " Dot product of gradient and step: " << gs << std::endl;
194 outStream << " Sufficient decrease bound: " << -gs*c1_ << std::endl;
195 outStream << " Number of function evaluations: " << ls_nfval << std::endl;
196 }
197 }
198 }
199 state_->nfval += ls_nfval;
200
201 // Compute norm of step
202 state_->stepVec->set(*s);
203 state_->snorm = state_->stepVec->norm();
204
205 // Update iterate
206 x.set(*state_->iterateVec);
207
208 // Compute new value and gradient
209 state_->iter++;
210 state_->value = ftrial;
211 if (accept) obj.update(x,UpdateType::Accept,state_->iter);
212 else obj.update(x,UpdateType::Revert,state_->iter);
213 obj.gradient(*state_->gradientVec,x,tol);
214 state_->ngrad++;
215
216 // Compute steepest descent step
217 state_->stepVec->set(state_->gradientVec->dual());
218
219 // Compute projected gradient norm
220 s->set(x); s->axpy(-one,*state_->stepVec);
221 proj_->project(*s,outStream);
222 s->axpy(-one,x);
223 state_->gnorm = s->norm();
224
225 // Update Output
226 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
227 }
229}
230
231template<typename Real>
232void GradientAlgorithm<Real>::writeHeader( std::ostream& os ) const {
233 std::ios_base::fmtflags osFlags(os.flags());
234 if (verbosity_ > 1) {
235 os << std::string(109,'-') << std::endl;
236 os << "Projected gradient descent";
237 os << " status output definitions" << std::endl << std::endl;
238 os << " iter - Number of iterates (steps taken)" << std::endl;
239 os << " value - Objective function value" << std::endl;
240 os << " gnorm - Norm of the gradient" << std::endl;
241 os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
242 os << " alpha - Line search step length" << std::endl;
243 os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
244 os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
245 os << std::string(109,'-') << std::endl;
246 }
247
248 os << " ";
249 os << std::setw(6) << std::left << "iter";
250 os << std::setw(15) << std::left << "value";
251 os << std::setw(15) << std::left << "gnorm";
252 os << std::setw(15) << std::left << "snorm";
253 os << std::setw(15) << std::left << "alpha";
254 os << std::setw(10) << std::left << "#fval";
255 os << std::setw(10) << std::left << "#grad";
256 os << std::endl;
257 os.flags(osFlags);
258}
259
260template<typename Real>
261void GradientAlgorithm<Real>::writeName( std::ostream& os ) const {
262 std::ios_base::fmtflags osFlags(os.flags());
263 os << std::endl << "Projected Gradient Descent with Backtracking Line Search (Type B, Bound Constraints)" << std::endl;
264 os.flags(osFlags);
265}
266
267template<typename Real>
268void GradientAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
269 std::ios_base::fmtflags osFlags(os.flags());
270 os << std::scientific << std::setprecision(6);
271 if ( state_->iter == 0 ) writeName(os);
272 if ( write_header ) writeHeader(os);
273 if ( state_->iter == 0 ) {
274 os << " ";
275 os << std::setw(6) << std::left << state_->iter;
276 os << std::setw(15) << std::left << state_->value;
277 os << std::setw(15) << std::left << state_->gnorm;
278 os << std::setw(15) << std::left << "---";
279 os << std::setw(15) << std::left << "---";
280 os << std::setw(10) << std::left << state_->nfval;
281 os << std::setw(10) << std::left << state_->ngrad;
282 os << std::endl;
283 }
284 else {
285 os << " ";
286 os << std::setw(6) << std::left << state_->iter;
287 os << std::setw(15) << std::left << state_->value;
288 os << std::setw(15) << std::left << state_->gnorm;
289 os << std::setw(15) << std::left << state_->snorm;
290 os << std::setw(15) << std::left << state_->searchSize;
291 os << std::setw(10) << std::left << state_->nfval;
292 os << std::setw(10) << std::left << state_->ngrad;
293 os << std::endl;
294 }
295 os.flags(osFlags);
296}
297
} // namespace TypeB
} // namespace ROL

#endif
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to apply upper and lower bound constraints.
Provides the interface to evaluate objective functions.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
Provides an interface to check status of optimization algorithms.
Ptr< PolyhedralProjection< Real > > proj_
void initialize(const Vector< Real > &x, const Vector< Real > &g)
virtual void writeExitStatus(std::ostream &os) const
const Ptr< AlgorithmState< Real > > state_
const Ptr< CombinedStatusTest< Real > > status_
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual...
void writeHeader(std::ostream &os) const override
Print iterate header.
void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout)
void writeName(std::ostream &os) const override
Print step name.
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
Defines the linear algebra or vector space interface.
virtual void set(const Vector &x)
Set where .
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.
Definition ROL_Types.hpp:57
Real ROL_INF(void)
Definition ROL_Types.hpp:71