ROL
ROL_TypeB_SpectralGradientAlgorithm_Def.hpp
Go to the documentation of this file.
1// @HEADER
2// *****************************************************************************
3// Rapid Optimization Library (ROL) Package
4//
5// Copyright 2014 NTESS and the ROL contributors.
6// SPDX-License-Identifier: BSD-3-Clause
7// *****************************************************************************
8// @HEADER
9
10#ifndef ROL_TYPEB_SPECTRALGRADIENTALGORITHM_DEF_HPP
11#define ROL_TYPEB_SPECTRALGRADIENTALGORITHM_DEF_HPP
12
13#include <deque>
14
15namespace ROL {
16namespace TypeB {
17
18template<typename Real>
20 // Set status test
21 status_->reset();
22 status_->add(makePtr<StatusTest<Real>>(list));
23
24 // Parse parameter list
25 ParameterList &lslist = list.sublist("Step").sublist("Spectral Gradient");
26 maxit_ = lslist.get("Function Evaluation Limit", 20);
27 lambda_ = lslist.get("Initial Spectral Step Size", -1.0);
28 lambdaMin_ = lslist.get("Minimum Spectral Step Size", 1e-8);
29 lambdaMax_ = lslist.get("Maximum Spectral Step Size", 1e8);
30 sigma1_ = lslist.get("Lower Step Size Safeguard", 0.1);
31 sigma2_ = lslist.get("Upper Step Size Safeguard", 0.9);
32 rhodec_ = lslist.get("Backtracking Rate", 0.5);
33 gamma_ = lslist.get("Sufficient Decrease Tolerance", 1e-4);
34 maxSize_ = lslist.get("Maximum Storage Size", 10);
35 verbosity_ = list.sublist("General").get("Output Level", 0);
37}
38
39template<typename Real>
41 const Vector<Real> &g,
42 Objective<Real> &obj,
44 std::ostream &outStream) {
45 const Real zero(0), one(1);
46 if (proj_ == nullPtr)
47 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
48 // Initialize data
50 // Update approximate gradient and approximate objective function.
51 Real ftol = std::sqrt(ROL_EPSILON<Real>());
52 proj_->project(x,outStream); state_->nproj++;
54 state_->value = obj.value(x,ftol); state_->nfval++;
55 obj.gradient(*state_->gradientVec,x,ftol); state_->ngrad++;
56 state_->stepVec->set(x);
57 state_->stepVec->axpy(-one,state_->gradientVec->dual());
58 proj_->project(*state_->stepVec,outStream); state_->nproj++;
59 state_->stepVec->axpy(-one,x);
60 state_->gnorm = state_->stepVec->norm();
61 state_->snorm = ROL_INF<Real>();
62 if (lambda_ <= zero && state_->gnorm != zero)
63 lambda_ = std::max(lambdaMin_,std::min(one/state_->gnorm,lambdaMax_));
64}
65
66template<typename Real>
68 const Vector<Real> &g,
69 Objective<Real> &obj,
71 std::ostream &outStream ) {
72 const Real half(0.5), one(1), eps(std::sqrt(ROL_EPSILON<Real>()));
73 // Initialize trust-region data
74 initialize(x,g,obj,bnd,outStream);
75 Ptr<Vector<Real>> s = x.clone(), y = g.clone(), xmin = x.clone();
76 Real ftrial(0), fmax(0), gs(0), alpha(1), alphaTmp(1), fmin(0);
77 Real ys(0), ss(0), tol(std::sqrt(ROL_EPSILON<Real>()));
78 int ls_nfval = 0;
79 std::deque<Real> fqueue; fqueue.push_back(state_->value);
80
81 fmin = state_->value;
82 xmin->set(x);
83
84 // Output
85 if (verbosity_ > 0) writeOutput(outStream, true);
86
87 // Iterate spectral projected gradient
88 state_->stepVec->set(state_->gradientVec->dual());
89 while (status_->check(*state_)) {
90 // Compute projected spectral step
91 state_->iterateVec->set(x);
92 state_->iterateVec->axpy(-lambda_,*state_->stepVec);
93 proj_->project(*state_->iterateVec,outStream); state_->nproj++;
94 s->set(*state_->iterateVec);
95 s->axpy(-one,x);
96
97 // Nonmonotone Linesearch
98 ls_nfval = 0;
99 obj.update(*state_->iterateVec,UpdateType::Trial);
100 ftrial = obj.value(*state_->iterateVec,tol); ls_nfval++;
101 alpha = one;
102 fmax = *std::max_element(fqueue.begin(),fqueue.end());
103 gs = state_->gradientVec->apply(*s);
104 if (verbosity_ > 1) {
105 outStream << " In TypeB::SpectralGradientAlgorithm Line Search" << std::endl;
106 outStream << " Step size: " << alpha << std::endl;
107 outStream << " Trial objective value: " << ftrial << std::endl;
108 outStream << " Max stored objective value: " << fmax << std::endl;
109 outStream << " Computed reduction: " << fmax-ftrial << std::endl;
110 outStream << " Dot product of gradient and step: " << gs << std::endl;
111 outStream << " Sufficient decrease bound: " << -gs*gamma_*alpha << std::endl;
112 outStream << " Number of function evaluations: " << ls_nfval << std::endl;
113 }
114 while (ftrial > fmax + gamma_*alpha*gs && ls_nfval < maxit_) {
115 alphaTmp = -half*alpha*alpha*gs/(ftrial-state_->value-alpha*gs);
116 alpha = (sigma1_*alpha <= alphaTmp && alphaTmp <= sigma2_*alpha) ? alphaTmp : rhodec_*alpha;
117 state_->iterateVec->set(x);
118 state_->iterateVec->axpy(alpha,*s);
119 obj.update(*state_->iterateVec,UpdateType::Trial);
120 ftrial = obj.value(*state_->iterateVec,tol); ls_nfval++;
121 if (verbosity_ > 1) {
122 outStream << " In TypeB::SpectralGradientAlgorithm: Line Search" << std::endl;
123 outStream << " Step size: " << alpha << std::endl;
124 outStream << " Trial objective value: " << ftrial << std::endl;
125 outStream << " Max stored objective value: " << fmax << std::endl;
126 outStream << " Computed reduction: " << fmax-ftrial << std::endl;
127 outStream << " Dot product of gradient and step: " << gs << std::endl;
128 outStream << " Sufficient decrease bound: " << -gs*gamma_*alpha << std::endl;
129 outStream << " Number of function evaluations: " << ls_nfval << std::endl;
130 }
131 }
132 state_->nfval += ls_nfval;
133 if (static_cast<int>(fqueue.size()) == maxSize_) fqueue.pop_front();
134 fqueue.push_back(ftrial);
135
136 // Update state
137 state_->iter++;
138 state_->value = ftrial;
139 state_->searchSize = alpha;
140 x.set(*state_->iterateVec);
141 obj.update(x,UpdateType::Accept,state_->iter);
142
143 // Store the best iterate
144 if (state_->value <= fmin) {
145 fmin = state_->value;
146 xmin->set(x);
147 }
148
149 // Compute spectral step length
150 s->scale(alpha);
151 y->set(*state_->gradientVec);
152 y->scale(-one);
153 obj.gradient(*state_->gradientVec,x,tol); state_->ngrad++;
154 y->plus(*state_->gradientVec);
155 ys = y->apply(*s);
156 ss = s->dot(*s);
157 lambda_ = (ys<=eps ? lambdaMax_ : std::max(lambdaMin_,std::min(ss/ys,lambdaMax_)));
158 state_->snorm = std::sqrt(ss);
159
160 // Compute gradient step
161 state_->stepVec->set(state_->gradientVec->dual());
162
163 // Compute projected gradient norm
164 s->set(x); s->axpy(-one,*state_->stepVec);
165 proj_->project(*s,outStream); state_->nproj++;
166 s->axpy(-one,x);
167 state_->gnorm = s->norm();
168
169 // Update Output
170 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
171 }
172 x.set(*xmin);
173 state_->value = fmin;
175}
176
177template<typename Real>
178void SpectralGradientAlgorithm<Real>::writeHeader( std::ostream& os ) const {
179 std::ios_base::fmtflags osFlags(os.flags());
180 if (verbosity_ > 1) {
181 os << std::string(109,'-') << std::endl;
182 os << "Spectral projected gradient descent";
183 os << " status output definitions" << std::endl << std::endl;
184 os << " iter - Number of iterates (steps taken)" << std::endl;
185 os << " value - Objective function value" << std::endl;
186 os << " gnorm - Norm of the gradient" << std::endl;
187 os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
188 os << " alpha - Line search step length" << std::endl;
189 os << " lambda - Spectral step length" << std::endl;
190 os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
191 os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
192 os << " #proj - Cumulative number of times the projection was computed" << std::endl;
193 os << std::string(109,'-') << std::endl;
194 }
195
196 os << " ";
197 os << std::setw(6) << std::left << "iter";
198 os << std::setw(15) << std::left << "value";
199 os << std::setw(15) << std::left << "gnorm";
200 os << std::setw(15) << std::left << "snorm";
201 os << std::setw(15) << std::left << "alpha";
202 os << std::setw(15) << std::left << "lambda";
203 os << std::setw(10) << std::left << "#fval";
204 os << std::setw(10) << std::left << "#grad";
205 os << std::setw(10) << std::left << "#proj";
206 os << std::endl;
207 os.flags(osFlags);
208}
209
210template<typename Real>
211void SpectralGradientAlgorithm<Real>::writeName( std::ostream& os ) const {
212 std::ios_base::fmtflags osFlags(os.flags());
213 os << std::endl << "Projected Spectral Gradient Method (Type B, Bound Constraints)" << std::endl;
214 os.flags(osFlags);
215}
216
217template<typename Real>
218void SpectralGradientAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
219 std::ios_base::fmtflags osFlags(os.flags());
220 os << std::scientific << std::setprecision(6);
221 if ( state_->iter == 0 ) writeName(os);
222 if ( write_header ) writeHeader(os);
223 if ( state_->iter == 0 ) {
224 os << " ";
225 os << std::setw(6) << std::left << state_->iter;
226 os << std::setw(15) << std::left << state_->value;
227 os << std::setw(15) << std::left << state_->gnorm;
228 os << std::setw(15) << std::left << "---";
229 os << std::setw(15) << std::left << "---";
230 os << std::setw(15) << std::left << lambda_;
231 os << std::setw(10) << std::left << state_->nfval;
232 os << std::setw(10) << std::left << state_->ngrad;
233 os << std::setw(10) << std::left << state_->nproj;
234 os << std::endl;
235 }
236 else {
237 os << " ";
238 os << std::setw(6) << std::left << state_->iter;
239 os << std::setw(15) << std::left << state_->value;
240 os << std::setw(15) << std::left << state_->gnorm;
241 os << std::setw(15) << std::left << state_->snorm;
242 os << std::setw(15) << std::left << state_->searchSize;
243 os << std::setw(15) << std::left << lambda_;
244 os << std::setw(10) << std::left << state_->nfval;
245 os << std::setw(10) << std::left << state_->ngrad;
246 os << std::setw(10) << std::left << state_->nproj;
247 os << std::endl;
248 }
249 os.flags(osFlags);
250}
251
252} // namespace TypeB
253} // namespace ROL
254
255#endif
Objective_SerialSimOpt(const Ptr< Obj > &obj, const V &ui)  [cross-reference garbled in extraction — fragment "z0 zero)()" likely belongs to separate member entries]
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to apply upper and lower bound constraints.
Provides the interface to evaluate objective functions.
virtual void gradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Compute gradient.
virtual Real value(const Vector< Real > &x, Real &tol)=0
Compute value.
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update objective function.
Provides an interface to check status of optimization algorithms.
Ptr< PolyhedralProjection< Real > > proj_
void initialize(const Vector< Real > &x, const Vector< Real > &g)
virtual void writeExitStatus(std::ostream &os) const
const Ptr< AlgorithmState< Real > > state_
const Ptr< CombinedStatusTest< Real > > status_
void writeHeader(std::ostream &os) const override
Print iterate header.
void initialize(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout)
void writeName(std::ostream &os) const override
Print step name.
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual...
Defines the linear algebra or vector space interface.
virtual void set(const Vector &x)
Set where .
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.
Definition ROL_Types.hpp:57
Real ROL_INF(void)
Definition ROL_Types.hpp:71