ROL
ROL_TypeG_MoreauYosidaAlgorithm_Def.hpp
Go to the documentation of this file.
1// @HEADER
2// *****************************************************************************
3// Rapid Optimization Library (ROL) Package
4//
5// Copyright 2014 NTESS and the ROL contributors.
6// SPDX-License-Identifier: BSD-3-Clause
7// *****************************************************************************
8// @HEADER
9
10#ifndef ROL_TYPEG_MOREAUYOSIDAALGORITHM_DEF_H
11#define ROL_TYPEG_MOREAUYOSIDAALGORITHM_DEF_H
12
14
15namespace ROL {
16namespace TypeG {
17
18template<typename Real>
20 : TypeG::Algorithm<Real>::Algorithm(), secant_(secant),
21 tau_(10), print_(false), list_(list), subproblemIter_(0) {
22 // Set status test
23 status_->reset();
24 status_->add(makePtr<ConstraintStatusTest<Real>>(list));
25
26 // Parse parameters
27 Real ten(10), oem6(1.e-6), oem8(1.e-8), oe8(1e8);
28 ParameterList& steplist = list.sublist("Step").sublist("Moreau-Yosida Penalty");
29 state_->searchSize = steplist.get("Initial Penalty Parameter", ten);
30 maxPenalty_ = steplist.get("Maximum Penalty Parameter", oe8);
31 tau_ = steplist.get("Penalty Parameter Growth Factor", ten);
32 updatePenalty_ = steplist.get("Update Penalty", true);
33 updateMultiplier_ = steplist.get("Update Multiplier", true);
34 print_ = steplist.sublist("Subproblem").get("Print History", false);
35 // Set parameters for step subproblem
36 Real gtol = steplist.sublist("Subproblem").get("Optimality Tolerance", oem8);
37 Real ctol = steplist.sublist("Subproblem").get("Feasibility Tolerance", oem8);
38 int maxit = steplist.sublist("Subproblem").get("Iteration Limit", 1000);
39 bool reltol = steplist.sublist("Subproblem").get("Use Relative Tolerances", true);
40 Real stol = oem6*std::min(gtol,ctol);
41 list_.sublist("Status Test").set("Gradient Tolerance", gtol);
42 list_.sublist("Status Test").set("Constraint Tolerance", ctol);
43 list_.sublist("Status Test").set("Step Tolerance", stol);
44 list_.sublist("Status Test").set("Iteration Limit", maxit);
45 list_.sublist("Status Test").set("Use Relative Tolerances", reltol);
46 // Get step name from parameterlist
47 stepname_ = steplist.sublist("Subproblem").get("Step Type","Augmented Lagrangian");
48 list_.sublist("Step").set("Type",stepname_);
49
50 // Output settings
51 verbosity_ = list.sublist("General").get("Output Level", 0);
53 print_ = (verbosity_ > 2 ? true : print_);
54 list_.sublist("General").set("Output Level",(print_ ? verbosity_ : 0));
55}
56
57template<typename Real>
59 const Vector<Real> &g,
60 const Vector<Real> &l,
61 const Vector<Real> &c,
65 Vector<Real> &pwa,
66 Vector<Real> &dwa,
67 std::ostream &outStream) {
68 hasPolyProj_ = true;
69 if (proj_ == nullPtr) {
70 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
71 hasPolyProj_ = false;
72 }
73 proj_->project(x,outStream);
74 // Initialize data
76 // Initialize the algorithm state
77 state_->nfval = 0;
78 state_->ngrad = 0;
79 state_->ncval = 0;
80 updateState(x,l,myobj,bnd,con,pwa,dwa,outStream);
81}
82
83
84template<typename Real>
86 const Vector<Real> &l,
90 Vector<Real> &pwa,
91 Vector<Real> &dwa,
92 std::ostream &outStream) {
93 const Real one(1);
94 Real zerotol = std::sqrt(ROL_EPSILON<Real>());
95 // Update objective and constraint
96 if (state_->iter == 0) {
97 myobj.update(x,UpdateType::Initial,state_->iter);
99 }
100 //else {
101 // myobj.update(x,UpdateType::Accept,state_->iter);
102 // con.update(x,UpdateType::Accept,state_->iter);
103 //}
104 // Compute norm of the gradient of the Lagrangian
105 state_->value = myobj.getObjectiveValue(x, zerotol);
106 myobj.getObjectiveGradient(*state_->gradientVec, x, zerotol);
107 //myobj.gradient(*state_->gradientVec, x, zerotol);
108 con.applyAdjointJacobian(dwa, l, x, zerotol);
109 state_->gradientVec->plus(dwa);
110 //gnorm_ = state_->gradientVec->norm();
111 pwa.set(x);
112 pwa.axpy(-one,state_->gradientVec->dual());
113 proj_->project(pwa,outStream);
114 pwa.axpy(-one,x);
115 gnorm_ = pwa.norm();
116 // Compute constraint violation
117 con.value(*state_->constraintVec, x, zerotol);
118 state_->cnorm = state_->constraintVec->norm();
120 state_->gnorm = std::max(gnorm_,compViolation_);
121 // Update state
122 state_->nfval++;
123 state_->ngrad++;
124 state_->ncval++;
125}
126
127template<typename Real>
129 const Vector<Real> &g,
130 Objective<Real> &obj,
132 Constraint<Real> &econ,
133 Vector<Real> &emul,
134 const Vector<Real> &eres,
135 std::ostream &outStream ) {
136 const Real one(1);
137 Ptr<Vector<Real>> pwa = x.clone(), dwa = g.clone();
138 // Initialize Moreau-Yosida data
139 MoreauYosidaObjective<Real> myobj(makePtrFromRef(obj),makePtrFromRef(bnd),
140 x,g,state_->searchSize,updateMultiplier_,
142 initialize(x,g,emul,eres,myobj,bnd,econ,*pwa,*dwa,outStream);
143 Ptr<TypeE::Algorithm<Real>> algo;
144
145 // Output
146 if (verbosity_ > 0) writeOutput(outStream,true);
147
148 while (status_->check(*state_)) {
149 // Solve augmented Lagrangian subproblem
151 emul.zero();
152 if (hasPolyProj_) algo->run(x,g,myobj,econ,emul,eres,
153 *proj_->getLinearConstraint(),
154 *proj_->getMultiplier(),
155 *proj_->getResidual(),outStream);
156 else algo->run(x,g,myobj,econ,emul,eres,outStream);
157 subproblemIter_ = algo->getState()->iter;
158 state_->nfval += algo->getState()->nfval;
159 state_->ngrad += algo->getState()->ngrad;
160 state_->ncval += algo->getState()->ncval;
161
162 // Compute step
163 state_->stepVec->set(x);
164 state_->stepVec->axpy(-one,*state_->iterateVec);
165 state_->snorm = state_->stepVec->norm();
166 state_->lagmultVec->axpy(-one,emul);
167 state_->snorm += state_->lagmultVec->norm();
168
169 // Update iterate and Lagrange multiplier
170 state_->iterateVec->set(x);
171 state_->lagmultVec->set(emul);
172
173 // Update objective and constraint
174 state_->iter++;
175
176 // Update state
177 updateState(x,emul,myobj,bnd,econ,*pwa,*dwa);
178
179 // Update multipliers
180 if (updatePenalty_)
181 state_->searchSize = std::min(tau_*state_->searchSize,maxPenalty_);
182 myobj.updateMultipliers(state_->searchSize,x);
183
184 // Update Output
185 if (verbosity_ > 0) writeOutput(outStream,printHeader_);
186 }
188}
189
190template<typename Real>
191void MoreauYosidaAlgorithm<Real>::writeHeader( std::ostream& os ) const {
192 std::ios_base::fmtflags osFlags(os.flags());
193 if (verbosity_ > 1) {
194 os << std::string(109,'-') << std::endl;
195 os << "Moreau-Yosida Penalty Solver";
196 os << " status output definitions" << std::endl << std::endl;
197 os << " iter - Number of iterates (steps taken)" << std::endl;
198 os << " fval - Objective function value" << std::endl;
199 os << " cnorm - Norm of the constraint" << std::endl;
200 os << " gLnorm - Norm of the gradient of the Lagrangian" << std::endl;
201 os << " ifeas - Infeasibility metric" << std::endl;
202 os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
203 os << " penalty - Penalty parameter for bound constraints" << std::endl;
204 os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
205 os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
206 os << " #cval - Cumulative number of times the constraint was evaluated" << std::endl;
207 os << " subiter - Number of subproblem iterations" << std::endl;
208 os << std::string(109,'-') << std::endl;
209 }
210
211 os << " ";
212 os << std::setw(6) << std::left << "iter";
213 os << std::setw(15) << std::left << "fval";
214 os << std::setw(15) << std::left << "cnorm";
215 os << std::setw(15) << std::left << "gLnorm";
216 os << std::setw(15) << std::left << "ifeas";
217 os << std::setw(15) << std::left << "snorm";
218 os << std::setw(10) << std::left << "penalty";
219 os << std::setw(8) << std::left << "#fval";
220 os << std::setw(8) << std::left << "#grad";
221 os << std::setw(8) << std::left << "#cval";
222 os << std::setw(8) << std::left << "subIter";
223 os << std::endl;
224 os.flags(osFlags);
225}
226
227template<typename Real>
228void MoreauYosidaAlgorithm<Real>::writeName( std::ostream& os ) const {
229 std::ios_base::fmtflags osFlags(os.flags());
230 os << std::endl << "Moreau-Yosida Penalty Solver (Type G, General Constraints)";
231 os << std::endl;
232 os << "Subproblem Solver: " << stepname_ << std::endl;
233 os.flags(osFlags);
234}
235
236template<typename Real>
237void MoreauYosidaAlgorithm<Real>::writeOutput( std::ostream& os, const bool print_header ) const {
238 std::ios_base::fmtflags osFlags(os.flags());
239 os << std::scientific << std::setprecision(6);
240 if ( state_->iter == 0 ) writeName(os);
241 if ( print_header ) writeHeader(os);
242 if ( state_->iter == 0 ) {
243 os << " ";
244 os << std::setw(6) << std::left << state_->iter;
245 os << std::setw(15) << std::left << state_->value;
246 os << std::setw(15) << std::left << state_->cnorm;
247 os << std::setw(15) << std::left << gnorm_;
248 os << std::setw(15) << std::left << compViolation_;
249 os << std::setw(15) << std::left << "---";
250 os << std::scientific << std::setprecision(2);
251 os << std::setw(10) << std::left << state_->searchSize;
252 os << std::setw(8) << std::left << state_->nfval;
253 os << std::setw(8) << std::left << state_->ngrad;
254 os << std::setw(8) << std::left << state_->ncval;
255 os << std::setw(8) << std::left << "---";
256 os << std::endl;
257 }
258 else {
259 os << " ";
260 os << std::setw(6) << std::left << state_->iter;
261 os << std::setw(15) << std::left << state_->value;
262 os << std::setw(15) << std::left << state_->cnorm;
263 os << std::setw(15) << std::left << gnorm_;
264 os << std::setw(15) << std::left << compViolation_;
265 os << std::setw(15) << std::left << state_->snorm;
266 os << std::scientific << std::setprecision(2);
267 os << std::setw(10) << std::left << state_->searchSize;
268 os << std::scientific << std::setprecision(6);
269 os << std::setw(8) << std::left << state_->nfval;
270 os << std::setw(8) << std::left << state_->ngrad;
271 os << std::setw(8) << std::left << state_->ncval;
272 os << std::setw(8) << std::left << subproblemIter_;
273 os << std::endl;
274 }
275 os.flags(osFlags);
276}
277
278} // namespace TypeG
279} // namespace ROL
280
281#endif
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to apply upper and lower bound constraints.
Provides an interface to check status of optimization algorithms for problems with equality constrain...
Defines the general constraint operator interface.
virtual void value(Vector< Real > &c, const Vector< Real > &x, Real &tol)=0
Evaluate the constraint operator at .
virtual void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update constraint function.
virtual void applyAdjointJacobian(Vector< Real > &ajv, const Vector< Real > &v, const Vector< Real > &x, Real &tol)
Apply the adjoint of the constraint Jacobian at \(x\), \(c'(x)^*\), to the vector \(v\).
Provides the interface to evaluate the Moreau-Yosida penalty function.
void getObjectiveGradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Real getObjectiveValue(const Vector< Real > &x, Real &tol)
Real testComplementarity(const Vector< Real > &x)
void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update Moreau-Yosida penalty function.
void updateMultipliers(Real mu, const Vector< Real > &x)
Provides the interface to evaluate objective functions.
Provides interface for and implements limited-memory secant operators.
Algorithm()
Constructor, given a step and a status test.
void initialize(const Vector< Real > &x, const Vector< Real > &g, const Vector< Real > &mul, const Vector< Real > &c)
virtual void writeExitStatus(std::ostream &os) const
const Ptr< CombinedStatusTest< Real > > status_
Ptr< PolyhedralProjection< Real > > proj_
const Ptr< AlgorithmState< Real > > state_
void writeName(std::ostream &os) const override
Print step name.
void initialize(Vector< Real > &x, const Vector< Real > &g, const Vector< Real > &l, const Vector< Real > &c, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Constraint< Real > &con, Vector< Real > &pwa, Vector< Real > &dwa, std::ostream &outStream=std::cout)
void updateState(const Vector< Real > &x, const Vector< Real > &l, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Constraint< Real > &con, Vector< Real > &pwa, Vector< Real > &dwa, std::ostream &outStream=std::cout)
void writeOutput(std::ostream &os, const bool print_header=false) const override
Print iterate status.
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, Constraint< Real > &econ, Vector< Real > &emul, const Vector< Real > &eres, std::ostream &outStream=std::cout) override
Run algorithm on general constrained problems (Type-G). This is the primary Type-G interface.
void writeHeader(std::ostream &os) const override
Print iterate header.
MoreauYosidaAlgorithm(ParameterList &list, const Ptr< Secant< Real > > &secant=nullPtr)
Defines the linear algebra or vector space interface.
virtual Real norm() const =0
Returns \( \| y \| \) where \( y = \mathtt{*this} \).
virtual void set(const Vector &x)
Set where .
virtual void zero()
Set to zero vector.
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
virtual void axpy(const Real alpha, const Vector &x)
Compute \( y \leftarrow \alpha x + y \) where \( y = \mathtt{*this} \).
Ptr< TypeE::Algorithm< Real > > AlgorithmFactory(ParameterList &parlist, const Ptr< Secant< Real > > &secant=nullPtr)
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.
Definition ROL_Types.hpp:57