ROL
ROL_TypeB_MoreauYosidaAlgorithm_Def.hpp
Go to the documentation of this file.
// @HEADER
// *****************************************************************************
// Rapid Optimization Library (ROL) Package
//
// Copyright 2014 NTESS and the ROL contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef ROL_TYPEB_MOREAUYOSIDAALGORITHM_DEF_HPP
#define ROL_TYPEB_MOREAUYOSIDAALGORITHM_DEF_HPP

#include "ROL_TypeU_AlgorithmFactory.hpp"
namespace ROL {
namespace TypeB {

18template<typename Real>
20 : TypeB::Algorithm<Real>::Algorithm(), secant_(secant),
21 tau_(10), print_(false), list_(list), subproblemIter_(0) {
22 // Set status test
23 status_->reset();
24 status_->add(makePtr<StatusTest<Real>>(list));
25
26 // Parse parameters
27 Real ten(10), oem6(1.e-6), oem8(1.e-8), oe8(1e8);
28 ParameterList& steplist = list.sublist("Step").sublist("Moreau-Yosida Penalty");
29 state_->searchSize = steplist.get("Initial Penalty Parameter", ten);
30 maxPenalty_ = steplist.get("Maximum Penalty Parameter", oe8);
31 tau_ = steplist.get("Penalty Parameter Growth Factor", ten);
32 updatePenalty_ = steplist.get("Update Penalty", true);
33 updateMultiplier_ = steplist.get("Update Multiplier", true);
34 print_ = steplist.sublist("Subproblem").get("Print History", false);
35 // Set parameters for step subproblem
36 Real gtol = steplist.sublist("Subproblem").get("Optimality Tolerance", oem8);
37 Real ctol = steplist.sublist("Subproblem").get("Feasibility Tolerance", oem8);
38 int maxit = steplist.sublist("Subproblem").get("Iteration Limit", 1000);
39 bool reltol = steplist.sublist("Subproblem").get("Use Relative Tolerances", true);
40 Real stol = oem6*std::min(gtol,ctol);
41 list_.sublist("Status Test").set("Gradient Tolerance", gtol);
42 list_.sublist("Status Test").set("Constraint Tolerance", ctol);
43 list_.sublist("Status Test").set("Step Tolerance", stol);
44 list_.sublist("Status Test").set("Iteration Limit", maxit);
45 list_.sublist("Status Test").set("Use Relative Tolerances", reltol);
46 // Get step name from parameterlist
47 stepname_ = steplist.sublist("Subproblem").get("Step Type","Trust Region");
48 list_.sublist("Step").set("Type",stepname_);
49
50 // Output settings
51 verbosity_ = list.sublist("General").get("Output Level", 0);
53 print_ = (verbosity_ > 2 ? true : print_);
54 list_.sublist("General").set("Output Level",(print_ ? verbosity_ : 0));
55}
56
57template<typename Real>
59 const Vector<Real> &g,
62 Vector<Real> &pwa,
63 std::ostream &outStream) {
64 hasEcon_ = true;
65 if (proj_ == nullPtr) {
66 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
67 hasEcon_ = false;
68 }
69 // Initialize data
71 // Initialize the algorithm state
72 state_->nfval = 0;
73 state_->ngrad = 0;
74 updateState(x,myobj,bnd,pwa,outStream);
75}
76
77
78template<typename Real>
82 Vector<Real> &pwa,
83 std::ostream &outStream) {
84 const Real one(1);
85 Real zerotol = std::sqrt(ROL_EPSILON<Real>());
86 // Update objective and constraint.
87 if (state_->iter == 0) {
88 myobj.update(x,UpdateType::Initial,state_->iter);
89 }
90 //else {
91 // myobj.update(x,UpdateType::Accept,state_->iter);
92 //}
93 // Compute norm of the gradient of the Lagrangian
94 state_->value = myobj.getObjectiveValue(x, zerotol);
95 myobj.getObjectiveGradient(*state_->gradientVec, x, zerotol);
96 //myobj.gradient(*state_->gradientVec, x, zerotol);
97 //gnorm_ = state_->gradientVec->norm();
98 pwa.set(x);
99 pwa.axpy(-one,state_->gradientVec->dual());
100 proj_->project(pwa,outStream);
101 pwa.axpy(-one,x);
102 gnorm_ = pwa.norm();
103 // Compute constraint violation
105 state_->gnorm = std::max(gnorm_,compViolation_);
106 // Update state
107 state_->nfval++;
108 state_->ngrad++;
109}
110
111template<typename Real>
113 const Vector<Real> &g,
114 Objective<Real> &obj,
116 std::ostream &outStream ) {
117 const Real one(1);
118 Ptr<Vector<Real>> pwa = x.clone();
119 // Initialize Moreau-Yosida data
120 MoreauYosidaObjective<Real> myobj(makePtrFromRef(obj),makePtrFromRef(bnd),
121 x,g,state_->searchSize,updateMultiplier_,
123 initialize(x,g,myobj,bnd,*pwa,outStream);
124 Ptr<TypeU::Algorithm<Real>> algo;
125
126 // Output
127 if (verbosity_ > 0) writeOutput(outStream,true);
128
129 while (status_->check(*state_)) {
130 // Solve augmented Lagrangian subproblem
132 if (hasEcon_) algo->run(x,g,myobj,*proj_->getLinearConstraint(),
133 *proj_->getMultiplier(),*proj_->getResidual(),
134 outStream);
135 else algo->run(x,g,myobj,outStream);
136 subproblemIter_ = algo->getState()->iter;
137
138 // Compute step
139 state_->stepVec->set(x);
140 state_->stepVec->axpy(-one,*state_->iterateVec);
141 state_->snorm = state_->stepVec->norm();
142
143 // Update iterate and Lagrange multiplier
144 state_->iterateVec->set(x);
145
146 // Update objective and constraint
147 state_->iter++;
148
149 // Update state
150 updateState(x,myobj,bnd,*pwa,outStream);
151
152 // Update multipliers
153 if (updatePenalty_) {
154 state_->searchSize *= tau_;
155 state_->searchSize = std::min(state_->searchSize,maxPenalty_);
156 }
157 myobj.updateMultipliers(state_->searchSize,x);
158
159 state_->nfval += myobj.getNumberFunctionEvaluations() + algo->getState()->nfval;
160 state_->ngrad += myobj.getNumberGradientEvaluations() + algo->getState()->ngrad;
161
162 // Update Output
163 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
164 }
166}
167
168template<typename Real>
169void MoreauYosidaAlgorithm<Real>::writeHeader( std::ostream& os ) const {
170 std::ios_base::fmtflags osFlags(os.flags());
171 if (verbosity_ > 1) {
172 os << std::string(109,'-') << std::endl;
173 os << "Moreau-Yosida Penalty Solver";
174 os << " status output definitions" << std::endl << std::endl;
175 os << " iter - Number of iterates (steps taken)" << std::endl;
176 os << " fval - Objective function value" << std::endl;
177 os << " gnorm - Norm of the gradient" << std::endl;
178 os << " ifeas - Infeasibility metric" << std::endl;
179 os << " snorm - Norm of the step (update to optimization vector)" << std::endl;
180 os << " penalty - Penalty parameter for bound constraints" << std::endl;
181 os << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
182 os << " #grad - Cumulative number of times the gradient was computed" << std::endl;
183 os << " subiter - Number of subproblem iterations" << std::endl;
184 os << std::string(109,'-') << std::endl;
185 }
186
187 os << " ";
188 os << std::setw(6) << std::left << "iter";
189 os << std::setw(15) << std::left << "fval";
190 os << std::setw(15) << std::left << "gnorm";
191 os << std::setw(15) << std::left << "ifeas";
192 os << std::setw(15) << std::left << "snorm";
193 os << std::setw(10) << std::left << "penalty";
194 os << std::setw(8) << std::left << "#fval";
195 os << std::setw(8) << std::left << "#grad";
196 os << std::setw(8) << std::left << "subIter";
197 os << std::endl;
198 os.flags(osFlags);
199}
200
201template<typename Real>
202void MoreauYosidaAlgorithm<Real>::writeName( std::ostream& os ) const {
203 std::ios_base::fmtflags osFlags(os.flags());
204 os << std::endl << " Moreau-Yosida Penalty Solver";
205 os << std::endl;
206 os.flags(osFlags);
207}
208
209template<typename Real>
210void MoreauYosidaAlgorithm<Real>::writeOutput( std::ostream& os, bool write_header ) const {
211 std::ios_base::fmtflags osFlags(os.flags());
212 os << std::scientific << std::setprecision(6);
213 if ( state_->iter == 0 ) writeName(os);
214 if ( write_header ) writeHeader(os);
215 if ( state_->iter == 0 ) {
216 os << " ";
217 os << std::setw(6) << std::left << state_->iter;
218 os << std::setw(15) << std::left << state_->value;
219 os << std::setw(15) << std::left << gnorm_;
220 os << std::setw(15) << std::left << compViolation_;
221 os << std::setw(15) << std::left << "---";
222 os << std::scientific << std::setprecision(2);
223 os << std::setw(10) << std::left << state_->searchSize;
224 os << std::scientific << std::setprecision(6);
225 os << std::setw(8) << std::left << state_->nfval;
226 os << std::setw(8) << std::left << state_->ngrad;
227 os << std::setw(8) << std::left << "---";
228 os << std::endl;
229 }
230 else {
231 os << " ";
232 os << std::setw(6) << std::left << state_->iter;
233 os << std::setw(15) << std::left << state_->value;
234 os << std::setw(15) << std::left << gnorm_;
235 os << std::setw(15) << std::left << compViolation_;
236 os << std::setw(15) << std::left << state_->snorm;
237 os << std::scientific << std::setprecision(2);
238 os << std::setw(10) << std::left << state_->searchSize;
239 os << std::scientific << std::setprecision(6);
240 os << std::setw(8) << std::left << state_->nfval;
241 os << std::setw(8) << std::left << state_->ngrad;
242 os << std::setw(8) << std::left << subproblemIter_;
243 os << std::endl;
244 }
245 os.flags(osFlags);
246}
247
} // namespace TypeB
} // namespace ROL

#endif
virtual void initialize(const Vector< Real > &x)
Initialize temporary variables.
Provides the interface to apply upper and lower bound constraints.
Provides the interface to evaluate the Moreau-Yosida penalty function.
void getObjectiveGradient(Vector< Real > &g, const Vector< Real > &x, Real &tol)
Real getObjectiveValue(const Vector< Real > &x, Real &tol)
Real testComplementarity(const Vector< Real > &x)
void update(const Vector< Real > &x, UpdateType type, int iter=-1)
Update Moreau-Yosida penalty function.
void updateMultipliers(Real mu, const Vector< Real > &x)
Provides the interface to evaluate objective functions.
Provides interface for and implements limited-memory secant operators.
Provides an interface to check status of optimization algorithms.
Ptr< PolyhedralProjection< Real > > proj_
void initialize(const Vector< Real > &x, const Vector< Real > &g)
Algorithm()
Constructor, given a step and a status test.
virtual void writeExitStatus(std::ostream &os) const
const Ptr< AlgorithmState< Real > > state_
const Ptr< CombinedStatusTest< Real > > status_
void updateState(const Vector< Real > &x, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Vector< Real > &pwa, std::ostream &outStream=std::cout)
void run(Vector< Real > &x, const Vector< Real > &g, Objective< Real > &obj, BoundConstraint< Real > &bnd, std::ostream &outStream=std::cout) override
Run algorithm on bound constrained problems (Type-B). This general interface supports the use of dual...
MoreauYosidaAlgorithm(ParameterList &list, const Ptr< Secant< Real > > &secant=nullPtr)
void writeName(std::ostream &os) const override
Print step name.
void initialize(Vector< Real > &x, const Vector< Real > &g, MoreauYosidaObjective< Real > &myobj, BoundConstraint< Real > &bnd, Vector< Real > &pwa, std::ostream &outStream=std::cout)
void writeOutput(std::ostream &os, const bool write_header=false) const override
Print iterate status.
void writeHeader(std::ostream &os) const override
Print iterate header.
Defines the linear algebra or vector space interface.
virtual Real norm() const =0
Returns \( \|y\| \) where \( y = \mathrm{*this} \).
virtual void set(const Vector &x)
Set \( y \leftarrow x \) where \( y = \mathrm{*this} \).
virtual ROL::Ptr< Vector > clone() const =0
Clone to make a new (uninitialized) vector.
virtual void axpy(const Real alpha, const Vector &x)
Compute \( y \leftarrow \alpha x + y \) where \( y = \mathrm{*this} \).
Ptr< Algorithm< Real > > AlgorithmFactory(ParameterList &parlist, const Ptr< Secant< Real > > &secant=nullPtr)
Real ROL_EPSILON(void)
Platform-dependent machine epsilon.
Definition ROL_Types.hpp:57