G+Smo 25.01.0
Geometry + Simulation Modules
gsIpOpt.hpp
#ifdef gsIpOpt_ENABLED
#include "IpTNLP.hpp"
#include "IpIpoptApplication.hpp"
#include "IpSolveStatistics.hpp"
#endif

#include <gsIO/gsFileManager.h>

namespace gismo
{

template <class T>
class gsIpOptPrivate
{
public:
#ifdef gsIpOpt_ENABLED
    // Pointer to IpOpt interface
    Ipopt::SmartPtr<gsIpOptTNLP<T>> tnlp;
#endif

};

#ifdef gsIpOpt_ENABLED
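// Adaptor class that exposes a gsOptProblem<T> to IPOPT through the Ipopt::TNLP interface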
template<typename T>
class gsIpOptTNLP : public Ipopt::TNLP
{
    typedef Ipopt::Index                     Index;
    typedef Ipopt::Number                    Number;
    typedef Ipopt::SolverReturn              SolverReturn;
    typedef Ipopt::IpoptData                 IpoptData;
    typedef Ipopt::IpoptCalculatedQuantities IpoptCalculatedQuantities;
public:
    gsIpOptTNLP(gsOptProblem<T> * op)
    :
    m_op(op)
    {
        m_curDesign.resize(m_op->numDesignVars(),1);
        m_curDesign.setZero();
    }

    void setCurrentDesign(const gsMatrix<T> & currentDesign)
    {
        m_curDesign = currentDesign;
    }

    bool intermediateCallback() { return true; }

    const gsMatrix<T> & currentDesign() { return m_curDesign; }
    const gsMatrix<T> & lambda()        { return m_lambda; }

public:
    bool get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
                      Index& nnz_h_lag, IndexStyleEnum& index_style)
    {
        // gsDebug<<"Getting get_nlp_info.\n";

        n = m_op->numDesignVars();
        m = m_op->numConstraints();

        // Nonzeros in the constraint Jacobian
        nnz_jac_g = m_op->numConJacNonZero();

        // Hessian of the Lagrangian is not supported yet
        nnz_h_lag = 0;

        // index style for row/col entries:
        // C_STYLE: 0-based, FORTRAN_STYLE: 1-based
        index_style = C_STYLE;

        return true;
    }

    bool get_bounds_info(Index n, Number* x_l, Number* x_u,
                         Index m, Number* g_l, Number* g_u)
    {
        //gsDebug<<"Getting get_bounds_info.\n";

        // to do: { memcpy(target, start, numVals); }

        copy_n( m_op->desLowerBounds().data(), n, x_l );
        copy_n( m_op->desUpperBounds().data(), n, x_u );

        copy_n( m_op->conLowerBounds().data(), m, g_l );
        copy_n( m_op->conUpperBounds().data(), m, g_u );

        return true;
    }

    bool get_starting_point(Index n, bool init_x, Number* x,
                            bool init_z, Number* z_L, Number* z_U,
                            Index m, bool init_lambda,
                            Number* lambda)
    {
        //gsDebug<<"Getting get_starting_point.\n";

        // Here, we assume we only have starting values for the design variables
        copy_n( m_curDesign.data(), n, x );
        return true;
    }

    bool eval_f(Index n, const Number* x, bool new_x, Number& obj_value)
    {
        //gsDebug<<"Getting eval_f.\n";

        gsAsConstVector<T> xx(x, n);
        obj_value = m_op->evalObj( xx );
        return true;
    }

    bool eval_grad_f(Index n, const Number* x, bool new_x, Number* grad_f)
    {
        //gsDebug<<"Getting eval_grad_f.\n";

        gsAsConstVector<T> xx(x , n);
        gsAsVector<T> result (grad_f, n);
        m_op->gradObj_into(xx, result);
        return true;
    }

    bool eval_g(Index n, const Number* x, bool new_x, Index m, Number* g)
    {
        //gsDebug<<"Getting eval_g.\n";

        gsAsConstVector<T> xx(x, n);
        gsAsVector<T> result(g, m);
        m_op->evalCon_into(xx, result);
        return true;
    }

    bool eval_jac_g(Index n, const Number* x, bool new_x,
                    Index m, Index nele_jac, Index* iRow, Index *jCol,
                    Number* values)
    {
        if (values == NULL)
        {
            // pass the structure of the jacobian of the constraints
            copy_n( m_op->conJacRows().data(), nele_jac, iRow );
            copy_n( m_op->conJacCols().data(), nele_jac, jCol );
        }
        else
        {
            gsAsConstVector<T> xx(x , n );
            gsAsVector<T> result(values, nele_jac);
            m_op->jacobCon_into(xx, result);
        }

        return true;
    }


    bool eval_h(Index n, const Number* x, bool new_x,
                Number obj_factor, Index m, const Number* lambda,
                bool new_lambda, Index nele_hess, Index* iRow,
                Index* jCol, Number* values)
    {
        GISMO_ERROR("IpOpt Hessian option not supported yet!");
    }

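    // Called by IPOPT once per iteration; returning false (via intermediateCallback) asks IPOPT to stop early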
    virtual bool intermediate_callback(Ipopt::AlgorithmMode mode,
                                       Index iter, Number obj_value,
                                       Number inf_pr, Number inf_du,
                                       Number mu, Number d_norm,
                                       Number regularization_size,
                                       Number alpha_du, Number alpha_pr,
                                       Index ls_trials,
                                       const IpoptData* ip_data,
                                       IpoptCalculatedQuantities* ip_cq)
    {
        /*
        Ipopt::SmartPtr< const Ipopt::Vector > curr = ip_data->curr()->x();
        Ipopt::SmartPtr< Ipopt::DenseVector > dv = MakeNewDenseVector ();
        curr->Copy(dv);
        m_op->m_curDesign = gsAsConstVector<T>(dx->Values(),m_op->m_curDesign.rows());
        */
        // gsInfo << "\n === intermediateCallback is called === \n\n";
        return this->intermediateCallback();

        //SmartPtr< const IteratesVector > trial = ip_data->trial();
        //int it = ip_data->iter_count();
        //Number tol = ip_data->tol();
    }

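    // Called by IPOPT when the solver terminates; stores the final design variables and constraint multipliers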
    void finalize_solution(SolverReturn status,
                           Index n, const Number* x, const Number* z_L, const Number* z_U,
                           Index m, const Number* g, const Number* lambda,
                           Number obj_value,
                           const IpoptData* ip_data,
                           IpoptCalculatedQuantities* ip_cq)
    {
        m_curDesign = gsAsConstVector<T>(x,n);
        m_lambda    = gsAsConstVector<T>(lambda,m);
    }

private:
    gsOptProblem<T> * m_op;
    gsMatrix<T> m_curDesign;
    gsMatrix<T> m_lambda;

private:
    gsIpOptTNLP(const gsIpOptTNLP & );
    gsIpOptTNLP& operator=(const gsIpOptTNLP & );
};
#endif

template <typename T>
gsIpOpt<T>::~gsIpOpt()
{
    delete m_data;
}

template <typename T>
gsIpOpt<T>::gsIpOpt(gsOptProblem<T> * problem)
:
Base(problem)
{
    this->defaultOptions();

#ifdef gsIpOpt_ENABLED
    m_data = new gsIpOptPrivate<T>();
    m_data->tnlp = new gsIpOptTNLP<T>(m_op);
#else
    m_data = NULL;
#endif
}

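// Runs IPOPT on the wrapped problem; solver settings are read from "options/ipopt.opt" located via gsFileManager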
template <typename T>
void gsIpOpt<T>::solve(const gsMatrix<T> & initialGuess)
{
#ifdef gsIpOpt_ENABLED

    Ipopt::SmartPtr<Ipopt::IpoptApplication> app = IpoptApplicationFactory();
    app->RethrowNonIpoptException(true);

    Ipopt::ApplicationReturnStatus status;
    std::string path = gsFileManager::findInDataDir( "options/ipopt.opt" );
    status = app->Initialize( path );

    if (status != Ipopt::Solve_Succeeded)
    {
        gsWarn << "\n\n*** Error during initialization!\n";
        return;
    }

    gsIpOptTNLP<T> * tmp = dynamic_cast<gsIpOptTNLP<T> * >(Ipopt::GetRawPtr(m_data->tnlp));
    tmp->setCurrentDesign(initialGuess);
    status = app->OptimizeTNLP(m_data->tnlp);
    //if (status != Ipopt::Solve_Succeeded)
    //    gsInfo << "Optimization did not succeed.\n";

    // Retrieve some statistics about the solve
    m_numIterations  = app->Statistics()->IterationCount();
    m_finalObjective = app->Statistics()->FinalObjective();
    m_curDesign      = tmp->currentDesign();
    m_lambda         = tmp->lambda();
    //gsInfo << "\n*** The problem solved in " << numIterations << " iterations!\n";
    //gsInfo << "*** The final value of the objective function is " << finalObjective <<".\n";

#else
    GISMO_NO_IMPLEMENTATION
#endif

}


} // end namespace gismo
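
For orientation, below is a minimal usage sketch (not part of gsIpOpt.hpp): a class derived from gsOptProblem<T> supplies the dimensions, bounds, objective and gradient that the TNLP adaptor above forwards to IPOPT, and gsIpOpt<T>::solve() drives the optimization. The include path, the protected members set in the constructor (m_numDesignVars, m_desLowerBounds, ...) and the currentDesign() accessor are assumptions inferred from the calls in this file, not a verbatim example from the library.

#include <gismo.h>
#include <gsOptimizer/gsIpOpt.h>   // path assumed; adjust to your installation

using namespace gismo;

// Hypothetical unconstrained problem: minimize (u0 - 1)^2 + (u1 + 2)^2
template <class T>
class MyProblem : public gsOptProblem<T>
{
public:
    MyProblem()
    {
        // Problem size and box bounds (protected members assumed from gsOptProblem<T>)
        this->m_numDesignVars    = 2;
        this->m_numConstraints   = 0;
        this->m_numConJacNonZero = 0;
        this->m_desLowerBounds.setConstant(2, -10.0);
        this->m_desUpperBounds.setConstant(2,  10.0);
        this->m_curDesign.setZero(2, 1);
    }

    // Objective value at the design point u
    T evalObj(const gsAsConstVector<T> & u) const
    {
        return (u(0) - 1) * (u(0) - 1) + (u(1) + 2) * (u(1) + 2);
    }

    // Analytic gradient; without it gsOptProblem falls back to finite differences
    void gradObj_into(const gsAsConstVector<T> & u, gsAsVector<T> & result) const
    {
        result(0) = 2 * (u(0) - 1);
        result(1) = 2 * (u(1) + 2);
    }
};

int main()
{
    MyProblem<real_t> problem;
    gsIpOpt<real_t> optimizer(&problem);

    gsMatrix<real_t> x0(2, 1);
    x0 << 0.0, 0.0;
    optimizer.solve(x0);   // reads options/ipopt.opt, then runs IPOPT

    gsInfo << "Solution:\n" << optimizer.currentDesign() << "\n";   // accessor assumed on gsOptimizer<T>
    return 0;
}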