#ifdef gsIpOpt_ENABLED
#include "IpIpoptApplication.hpp"
#include "IpSolveStatistics.hpp"
#ifdef gsIpOpt_ENABLED
    Ipopt::SmartPtr<gsIpOptTNLP<T>> tnlp;
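// The enclosing declaration is not shown in this excerpt; judging from solve()
// further below, which allocates a gsIpOptPrivate<T> and then uses m_data->tnlp,
// the member above presumably sits in a small helper struct along these lines
// (a sketch, not the verbatim declaration):
//
//   template <typename T>
//   struct gsIpOptPrivate
//   {
//       Ipopt::SmartPtr<gsIpOptTNLP<T>> tnlp; // adapter handed to IpoptApplication
//   };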
#ifdef gsIpOpt_ENABLED
/** Adapter that exposes a gsOptProblem<T> to IpOpt through the Ipopt::TNLP interface. */
template <typename T>
class gsIpOptTNLP : public Ipopt::TNLP
{
    typedef Ipopt::Index                     Index;
    typedef Ipopt::Number                    Number;
    typedef Ipopt::SolverReturn              SolverReturn;
    typedef Ipopt::IpoptData                 IpoptData;
    typedef Ipopt::IpoptCalculatedQuantities IpoptCalculatedQuantities;

public:
    gsIpOptTNLP(gsOptProblem<T> * op) : m_op(op)
    {
        m_curDesign.resize(m_op->numDesignVars(),1);
        m_curDesign.setZero();
    }
    void setCurrentDesign(const gsMatrix<T> & currentDesign)
    { m_curDesign = currentDesign; }
    /// Called once per iteration; returning false tells IpOpt to abort the run.
    bool intermediateCallback() { return true; }

    const gsMatrix<T> & currentDesign() { return m_curDesign; }
    const gsMatrix<T> & lambda()        { return m_lambda; }
    // Report the problem sizes and the Jacobian sparsity size to IpOpt.
    bool get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
                      Index& nnz_h_lag, IndexStyleEnum& index_style)
    {
        n = m_op->numDesignVars();
        m = m_op->numConstraints();
        nnz_jac_g = m_op->numConJacNonZero();
        index_style = C_STYLE;  // 0-based row/column indices
        return true;
    }
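    // For illustration only (hypothetical numbers, not taken from the library): a
    // problem with two design variables and one constraint g(x) = x_0 * x_1 that
    // depends on both of them would report n = 2, m = 1 and nnz_jac_g = 2 here,
    // and eval_jac_g below must then deliver exactly two (row, column) pairs and
    // two values per evaluation.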
    // Copy the design-variable and constraint bounds into IpOpt's raw arrays;
    // copy_n (gsMemory.h) copies n values into a pre-allocated destination, like std::copy_n.
    bool get_bounds_info(Index n, Number* x_l, Number* x_u,
                         Index m, Number* g_l, Number* g_u)
    {
        copy_n( m_op->desLowerBounds().data(), n, x_l );
        copy_n( m_op->desUpperBounds().data(), n, x_u );
        copy_n( m_op->conLowerBounds().data(), m, g_l );
        copy_n( m_op->conUpperBounds().data(), m, g_u );
        return true;
    }
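    // Note on unbounded entries: IpOpt interprets any bound at or beyond +/-1e19
    // (the defaults of its nlp_lower_bound_inf / nlp_upper_bound_inf options) as
    // "no bound", so one-sided constraints or free design variables are expressed
    // simply by storing such sentinel values in the bound vectors copied above.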
    // Provide the initial iterate; only the design variables x are initialized here.
    bool get_starting_point(Index n, bool init_x, Number* x,
                            bool init_z, Number* z_L, Number* z_U,
                            Index m, bool init_lambda, Number* lambda)
    {
        copy_n( m_curDesign.data(), n, x );
        return true;
    }
    // Objective value at x.
    bool eval_f(Index n, const Number* x, bool new_x, Number& obj_value)
    {
        gsAsConstVector<T> xx(x, n);
        obj_value = m_op->evalObj( xx );
        return true;
    }
    // Gradient of the objective at x, written directly into grad_f.
    bool eval_grad_f(Index n, const Number* x, bool new_x, Number* grad_f)
    {
        gsAsConstVector<T> xx(x, n);
        gsAsVector<T> result(grad_f, n);
        m_op->gradObj_into(xx, result);
        return true;
    }
    // Constraint values g(x), written directly into g.
    bool eval_g(Index n, const Number* x, bool new_x, Index m, Number* g)
    {
        gsAsConstVector<T> xx(x, n);
        gsAsVector<T> result(g, m);
        m_op->evalCon_into(xx, result);
        return true;
    }
    // Constraint Jacobian: IpOpt first asks for the sparsity pattern (values == NULL),
    // then for the nonzero values at the current point.
    bool eval_jac_g(Index n, const Number* x, bool new_x,
                    Index m, Index nele_jac, Index* iRow, Index *jCol,
                    Number* values)
    {
        if (values == NULL)
        {   copy_n( m_op->conJacRows().data(), nele_jac, iRow );
            copy_n( m_op->conJacCols().data(), nele_jac, jCol );
        }
        else
        {   gsAsConstVector<T> xx(x, n);
            gsAsVector<T> result(values, nele_jac);
            m_op->jacobCon_into(xx, result);
        }
        return true;
    }
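    // Illustration only (hypothetical problem, matching the sketch near get_nlp_info):
    // for the single constraint g(x) = x_0 * x_1 the structure branch would deliver
    // iRow = {0, 0} and jCol = {0, 1}, and the values branch would then fill
    // values[0] = x_1 and values[1] = x_0, i.e. the entries of dg/dx listed in the
    // same order as the (row, column) pairs.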
    // Exact Hessian of the Lagrangian: not implemented.
    bool eval_h(Index n, const Number* x, bool new_x,
                Number obj_factor, Index m, const Number* lambda,
                bool new_lambda, Index nele_hess, Index* iRow,
                Index* jCol, Number* values)
    {
        GISMO_ERROR("IpOpt Hessian option not supported yet!");
    }
    // Called by IpOpt after every iteration with the current solver statistics;
    // returning false makes IpOpt stop and report a user-requested stop.
    virtual bool intermediate_callback(Ipopt::AlgorithmMode mode,
                                       Index iter, Number obj_value,
                                       Number inf_pr, Number inf_du,
                                       Number mu, Number d_norm,
                                       Number regularization_size,
                                       Number alpha_du, Number alpha_pr,
                                       Index ls_trials,
                                       const IpoptData* ip_data,
                                       IpoptCalculatedQuantities* ip_cq)
    {
        return this->intermediateCallback();
    }
    // Called once when IpOpt finishes; store the final design and the constraint multipliers.
    void finalize_solution(SolverReturn status,
                           Index n, const Number* x,
                           const Number* z_L, const Number* z_U,
                           Index m, const Number* g, const Number* lambda,
                           Number obj_value,
                           const IpoptData* ip_data,
                           IpoptCalculatedQuantities* ip_cq)
    {
        m_curDesign = gsAsConstVector<T>(x, n);
        m_lambda    = gsAsConstVector<T>(lambda, m);
    }
    gsOptProblem<T> * m_op;   // the wrapped optimization problem
    gsMatrix<T> m_curDesign;  // current / final design variables
    gsMatrix<T> m_lambda;     // constraint multipliers reported by IpOpt
private:
    // Copy constructor and assignment are not implemented, to prevent copying the adapter.
    gsIpOptTNLP( const gsIpOptTNLP & );
    gsIpOptTNLP& operator=( const gsIpOptTNLP & );

}; // class gsIpOptTNLP
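// A sketch of the other side of this adapter: a user-defined problem derived from
// gsOptProblem<T> that supplies the callbacks invoked above (evalObj, gradObj_into,
// evalCon_into, jacobCon_into). The class name MyProblem is made up for illustration,
// the exact signatures (including const qualifiers) are assumptions based on the calls
// in gsIpOptTNLP, and the constructor body is left as a comment because the protected
// members holding sizes, bounds and the sparsity pattern are not shown in this excerpt.
template <typename T>
class MyProblem : public gsOptProblem<T>
{
public:
    // minimize (x0-1)^2 + (x1-2)^2   subject to   x0 + x1 <= 3,   0 <= x0, x1 <= 5
    MyProblem()
    {
        // Set up whatever numDesignVars(), numConstraints(), numConJacNonZero(),
        // the bound vectors and conJacRows()/conJacCols() should report to the
        // wrapper above (member names differ between gismo versions).
    }

    T evalObj(const gsAsConstVector<T> & u) const
    { return (u[0]-1)*(u[0]-1) + (u[1]-2)*(u[1]-2); }

    void gradObj_into(const gsAsConstVector<T> & u, gsAsVector<T> & result) const
    { result[0] = 2*(u[0]-1); result[1] = 2*(u[1]-2); }

    void evalCon_into(const gsAsConstVector<T> & u, gsAsVector<T> & result) const
    { result[0] = u[0] + u[1]; }

    void jacobCon_into(const gsAsConstVector<T> &, gsAsVector<T> & result) const
    { result[0] = 1; result[1] = 1; }  // dg/dx0 = 1, dg/dx1 = 1
};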
template <typename T>
gsIpOpt<T>::~gsIpOpt()
{ /* ... */ }

// Constructor: store the problem (accessible as m_op below) and apply default options.
template <typename T>
gsIpOpt<T>::gsIpOpt(gsOptProblem<T> * problem)
{
    this->defaultOptions();
#ifdef gsIpOpt_ENABLED
    m_data = new gsIpOptPrivate<T>();
    m_data->tnlp = new gsIpOptTNLP<T>(m_op);
#endif
}
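// Ownership note: Ipopt::SmartPtr is an intrusive reference-counted pointer
// (Ipopt::TNLP derives from Ipopt::ReferencedObject), so the adapter allocated in
// the constructor above stays alive as long as either this private data or the
// IpoptApplication used in solve() still references it.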
template <typename T>
void gsIpOpt<T>::solve(const gsMatrix<T> & initialGuess)
{
#ifdef gsIpOpt_ENABLED
    Ipopt::SmartPtr<Ipopt::IpoptApplication> app = IpoptApplicationFactory();
    app->RethrowNonIpoptException(true);

    Ipopt::ApplicationReturnStatus status;

    // Initialize the application, reading solver options from the options file 'path'
    // (set up above, not shown in this excerpt).
    status = app->Initialize( path );

    if (status != Ipopt::Solve_Succeeded)
    {
        gsWarn << "\n\n*** Error during initialization!\n";
        return;
    }
    // Hand the initial guess to the adapter and run the optimization.
    gsIpOptTNLP<T> * tmp = dynamic_cast<gsIpOptTNLP<T> *>(Ipopt::GetRawPtr(m_data->tnlp));
    tmp->setCurrentDesign(initialGuess);
    status = app->OptimizeTNLP(m_data->tnlp);

    // Retrieve statistics and the solution found by IpOpt.
    m_numIterations  = app->Statistics()->IterationCount();
    m_finalObjective = app->Statistics()->FinalObjective();
    m_curDesign      = tmp->currentDesign();
    m_lambda         = tmp->lambda();
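// Typical usage of the pieces above, as a sketch. The accessors queried after the
// solve (iterations(), objective(), currentDesign()) are assumed to be provided by
// the gsOptimizer base class and are not verified against this excerpt:
//
//   MyProblem<real_t> problem;                 // the hypothetical problem sketched earlier
//   gsIpOpt<real_t>   solver(&problem);
//
//   gsMatrix<real_t> x0(problem.numDesignVars(), 1);
//   x0.setZero();                              // initial guess, forwarded to setCurrentDesign()
//   solver.solve(x0);
//
//   gsInfo << "iterations: " << solver.iterations()
//          << ", objective: " << solver.objective()
//          << "\ndesign:\n"   << solver.currentDesign() << "\n";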