static action_t cont_macroexpand1() { if (iscons(expr)) { ref_t symbol = check_symbol(car(expr)); if (has_function(symbol)) { ref_t func = get_function(symbol); if (ismacro(func)) { C(cont)->fn = cont_apply_apply, C(cont)->val[0] = func; expr = cdr(expr); return ACTION_APPLY_CONT; } } } pop_cont(); return ACTION_APPLY_CONT; }
void BonminInterface::init(const Dict& opts) {
  // Initialize the BONMIN MINLP solver interface: parse user options, set up
  // the NLP callback functions (objective, constraints and their derivatives),
  // detect which variables/constraints enter nonlinearly, collect SOS1
  // constraint metadata, and reserve solver work memory.
  //
  // Call the init method of the base class
  Nlpsol::init(opts);

  // Default options
  pass_nonlinear_variables_ = true;
  pass_nonlinear_constraints_ = true;
  // NOTE(review): these three option dicts are collected from `opts` below but
  // never read again in this function — confirm whether they should be
  // forwarded to the corresponding create_function/set_function calls.
  Dict hess_lag_options, jac_g_options, grad_f_options;
  std::vector< std::vector<int> > sos1_groups;
  std::vector< std::vector<double> > sos1_weights;

  // Read user options
  for (auto&& op : opts) {
    if (op.first=="bonmin") {
      // Options passed through verbatim to the BONMIN solver itself.
      opts_ = op.second;
    } else if (op.first=="pass_nonlinear_variables") {
      pass_nonlinear_variables_ = op.second;
    } else if (op.first=="pass_nonlinear_constraints") {
      pass_nonlinear_constraints_ = op.second;
    } else if (op.first=="var_string_md") {
      var_string_md_ = op.second;
    } else if (op.first=="var_integer_md") {
      var_integer_md_ = op.second;
    } else if (op.first=="var_numeric_md") {
      var_numeric_md_ = op.second;
    } else if (op.first=="con_string_md") {
      con_string_md_ = op.second;
    } else if (op.first=="con_integer_md") {
      con_integer_md_ = op.second;
    } else if (op.first=="con_numeric_md") {
      con_numeric_md_ = op.second;
    } else if (op.first=="hess_lag_options") {
      hess_lag_options = op.second;
    } else if (op.first=="jac_g_options") {
      jac_g_options = op.second;
    } else if (op.first=="grad_f_options") {
      grad_f_options = op.second;
    } else if (op.first=="hess_lag") {
      // User-supplied Lagrangian Hessian: expects 4 inputs, 1 output.
      Function f = op.second;
      casadi_assert_dev(f.n_in()==4);
      casadi_assert_dev(f.n_out()==1);
      set_function(f, "nlp_hess_l");
    } else if (op.first=="jac_g") {
      // User-supplied constraint Jacobian: 2 inputs, 2 outputs.
      Function f = op.second;
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==2);
      set_function(f, "nlp_jac_g");
    } else if (op.first=="grad_f") {
      // User-supplied objective gradient: 2 inputs, 2 outputs.
      Function f = op.second;
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==2);
      set_function(f, "nlp_grad_f");
    } else if (op.first=="sos1_groups") {
      sos1_groups = to_int(op.second.to_int_vector_vector());
      // Shift user-provided indices to internal 0-based indexing.
      for (auto & g : sos1_groups) {
        for (auto & e : g) e-= GlobalOptions::start_index;
      }
    } else if (op.first=="sos1_weights") {
      sos1_weights = op.second.to_double_vector_vector();
    } else if (op.first=="sos1_priorities") {
      sos1_priorities_ = to_int(op.second.to_int_vector());
    }
  }

  // Do we need second order derivatives?
  // Note: this inspects the *BONMIN* option dict (opts_), not `opts`:
  // an exact Hessian is used unless the user chose a quasi-Newton
  // "hessian_approximation".
  exact_hessian_ = true;
  auto hessian_approximation = opts_.find("hessian_approximation");
  if (hessian_approximation!=opts_.end()) {
    exact_hessian_ = hessian_approximation->second == "exact";
  }

  // Setup NLP functions (derivatives are generated only if the user did not
  // already supply them via the options handled above)
  create_function("nlp_f", {"x", "p"}, {"f"});
  create_function("nlp_g", {"x", "p"}, {"g"});
  if (!has_function("nlp_grad_f")) {
    create_function("nlp_grad_f", {"x", "p"}, {"f", "grad:f:x"});
  }
  if (!has_function("nlp_jac_g")) {
    create_function("nlp_jac_g", {"x", "p"}, {"g", "jac:g:x"});
  }
  jacg_sp_ = get_function("nlp_jac_g").sparsity_out(1);

  // By default, assume all variables and constraints are nonlinear
  nl_ex_.resize(nx_, true);
  nl_g_.resize(ng_, true);

  // Allocate temporary work vectors
  if (exact_hessian_) {
    if (!has_function("nlp_hess_l")) {
      create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
        {"hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
    }
    hesslag_sp_ = get_function("nlp_hess_l").sparsity_out(0);
    if (pass_nonlinear_variables_) {
      // A variable is flagged nonlinear iff its column of the Lagrangian
      // Hessian has structural nonzeros (nonzero count used as a boolean).
      const casadi_int* col = hesslag_sp_.colind();
      for (casadi_int i=0;i<nx_;++i) nl_ex_[i] = col[i+1]-col[i];
    }
  } else {
    // No Hessian sparsity available: fall back to symbolic dependency
    // analysis of the oracle (order-2 dependence of f, g on x).
    if (pass_nonlinear_variables_) nl_ex_ = oracle_.which_depends("x", {"f", "g"}, 2, false);
  }
  if (pass_nonlinear_constraints_) nl_g_ = oracle_.which_depends("x", {"g"}, 2, true);

  // Create sos info
  // Number of SOS1 groups
  sos_num_ = sos1_groups.size();
  // All groups are SOS type 1
  sos1_types_.resize(sos_num_, 1);
  casadi_assert(sos1_weights.empty() || sos1_weights.size()==sos_num_,
    "sos1_weights has incorrect size");
  casadi_assert(sos1_priorities_.empty() || sos1_priorities_.size()==sos_num_,
    "sos1_priorities has incorrect size");
  if (sos1_priorities_.empty()) sos1_priorities_.resize(sos_num_, 1);
  // Flatten the groups into CSR-style lookup vectors
  // (starts / indices / weights), as consumed by the BONMIN SOS interface.
  sos_num_nz_ = 0;
  for (casadi_int i=0;i<sos_num_;++i) {
    // get local group
    const std::vector<int>& sos1_group = sos1_groups[i];
    // Get local weights (default: unit weight per member)
    std::vector<double> default_weights(sos1_group.size(), 1.0);
    const std::vector<double>& sos1_weight = sos1_weights.empty() ? default_weights : sos1_weights[i];
    casadi_assert(sos1_weight.size()==sos1_group.size(),
      "sos1_weights has incorrect size");
    // Populate lookup vector
    sos1_starts_.push_back(sos_num_nz_);
    sos_num_nz_+=sos1_group.size();
    sos1_weights_.insert(sos1_weights_.end(), sos1_weight.begin(), sos1_weight.end());
    sos1_indices_.insert(sos1_indices_.end(), sos1_group.begin(), sos1_group.end());
  }
  // Trailing sentinel so group i spans [starts[i], starts[i+1])
  sos1_starts_.push_back(sos_num_nz_);

  // Allocate work vectors
  // NOTE(review): no slot is allocated for constraint multipliers (lam_gk_)
  // here — confirm this layout matches the consumption order in set_work.
  alloc_w(nx_, true); // xk_
  alloc_w(nx_, true); // lam_xk_
  alloc_w(ng_, true); // gk_
  alloc_w(nx_, true); // grad_fk_
  alloc_w(jacg_sp_.nnz(), true); // jac_gk_
  if (exact_hessian_) {
    alloc_w(hesslag_sp_.nnz(), true); // hess_lk_
  }
}
void BonminInterface::init(const Dict& opts) {
  // Initialize the BONMIN MINLP solver interface: parse user options, set up
  // the NLP callback functions (objective, constraints and their derivatives)
  // and reserve solver work memory.
  //
  // NOTE(review): this appears to be a second (simpler) variant of
  // BonminInterface::init — no SOS1 handling, no pass_nonlinear_constraints_.
  // Presumably it originates from a different file/version; verify which one
  // is actually built.
  //
  // Call the init method of the base class
  Nlpsol::init(opts);

  // Default options
  pass_nonlinear_variables_ = false;
  // NOTE(review): these three option dicts are collected from `opts` below but
  // never read again in this function — confirm whether they should be
  // forwarded to the corresponding create_function/set_function calls.
  Dict hess_lag_options, jac_g_options, grad_f_options;

  // Read user options
  for (auto&& op : opts) {
    if (op.first=="bonmin") {
      // Options passed through verbatim to the BONMIN solver itself.
      opts_ = op.second;
    } else if (op.first=="pass_nonlinear_variables") {
      pass_nonlinear_variables_ = op.second;
    } else if (op.first=="var_string_md") {
      var_string_md_ = op.second;
    } else if (op.first=="var_integer_md") {
      var_integer_md_ = op.second;
    } else if (op.first=="var_numeric_md") {
      var_numeric_md_ = op.second;
    } else if (op.first=="con_string_md") {
      con_string_md_ = op.second;
    } else if (op.first=="con_integer_md") {
      con_integer_md_ = op.second;
    } else if (op.first=="con_numeric_md") {
      con_numeric_md_ = op.second;
    } else if (op.first=="hess_lag_options") {
      hess_lag_options = op.second;
    } else if (op.first=="jac_g_options") {
      jac_g_options = op.second;
    } else if (op.first=="grad_f_options") {
      grad_f_options = op.second;
    } else if (op.first=="hess_lag") {
      // User-supplied Lagrangian Hessian: expects 4 inputs, 1 output.
      Function f = op.second;
      casadi_assert(f.n_in()==4);
      casadi_assert(f.n_out()==1);
      set_function(f, "nlp_hess_l");
    } else if (op.first=="jac_g") {
      // User-supplied constraint Jacobian: 2 inputs, 2 outputs.
      Function f = op.second;
      casadi_assert(f.n_in()==2);
      casadi_assert(f.n_out()==2);
      set_function(f, "nlp_jac_g");
    } else if (op.first=="grad_f") {
      // User-supplied objective gradient: 2 inputs, 2 outputs.
      Function f = op.second;
      casadi_assert(f.n_in()==2);
      casadi_assert(f.n_out()==2);
      set_function(f, "nlp_grad_f");
    }
  }

  // Do we need second order derivatives?
  // Note: this inspects the *BONMIN* option dict (opts_), not `opts`:
  // an exact Hessian is used unless the user chose a quasi-Newton
  // "hessian_approximation".
  exact_hessian_ = true;
  auto hessian_approximation = opts_.find("hessian_approximation");
  if (hessian_approximation!=opts_.end()) {
    exact_hessian_ = hessian_approximation->second == "exact";
  }

  // Setup NLP functions (derivatives are generated only if the user did not
  // already supply them via the options handled above)
  create_function("nlp_f", {"x", "p"}, {"f"});
  create_function("nlp_g", {"x", "p"}, {"g"});
  if (!has_function("nlp_grad_f")) {
    create_function("nlp_grad_f", {"x", "p"}, {"f", "grad:f:x"});
  }
  if (!has_function("nlp_jac_g")) {
    create_function("nlp_jac_g", {"x", "p"}, {"g", "jac:g:x"});
  }
  jacg_sp_ = get_function("nlp_jac_g").sparsity_out(1);

  // Allocate temporary work vectors
  if (exact_hessian_) {
    if (!has_function("nlp_hess_l")) {
      create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
        {"hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
    }
    hesslag_sp_ = get_function("nlp_hess_l").sparsity_out(0);
  } else if (pass_nonlinear_variables_) {
    // No Hessian sparsity available: fall back to symbolic dependency
    // analysis of the oracle (order-2 dependence of f, g on x).
    nl_ex_ = oracle_.which_depends("x", {"f", "g"}, 2, false);
  }

  // Allocate work vectors (order must match the consumption in set_work)
  alloc_w(nx_, true); // xk_
  alloc_w(ng_, true); // lam_gk_
  alloc_w(nx_, true); // lam_xk_
  alloc_w(ng_, true); // gk_
  alloc_w(nx_, true); // grad_fk_
  alloc_w(jacg_sp_.nnz(), true); // jac_gk_
  if (exact_hessian_) {
    alloc_w(hesslag_sp_.nnz(), true); // hess_lk_
  }
}