void IpoptAlgorithm::calc_number_of_bounds(
   const Vector& x,
   const Vector& x_L,
   const Vector& x_U,
   const Matrix& Px_L,
   const Matrix& Px_U,
   Index&        n_tot,
   Index&        n_only_lower,
   Index&        n_both,
   Index&        n_only_upper
)
{
   DBG_START_METH("IpoptAlgorithm::calc_number_of_bounds", dbg_verbosity);

   n_tot = x.Dim();

   SmartPtr<Vector> tmpx = x.MakeNew();
   SmartPtr<Vector> tmpxL = x_L.MakeNew();
   SmartPtr<Vector> tmpxU = x_U.MakeNew();

   tmpxL->Set(-1.);
   tmpxU->Set(2.);
   Px_L.MultVector(1.0, *tmpxL, 0.0, *tmpx);
   Px_U.MultVector(1.0, *tmpxU, 1.0, *tmpx);

   // Now, tmpx has elements
   //  -1 : if component has only lower bound
   //   0 : if component has no bound
   //   1 : if component has both lower and upper bound
   //   2 : if component has only upper bound
   DBG_PRINT_VECTOR(2, "x-indicator", *tmpx);

   SmartPtr<Vector> tmpx0 = x.MakeNew();
   tmpx0->Set(0.);

   SmartPtr<Vector> tmpx2 = x.MakeNew();
   tmpx2->Set(-1.0);
   tmpx2->Axpy(1.0, *tmpx);
   tmpx2->ElementWiseMax(*tmpx0); // tmpx2 is now 1 in those
                                  // components with only upper bounds
   n_only_upper = (Index) tmpx2->Asum();

   tmpx->Axpy(-2., *tmpx2);       // now make all those entries for
                                  // only upper bounds zero in tmpx

   tmpx2->Copy(*tmpx);
   tmpx2->ElementWiseMax(*tmpx0); // tmpx2 is now 1 in those
                                  // components with both bounds
   n_both = (Index) tmpx2->Asum();

   tmpx->Axpy(-1., *tmpx2);       // now make all those entries for
                                  // both bounds zero in tmpx

   tmpx->ElementWiseMin(*tmpx0);  // tmpx is now -1 in those with only
                                  // lower bounds
   n_only_lower = (Index) tmpx->Asum();
}
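// Illustrative sketch (comments only, not part of the build): a small worked
// example of the indicator arithmetic above.  The bounds chosen here are
// hypothetical and only serve to trace the computation.
// Suppose x = (x1, x2, x3) with x1 >= 0, 0 <= x2 <= 1, and x3 <= 5.  Then
// Px_L expands tmpxL = (-1, -1) into the lower-bounded components 1 and 2,
// giving tmpx = (-1, -1, 0), and Px_U adds tmpxU = (2, 2) into the
// upper-bounded components 2 and 3, giving the indicator tmpx = (-1, 1, 2).
// The subsequent ElementWiseMax/Axpy/Asum steps then count
//   n_only_upper = 1, n_both = 1, n_only_lower = 1, with n_tot = 3.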
void GradientScaling::DetermineScalingParametersImpl(
   const SmartPtr<const VectorSpace>    x_space,
   const SmartPtr<const VectorSpace>    p_space,
   const SmartPtr<const VectorSpace>    c_space,
   const SmartPtr<const VectorSpace>    d_space,
   const SmartPtr<const MatrixSpace>    jac_c_space,
   const SmartPtr<const MatrixSpace>    jac_d_space,
   const SmartPtr<const SymMatrixSpace> h_space,
   const Matrix&                        Px_L,
   const Vector&                        x_L,
   const Matrix&                        Px_U,
   const Vector&                        x_U,
   Number&                              df,
   SmartPtr<Vector>&                    dx,
   SmartPtr<Vector>&                    dc,
   SmartPtr<Vector>&                    dd
)
{
   DBG_ASSERT(IsValid(nlp_));

   SmartPtr<Vector> x = x_space->MakeNew();
   SmartPtr<Vector> p = p_space->MakeNew();
   if (!nlp_->GetStartingPoint(GetRawPtr(x), true,
                               GetRawPtr(p), true,
                               NULL, false,
                               NULL, false,
                               NULL, false,
                               NULL, false))
   {
      THROW_EXCEPTION(FAILED_INITIALIZATION,
                      "Error getting initial point from NLP in GradientScaling.\n");
   }

   //
   // Calculate grad_f scaling
   //
   SmartPtr<Vector> grad_f = x_space->MakeNew();
   if (nlp_->Eval_grad_f(*x, *p, *grad_f))
   {
      double max_grad_f = grad_f->Amax();
      df = 1.;
      if (scaling_obj_target_gradient_ == 0.)
      {
         if (max_grad_f > scaling_max_gradient_)
         {
            df = scaling_max_gradient_ / max_grad_f;
         }
      }
      else
      {
         if (max_grad_f == 0.)
         {
            Jnlst().Printf(J_WARNING, J_INITIALIZATION,
                           "Gradient of objective function is zero at starting point. Cannot determine scaling factor based on scaling_obj_target_gradient option.\n");
         }
         else
         {
            df = scaling_obj_target_gradient_ / max_grad_f;
         }
      }
      df = Max(df, scaling_min_value_);
      Jnlst().Printf(J_DETAILED, J_INITIALIZATION,
                     "Scaling parameter for objective function = %e\n", df);
   }
   else
   {
      Jnlst().Printf(J_WARNING, J_INITIALIZATION,
                     "Error evaluating objective gradient at user provided starting point.\n No scaling factor for objective function computed!\n");
      df = 1.;
   }

   //
   // No x scaling
   //
   dx = NULL;

   dc = NULL;
   if (c_space->Dim() > 0)
   {
      //
      // Calculate c scaling
      //
      SmartPtr<Matrix> jac_c = jac_c_space->MakeNew();
      if (nlp_->Eval_jac_c(*x, *p, *jac_c))
      {
         dc = c_space->MakeNew();
         const double dbl_min = std::numeric_limits<double>::min();
         dc->Set(dbl_min);
         jac_c->ComputeRowAMax(*dc, false);
         Number arow_max = dc->Amax();
         if (scaling_constr_target_gradient_ <= 0.)
         {
            if (arow_max > scaling_max_gradient_)
            {
               dc->ElementWiseReciprocal();
               dc->Scal(scaling_max_gradient_);
               SmartPtr<Vector> dummy = dc->MakeNew();
               dummy->Set(1.);
               dc->ElementWiseMin(*dummy);
            }
            else
            {
               dc = NULL;
            }
         }
         else
         {
            dc->Set(scaling_constr_target_gradient_ / arow_max);
         }
         if (IsValid(dc) && scaling_min_value_ > 0.)
         {
            SmartPtr<Vector> tmp = dc->MakeNew();
            tmp->Set(scaling_min_value_);
            dc->ElementWiseMax(*tmp);
         }
      }
      else
      {
         Jnlst().Printf(J_WARNING, J_INITIALIZATION,
                        "Error evaluating Jacobian of equality constraints at user provided starting point.\n No scaling factors for equality constraints computed!\n");
      }
   }

   dd = NULL;
   if (d_space->Dim() > 0)
   {
      //
      // Calculate d scaling
      //
      SmartPtr<Matrix> jac_d = jac_d_space->MakeNew();
      if (nlp_->Eval_jac_d(*x, *p, *jac_d))
      {
         dd = d_space->MakeNew();
         const double dbl_min = std::numeric_limits<double>::min();
         dd->Set(dbl_min);
         jac_d->ComputeRowAMax(*dd, false);
         Number arow_max = dd->Amax();
         if (scaling_constr_target_gradient_ <= 0.)
         {
            if (arow_max > scaling_max_gradient_)
            {
               dd->ElementWiseReciprocal();
               dd->Scal(scaling_max_gradient_);
               SmartPtr<Vector> dummy = dd->MakeNew();
               dummy->Set(1.);
               dd->ElementWiseMin(*dummy);
            }
            else
            {
               dd = NULL;
            }
         }
         else
         {
            dd->Set(scaling_constr_target_gradient_ / arow_max);
         }
         if (IsValid(dd) && scaling_min_value_ > 0.)
         {
            SmartPtr<Vector> tmp = dd->MakeNew();
            tmp->Set(scaling_min_value_);
            dd->ElementWiseMax(*tmp);
         }
      }
      else
      {
         Jnlst().Printf(J_WARNING, J_INITIALIZATION,
                        "Error evaluating Jacobian of inequality constraints at user provided starting point.\n No scaling factors for inequality constraints computed!\n");
      }
   }
}
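// Summary sketch of the constraint scaling rule implemented above (comments
// only; this restates the code, it is not an additional API).  With
// a_i = max_j |row i of the Jacobian| at the starting point:
//   if scaling_constr_target_gradient_ <= 0 and max_i a_i > scaling_max_gradient_:
//      scale_i = min(1, scaling_max_gradient_ / a_i)
//   if scaling_constr_target_gradient_ <= 0 and max_i a_i <= scaling_max_gradient_:
//      no scaling (the scaling vector is left NULL)
//   if scaling_constr_target_gradient_ > 0:
//      scale_i = scaling_constr_target_gradient_ / max_i a_i  (same for all rows)
// In the first and third cases the factors are additionally floored at
// scaling_min_value_ when that option is positive; the objective factor df
// follows the analogous pattern with max |grad f|.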