double CostFromErrFunc::value(const vector<double>& xin, Model* model) {
  VectorXd x = getVec(xin, vars_);
  VectorXd err = f_->call(x);
  if (coeffs_.size()>0) err.array() *= coeffs_.array();
  switch (pen_type_) {
    case SQUARED: return err.array().square().sum();
    case ABS: return err.array().abs().sum();
    case HINGE: return err.cwiseMax(VectorXd::Zero(err.size())).sum();
    default: assert(0 && "unreachable"); 
  }
  
  return 0; // avoid compiler warning
}
Example #2
0
//! Take one (sub)gradient step on the metric _M and project the result
//! back onto the positive semidefinite cone.
//! \param G      (sub)gradient of the objective w.r.t. _M
//! \param alpha  step size
void drwnNNGraphMLearner::subGradientStep(const MatrixXd& G, double alpha)
{
    DRWN_FCN_TIC;

    // gradient step
    _M -= alpha * G;

    // project onto psd: clamp negative eigenvalues up to a small positive
    // floor so the metric stays (strictly) positive definite
    SelfAdjointEigenSolver<MatrixXd> es;
    es.compute(_M);

    // eigenvalues of a self-adjoint solve are real by construction,
    // so no .real() view is needed
    const VectorXd d = es.eigenvalues();
    if ((d.array() < 0.0).any()) {
        const MatrixXd V = es.eigenvectors();
        // the eigenvector matrix of a symmetric matrix is orthonormal,
        // so V^{-1} == V^T; the transpose avoids an O(n^3) LU inversion
        // and is numerically exact, and V * D * V^T keeps _M symmetric
        _M = V * d.cwiseMax(VectorXd::Constant(d.rows(), DRWN_EPSILON)).asDiagonal() * V.transpose();
    }

    DRWN_FCN_TOC;
}