Example #1
bool
LogisticRegression::solve(double tolerance)
{
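  // Degenerate case: no observations, so an empty solution trivially satisfies the problem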
  if (numObservations() <= 0)
  {
    THEA_WARNING << "LogisticRegression: Solving empty problem";
    has_solution = true;
    solution.resize(0);
  }
  else
  {
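    // Set up an unconstrained dense linear least-squares solver with the requested tolerance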
    StdLinearSolver llsq(StdLinearSolver::Method::DEFAULT, StdLinearSolver::Constraint::UNCONSTRAINED);
    llsq.setTolerance(tolerance);

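    // View the flat, row-major coefficient buffer as a numObservations() x ndims matrix and solve A x ~ b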
    typedef Eigen::Map< MatrixX<double, MatrixLayout::ROW_MAJOR> > M;
    M a(&llsq_coeffs[0], numObservations(), ndims);
    has_solution = llsq.solve(MatrixWrapper<M>(&a), &llsq_consts[0]);

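    // Keep the fitted coefficients on success, otherwise clear the solution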
    if (has_solution)
      solution = Eigen::Map<VectorXd>(const_cast<double *>(llsq.getSolution()), ndims);
    else
      solution.resize(0);
  }

  return has_solution;
}
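A minimal calling sketch for this snippet is given below. Only solve(), numObservations() and the solution members appear above; the constructor signature, addObservation() and getSolution() are assumptions about the surrounding class and may differ in the real API.

// Hypothetical usage -- constructor, addObservation() and getSolution() are assumed, not shown above
LogisticRegression model(2);                    // assumed: 2 feature dimensions
double x0[] = { 0.5, 1.0 }, x1[] = { 2.0, 3.5 }, x2[] = { 3.0, 0.5 };
model.addObservation(x0, 0.2);                  // assumed: (features, observed response in (0, 1))
model.addObservation(x1, 0.9);
model.addObservation(x2, 0.4);
if (model.solve(1e-10))                         // tolerance is forwarded to the least-squares solver
{
  // fitted coefficients would be read back here, e.g. via getSolution()
}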
Example #2
void Interpolate1D::setObservation(const float & x,
                                   const float & y,
                                   const int & idx)
{
    // An index past the current range appends a new sample instead of overwriting
    if(idx >= numObservations()) {
        addObservation(x, y);
        return;
    }

    // Otherwise overwrite the existing sample in place
    m_observations[idx] = Float2(x, y);
}
Example #3
bool Interpolate1D::learn()
{
    // Need at least two samples to fit an interpolant
    const int dim = numObservations();
    if(dim < 2) {
        std::cout << "Interpolate1D has too few observations " << dim << "\n";
        return false;
    }

    // Copy the observations into dim x 1 training matrices
    m_xTrain->resize(dim, 1);
    m_yTrain->resize(dim, 1);

    for(int i = 0; i < dim; ++i) {
        m_xTrain->column(0)[i] = m_observations[i].x;
        m_yTrain->column(0)[i] = m_observations[i].y;
    }

    // Center the x and y training data about their means
    center_data(*m_xTrain, 1, (float)dim, *m_xMean);
    center_data(*m_yTrain, 1, (float)dim, *m_yMean);

    // Rebuild the RBF kernel with a bandwidth proportional to the domain extent
    delete m_rbf;
    m_rbf = new RbfKernel<float>(.125f * (m_bound.y - m_bound.x));

    // Build the training covariance (Gram) matrix from the kernel
    return m_covTrain->create(*m_xTrain, *m_rbf);
}
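Taken together, the two Interpolate1D snippets suggest a driver along these lines. Only setObservation(), addObservation(), numObservations() and learn() are taken from the code above; the default constructor and the sample values are assumptions for illustration.

// Hypothetical driver -- only the members shown above are taken from the original code
Interpolate1D interp;                        // assumed: default constructor
for(int i = 0; i < 8; ++i) {
    // an index >= numObservations() falls through to addObservation(), so this appends 8 samples
    interp.setObservation((float)i, 0.5f * (float)i, i);
}
interp.setObservation(0.f, 1.f, 0);          // overwrite the first sample in place
if(!interp.learn()) {
    // fewer than 2 observations, or the covariance matrix could not be built
}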