SGVector<float64_t> CRelaxedTree::eval_binary_model_K(CSVM *svm)
{
    // Evaluate the binary SVM in regression mode on the tree's feature set.
    CRegressionLabels *outputs = svm->apply_regression(m_feats);

    // Shift every raw SVM output by the constant A/C term of the
    // relaxed-tree objective.
    const float64_t shift = m_A / m_svm_C;
    const int32_t num_out = outputs->get_num_labels();
    SGVector<float64_t> shifted(num_out);
    for (int32_t idx = 0; idx < num_out; ++idx)
        shifted[idx] = outputs->get_label(idx) - shift;

    SG_UNREF(outputs); // apply_regression() returned an owning reference
    return shifted;
}
int main(int argc, char** argv)
{
	// Bring up the Shogun runtime; print_message routes library output.
	init_shogun(&print_message);
	
#ifdef HAVE_LAPACK
	
	// create some data: a 2x3 matrix (2 dims, 3 vectors) filled with 0..5
	SGMatrix<float64_t> matrix(2,3);
	for (int32_t i=0; i<6; i++)
		matrix.matrix[i]=i;
	
	//Labels: one regression target per vector
	CRegressionLabels* labels = new CRegressionLabels(3);
	SG_REF(labels);
	
	labels->set_label(0, -1);
	labels->set_label(1, +1);
	labels->set_label(2, -1);
	
	// create three 2-dimensional vectors 
	// shogun will now own the matrix created
	CDenseFeatures<float64_t>* features= new CDenseFeatures<float64_t>(matrix);
	SG_REF(features);
	
	// create gaussian kernel with cache 10MB, width 0.5
	CGaussianKernel* kernel = new CGaussianKernel(10, 0.5);
	SG_REF(kernel);
	
	//Gaussian Process Regression with sigma = 1.
	// Stack-allocated, so it is destroyed automatically at scope exit.
	CGaussianProcessRegression regressor(1.0, kernel, features, labels);
	
	regressor.train(features);
	//Get mean predictions
	// NOTE(review): apply() presumably returns a new, already-referenced
	// labels object that obtain_from_generic() downcasts — confirm that no
	// extra unref of the generic result is needed here.
	CRegressionLabels* result = CRegressionLabels::obtain_from_generic(regressor.apply());
	SG_REF(result);
	
	// Predictive covariance over the training features.
	SGMatrix<float64_t> cov = regressor.getCovarianceMatrix(features);
	
	SGMatrix<float64_t>::display_matrix(cov.matrix, cov.num_rows, cov.num_cols, "Covariance Matrix");

	// output predictions (one mean per training vector)
	for (int32_t i=0; i<3; i++)
		SG_SPRINT("output[%d]=%f\n", i, result->get_label(i));

	// free up memory — release the references taken above
	SG_UNREF(result);
	SG_UNREF(features);
	SG_UNREF(labels);
	SG_UNREF(kernel);
#endif
	
	exit_shogun();
	return 0;
}
Example #3
0
void CShareBoost::compute_pred()
{
	CDenseFeatures<float64_t> *fea = dynamic_cast<CDenseFeatures<float64_t> *>(m_features);
	CDenseSubsetFeatures<float64_t> *subset_fea = new CDenseSubsetFeatures<float64_t>(fea, m_activeset);
	SG_REF(subset_fea);
	for (int32_t i=0; i < m_multiclass_strategy->get_num_classes(); ++i)
	{
		CLinearMachine *machine = dynamic_cast<CLinearMachine *>(m_machines->get_element(i));
		CRegressionLabels *lab = machine->apply_regression(subset_fea);
		SGVector<float64_t> lab_raw = lab->get_labels();
		std::copy(lab_raw.vector, lab_raw.vector + lab_raw.vlen, m_pred.get_column_vector(i));
		SG_UNREF(machine);
		SG_UNREF(lab);
	}
	SG_UNREF(subset_fea);
}
int main(int argc, char **argv)
{
	init_shogun_with_defaults();


	/* create some data and labels (contents filled by build_matrices below) */
	SGMatrix<float64_t> matrix =
			SGMatrix<float64_t>(dim_vectors, num_vectors);

	SGMatrix<float64_t> matrix2 =
			SGMatrix<float64_t>(dim_vectors, num_vectors);
			
	CRegressionLabels* labels=new CRegressionLabels(num_vectors);

	build_matrices(matrix2, matrix, labels);
	
	/* create training features */
	CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t> ();
	features->set_feature_matrix(matrix);

	/* create testing features */
	CDenseFeatures<float64_t>* features2=new CDenseFeatures<float64_t> ();
	features2->set_feature_matrix(matrix2);

	SG_REF(features);
	SG_REF(features2);

	SG_REF(labels);
	
	/*Allocate our Kernel*/
	CGaussianKernel* test_kernel = new CGaussianKernel(10, 2);

	test_kernel->init(features, features);

	/*Allocate our mean function*/
	CZeroMean* mean = new CZeroMean();
	/* BUGFIX: hold a local reference. SG_UNREF(mean) is called at the end of
	 * this function, but the only reference was the one held by the inference
	 * method — which is released first (SG_UNREF(inf)), so the final
	 * SG_UNREF(mean) would over-release a dead object. Balance it here. */
	SG_REF(mean);
	
	/*Allocate our likelihood function*/
	CGaussianLikelihood* lik = new CGaussianLikelihood();

	/*Allocate our inference method (takes references on its arguments)*/
	CExactInferenceMethod* inf =
			new CExactInferenceMethod(test_kernel, 
						  features, mean, labels, lik);

	SG_REF(inf);

	/*Finally use these to allocate the Gaussian Process Object*/
	CGaussianProcessRegression* gp =
			new CGaussianProcessRegression(inf, features, labels);

	SG_REF(gp);
	
	/*Build the parameter tree for model selection*/
	CModelSelectionParameters* root = build_tree(inf, lik, test_kernel);

	/*Criterion for gradient search*/
	CGradientCriterion* crit = new CGradientCriterion();

	/*This will evaluate our inference method for its derivatives*/
	CGradientEvaluation* grad=new CGradientEvaluation(gp, features, labels,
			crit);

	grad->set_function(inf);

	gp->print_modsel_params();

	root->print_tree();

	/* handles all of the above structures in memory */
	CGradientModelSelection* grad_search=new CGradientModelSelection(
			root, grad);

	/* set autolocking to false to get rid of warnings */
	grad->set_autolock(false);

	/*Search for best parameters (true = print state while searching)*/
	CParameterCombination* best_combination=grad_search->select_model(true);

	/*Output all the results and information*/
	if (best_combination)
	{
		SG_SPRINT("best parameter(s):\n");
		best_combination->print_tree();

		best_combination->apply_to_machine(gp);
	}

	/* evaluate the tuned model; result type is checked below */
	CGradientResult* result=(CGradientResult*)grad->evaluate();

	if(result->get_result_type() != GRADIENTEVALUATION_RESULT)
		SG_SERROR("Evaluation result not a GradientEvaluationResult!");

	result->print_result();

	/* pull out the learned quantities for display */
	SGVector<float64_t> alpha = inf->get_alpha();
	SGVector<float64_t> labe = labels->get_labels();
	SGVector<float64_t> diagonal = inf->get_diagonal_vector();
	SGMatrix<float64_t> cholesky = inf->get_cholesky();

	/* switch the GP's output between predictive variances and means */
	gp->set_return_type(CGaussianProcessRegression::GP_RETURN_COV);

	CRegressionLabels* covariance = gp->apply_regression(features);

	gp->set_return_type(CGaussianProcessRegression::GP_RETURN_MEANS);
	
	CRegressionLabels* predictions = gp->apply_regression();

	alpha.display_vector("Alpha Vector");
	labe.display_vector("Labels");
	diagonal.display_vector("sW Matrix");
	covariance->get_labels().display_vector("Predicted Variances");
	predictions->get_labels().display_vector("Mean Predictions");
	cholesky.display_matrix("Cholesky Matrix L");
	matrix.display_matrix("Training Features");
	matrix2.display_matrix("Testing Features");

	/*free memory — release every reference taken above*/
	SG_UNREF(features);
	SG_UNREF(features2);
	SG_UNREF(predictions);
	SG_UNREF(covariance);
	SG_UNREF(labels);
	SG_UNREF(inf);
	SG_UNREF(gp);
	SG_UNREF(grad_search);
	SG_UNREF(best_combination);
	SG_UNREF(result);
	SG_UNREF(mean);

	exit_shogun();

	return 0;
}
int main(int argc, char **argv)
{
	init_shogun(&print_message, &print_message, &print_message);

	int32_t num_vectors=4;
	int32_t dim_vectors=3;

	/* create some data and labels: 3 dims x 4 vectors, column-major */
	SGMatrix<float64_t> matrix =
			SGMatrix<float64_t>(dim_vectors, num_vectors);

	matrix[0] = -1;
	matrix[1] = -1;
	matrix[2] = -1;
	matrix[3] = 1;
	matrix[4] = 1;
	matrix[5] = 1;
	matrix[6] = -10;
	matrix[7] = -10;
	matrix[8] = -10;
	matrix[9] = 3;
	matrix[10] = 2;
	matrix[11] = 1;

	/* testing data: a deterministic pseudo-random fill */
	SGMatrix<float64_t> matrix2 =
			SGMatrix<float64_t>(dim_vectors, num_vectors);

	for (int32_t i=0; i<num_vectors*dim_vectors; i++)
		matrix2[i]=i*sin(i)*.96;

	/* create training features */
	CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t> ();
	features->set_feature_matrix(matrix);

	/* create testing features */
	CDenseFeatures<float64_t>* features2=new CDenseFeatures<float64_t> ();
	features2->set_feature_matrix(matrix2);

	SG_REF(features);
	SG_REF(features2);

	CRegressionLabels* labels=new CRegressionLabels(num_vectors);

	/* create labels, two classes: alternate +1 / -1 */
	for (index_t i=0; i<num_vectors; ++i)
	{
		if(i%2 == 0) labels->set_label(i, 1);
		else labels->set_label(i, -1);
	}

	SG_REF(labels);
	CGaussianKernel* test_kernel = new CGaussianKernel(10, 2);

	test_kernel->init(features, features);

	CZeroMean* mean = new CZeroMean();
	CGaussianLikelihood* lik = new CGaussianLikelihood();
	lik->set_sigma(0.01);

	/* exact GP inference; takes references on its arguments */
	CExactInferenceMethod* inf =
			new CExactInferenceMethod(test_kernel, features, mean, labels, lik);


	SG_REF(inf);

	CGaussianProcessRegression* gp =
			new CGaussianProcessRegression(inf, features, labels);

	/* model-selection parameter tree:
	 * root -> inference_method -> {scale, likelihood_model.sigma, kernel.width} */
	CModelSelectionParameters* root=new CModelSelectionParameters();

	CModelSelectionParameters* c1 =
			new CModelSelectionParameters("inference_method", inf);
	root->append_child(c1);

	CModelSelectionParameters* c2 = new CModelSelectionParameters("scale");
	c1 ->append_child(c2);
	c2->build_values(0.01, 4.0, R_LINEAR);


	CModelSelectionParameters* c3 =
			new CModelSelectionParameters("likelihood_model", lik);
	c1->append_child(c3);

	CModelSelectionParameters* c4=new CModelSelectionParameters("sigma");
	c3->append_child(c4);
	c4->build_values(0.001, 4.0, R_LINEAR);

	CModelSelectionParameters* c5 =
			new CModelSelectionParameters("kernel", test_kernel);
	c1->append_child(c5);

	CModelSelectionParameters* c6 =
			new CModelSelectionParameters("width");
	c5->append_child(c6);
	c6->build_values(0.001, 4.0, R_LINEAR);

	/* cross validation class for evaluation in model selection */
	SG_REF(gp);

	CGradientCriterion* crit = new CGradientCriterion();

	CGradientEvaluation* grad=new CGradientEvaluation(gp, features, labels,
			crit);

	grad->set_function(inf);

	gp->print_modsel_params();

	root->print_tree();

	/* handles all of the above structures in memory */
	CGradientModelSelection* grad_search=new CGradientModelSelection(
			root, grad);

	/* set autolocking to false to get rid of warnings */
	grad->set_autolock(false);

	/* BUGFIX: cap the number of gradient evaluations BEFORE running the
	 * search — in the original this was set after select_model() had
	 * already returned, so the limit had no effect. */
	grad_search->set_max_evaluations(5);

	CParameterCombination* best_combination=grad_search->select_model(true);

	if (best_combination)
	{
		SG_SPRINT("best parameter(s):\n");
		best_combination->print_tree();

		best_combination->apply_to_machine(gp);
	}

	CGradientResult* result=(CGradientResult*)grad->evaluate();

	if(result->get_result_type() != GRADIENTEVALUATION_RESULT)
		SG_SERROR("Evaluation result not a GradientEvaluationResult!");

	result->print_result();

	/* pull out the learned quantities for display */
	SGVector<float64_t> alpha = inf->get_alpha();
	SGVector<float64_t> labe = labels->get_labels();
	SGVector<float64_t> diagonal = inf->get_diagonal_vector();
	SGMatrix<float64_t> cholesky = inf->get_cholesky();

	/* switch the GP's output between predictive variances and means */
	gp->set_return_type(CGaussianProcessRegression::GP_RETURN_COV);

	CRegressionLabels* covariance = gp->apply_regression(features);

	gp->set_return_type(CGaussianProcessRegression::GP_RETURN_MEANS);
	CRegressionLabels* predictions = gp->apply_regression();

	alpha.display_vector("Alpha Vector");
	labe.display_vector("Labels");
	diagonal.display_vector("sW Matrix");
	covariance->get_labels().display_vector("Predicted Variances");
	predictions->get_labels().display_vector("Mean Predictions");
	cholesky.display_matrix("Cholesky Matrix L");
	matrix.display_matrix("Training Features");
	matrix2.display_matrix("Testing Features");

	/*free memory — release every reference taken above*/
	SG_UNREF(features);
	SG_UNREF(features2);
	SG_UNREF(predictions);
	SG_UNREF(covariance);
	SG_UNREF(labels);
	SG_UNREF(inf);
	SG_UNREF(gp);
	SG_UNREF(grad_search);
	SG_UNREF(best_combination);
	SG_UNREF(result);

	exit_shogun();

	return 0;
}
int main(int argc, char **argv)
{
    init_shogun(&print_message, &print_message, &print_message);

    int32_t num_vectors=4;
    int32_t dim_vectors=3;

    /* create some data and labels: 3 dims x 4 vectors, column-major */
    SGMatrix<float64_t> matrix= SGMatrix<float64_t>(dim_vectors, num_vectors);

    matrix[0] = -1;
    matrix[1] = -1;
    matrix[2] = -1;
    matrix[3] = 1;
    matrix[4] = 1;
    matrix[5] = 1;
    matrix[6] = -10;
    matrix[7] = -10;
    matrix[8] = -10;
    matrix[9] = 3;
    matrix[10] = 2;
    matrix[11] = 1;

    /* testing data: a deterministic pseudo-random fill */
    SGMatrix<float64_t> matrix2= SGMatrix<float64_t>(dim_vectors, num_vectors);
    for (int32_t i=0; i<num_vectors*dim_vectors; i++)
        matrix2[i]=i*sin(i)*.96;

    /* create training features */
    CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t> ();
    features->set_feature_matrix(matrix);

    /* create testing features */
    CDenseFeatures<float64_t>* features2=new CDenseFeatures<float64_t> ();
    features2->set_feature_matrix(matrix2);

    SG_REF(features);
    SG_REF(features2);

    CRegressionLabels* labels=new CRegressionLabels(num_vectors);

    /* create labels, two classes: alternate +1 / -1 */
    for (index_t i=0; i<num_vectors; ++i)
    {
        if(i%2 == 0) labels->set_label(i, 1);
        else labels->set_label(i, -1);
    }

    SG_REF(labels);
    CGaussianKernel* test_kernel = new CGaussianKernel(10, 2);

    test_kernel->init(features, features);

    CZeroMean* mean = new CZeroMean();
    CGaussianLikelihood* lik = new CGaussianLikelihood();
    lik->set_sigma(0.01);
    /* NOTE(review): kernel, mean, lik and inf are not SG_REF'd here —
     * presumably inf and gp take owning references to their constructor
     * arguments, so SG_UNREF(gp) below releases the whole chain. Confirm
     * against the shogun reference-counting conventions. */
    CExactInferenceMethod* inf = new CExactInferenceMethod(test_kernel, features, mean, labels, lik);
    CGaussianProcessRegression* gp = new CGaussianProcessRegression(inf, features, labels);

    /* learned quantities and predictions on the held-out features2 */
    SGVector<float64_t> alpha = inf->get_alpha();
    SGVector<float64_t> labe = labels->get_labels();
    SGVector<float64_t> diagonal = inf->get_diagonal_vector();
    SGMatrix<float64_t> cholesky = inf->get_cholesky();
    SGVector<float64_t> covariance = gp->getCovarianceVector(features2);
    CRegressionLabels* predictions = gp->apply_regression(features2);

    SGVector<float64_t>::display_vector(alpha.vector, alpha.vlen, "Alpha Vector");
    SGVector<float64_t>::display_vector(labe.vector, labe.vlen, "Labels");
    SGVector<float64_t>::display_vector(diagonal.vector, diagonal.vlen, "sW Matrix");
    SGVector<float64_t>::display_vector(covariance.vector, covariance.vlen, "Predicted Variances");
    SGVector<float64_t>::display_vector(predictions->get_labels().vector, predictions->get_labels().vlen, "Mean Predictions");
    SGMatrix<float64_t>::display_matrix(cholesky.matrix, cholesky.num_rows, cholesky.num_cols, "Cholesky Matrix L");
    SGMatrix<float64_t>::display_matrix(matrix.matrix, matrix.num_rows, matrix.num_cols, "Training Features");
    SGMatrix<float64_t>::display_matrix(matrix2.matrix, matrix2.num_rows, matrix2.num_cols, "Testing Features");

    /*free memory — release the references taken above*/
    SG_UNREF(features);
    SG_UNREF(features2);
    SG_UNREF(predictions);
    SG_UNREF(labels);
    SG_UNREF(gp);

    exit_shogun();

    return 0;
}