Example #1
File: KNN.cpp  Project: AsherBond/shogun
CLabels* CKNN::classify_NN()
{
	ASSERT(distance);
	ASSERT(num_classes>0);

	int32_t num_lab = distance->get_num_vec_rhs();
	ASSERT(num_lab);

	CLabels* output = new CLabels(num_lab);
	float64_t* distances = new float64_t[num_train_labels];

	ASSERT(distances);
	SG_INFO("%d test examples\n", num_lab);
	CSignal::clear_cancel();

	// for each test example
	for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
	{
		SG_PROGRESS(i,0,num_lab);

		// get distances from i-th test example to 0..num_train_labels-1 train examples
		distances_lhs(distances,0,num_train_labels-1,i);
		int32_t j;

		// assume the 0th train example is the nearest to the i-th test example
		int32_t out_idx = 0;
		float64_t min_dist = distances[0];

		// searching for nearest neighbor by comparing distances
		for (j=0; j<num_train_labels; j++)
		{
			if (distances[j]<min_dist)
			{
				min_dist = distances[j];
				out_idx = j;
			}
		}

		// label the i-th test example with the label of its nearest neighbor (index out_idx)
		output->set_label(i,train_labels[out_idx]+min_label);
	}

	delete [] distances;
	return output;
}
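The method above is a plain 1-nearest-neighbor rule: for each test vector it fetches the distances to all training vectors, takes the index of the smallest one, and copies that training example's label (shifted by min_label). For readers who want the idea without the shogun machinery, here is a minimal framework-free sketch using squared Euclidean distance; every name in it (nn_classify, train, labels, test) is illustrative and not part of the shogun API.

#include <cstddef>
#include <limits>
#include <vector>

// Minimal 1-NN sketch: classify each test vector with the label of the
// closest training vector under squared Euclidean distance.
std::vector<int> nn_classify(const std::vector<std::vector<double>>& train,
                             const std::vector<int>& labels,
                             const std::vector<std::vector<double>>& test)
{
	std::vector<int> out(test.size());
	for (std::size_t i = 0; i < test.size(); ++i)
	{
		std::size_t best = 0;
		double best_dist = std::numeric_limits<double>::max();
		for (std::size_t j = 0; j < train.size(); ++j)
		{
			double d = 0.0;
			for (std::size_t k = 0; k < train[j].size(); ++k)
			{
				double diff = test[i][k] - train[j][k];
				d += diff * diff;  // squared Euclidean distance
			}
			if (d < best_dist)  // keep the closest training vector seen so far
			{
				best_dist = d;
				best = j;
			}
		}
		out[i] = labels[best];  // label of the nearest neighbor
	}
	return out;
}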
Example #2
CLabels* CGaussianNaiveBayes::apply()
{
	// init number of vectors
	int32_t num_vectors = m_features->get_num_vectors();

	// init result labels
	CLabels* result = new CLabels(num_vectors);

	// classify each example of the data
	SG_PROGRESS(0, 0, num_vectors);
	for (int32_t i = 0; i < num_vectors; i++)
	{
		result->set_label(i,apply(i));
		SG_PROGRESS(i + 1, 0, num_vectors);
	}
	SG_DONE();
	return result;
}
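Example #2 only shows the outer loop; the per-vector call apply(i) is not listed here. In a Gaussian naive Bayes classifier that call typically reduces to an argmax over per-class scores of log prior plus per-feature Gaussian log-densities. A rough sketch under that assumption, with purely illustrative names (gnb_predict, log_prior, mean, var) rather than shogun's actual members:

#include <cmath>
#include <cstddef>
#include <limits>
#include <vector>

// Sketch of a single Gaussian naive Bayes prediction: pick the class that
// maximizes log P(c) + sum_k log N(x_k | mean[c][k], var[c][k]).
int gnb_predict(const std::vector<double>& x,
                const std::vector<double>& log_prior,          // one entry per class
                const std::vector<std::vector<double>>& mean,  // [class][feature]
                const std::vector<std::vector<double>>& var)   // [class][feature]
{
	const double two_pi = 6.283185307179586;
	int best_class = 0;
	double best_score = -std::numeric_limits<double>::infinity();
	for (std::size_t c = 0; c < log_prior.size(); ++c)
	{
		double score = log_prior[c];
		for (std::size_t k = 0; k < x.size(); ++k)
		{
			double diff = x[k] - mean[c][k];
			score += -0.5 * std::log(two_pi * var[c][k])
			         - diff * diff / (2.0 * var[c][k]);
		}
		if (score > best_score)
		{
			best_score = score;
			best_class = static_cast<int>(c);
		}
	}
	return best_class;
}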