Ejemplo n.º 1
0
// Bayesian classifier: trains a normal Bayes model on the training set,
// classifies every test row, then prints the accuracy and plots the
// binary predictions.
void bayes ( Mat & trainingData , Mat & trainingClasses , Mat & testData , Mat
		& testClasses ) {
	// The classifier is fitted directly by its constructor.
	CvNormalBayesClassifier bayes ( trainingData , trainingClasses ) ;
	// One predicted label (as float) per test sample.
	Mat predicted ( testClasses.rows , 1 , CV_32F ) ;
	int row = 0;
	while ( row < testData.rows ) {
		// Classify each test sample individually and store the response.
		predicted.at < float > ( row , 0 ) = bayes.predict ( testData.row ( row ) ) ;
		++ row ;
	}
	cout << " Accuracy_ { BAYES } = " << evaluate ( predicted , testClasses ) << endl ;
	plot_binary ( testData , predicted , " Predictions Bayes " ) ;
}
Ejemplo n.º 2
0
// Runs the stored normal Bayes model over every sample in `samples`.
// Misclassified samples (with their true labels) are collected into
// `outError`; finally prints "<total> <correct>" to stdout.
void Model::Predict_nbayes( const SampleSet& samples, SampleSet& outError )
{
	// m_pModel is stored type-erased; recover the concrete classifier.
	CvNormalBayesClassifier* model = (CvNormalBayesClassifier*)m_pModel;

	// Batch-predict all samples in one call; responses land in `result`.
	cv::Mat result;
	model->predict(samples.Samples(), &result);

	int true_resp = 0;
	for (int idx = 0; idx < samples.N(); ++idx)
	{
		if (result.at<float>(idx) == samples.GetLabelAt(idx))
		{
			++true_resp;
		}
		else
		{
			// Keep the misclassified sample for later inspection.
			outError.Add(samples.GetSampleAt(idx), samples.GetLabelAt(idx));
		}
	}
	printf("%d %d",samples.N(), true_resp);
}
int _tmain(int argc, _TCHAR* argv[])
{
	// General training of a naive bayes model - trafic sign model training in this specific case
	// A model is trained by supplying positive and negative examples and letting the classifier decide how to seperate them
	// For this training, a feature vector of each sample and an output needs to be supplied to the algoritm

	// Read in file, in order to be able to segment out the wrong ones
	if( argc == 1 ){
		printf( "Usage: Application trains a naive bayes classifier on positive and negative samples.\n"
			    "For the moment parameters and feature construction are specified in the code.\n"
				"--> naive_bayes_model_training.exe\n");
		return 0;
	}

	// Parameters - comment out
	string main_folder = "C:/data/traffic_signs/";
	string positive_folder = main_folder + "positives/";
	string negative_folder = main_folder + "negatives/";
	vector<Mat> input_elements, input_elements_cmy, features;
	CvNormalBayesClassifier classifier;

	// Read in the positive and negative training samples and put them into a specific vector
	// C/C++ doesn't provide a standard file reading system,  this is WINDOWS based and will not work under Linux/MacOsx/Android/... !
	WIN32_FIND_DATA ffd;
	TCHAR szDir_positives[MAX_PATH], szDir_negatives[MAX_PATH];
	vector<string> filenames_positives, filenames_negatives;
	HANDLE hFind = INVALID_HANDLE_VALUE;

	StringCchCopy(szDir_positives, MAX_PATH, positive_folder.c_str());
	StringCchCat(szDir_positives, MAX_PATH, TEXT("*"));

	StringCchCopy(szDir_negatives, MAX_PATH, negative_folder.c_str());
	StringCchCat(szDir_negatives, MAX_PATH, TEXT("*"));

	// Process data from the positives folder, at the end remove the current and parent folder references in the listing
	hFind = FindFirstFile(szDir_positives, &ffd);
	do
	{
		filenames_positives.push_back (ffd.cFileName);	
	}
	while (FindNextFile(hFind, &ffd) != 0);
	filenames_positives.erase(filenames_positives.begin(),filenames_positives.begin()+2);
	FindClose( hFind );

	// Process data from the negatives folder, at the end remove the current and parent folder references in the listing
	hFind = FindFirstFile(szDir_negatives, &ffd);
	do
	{
		filenames_negatives.push_back (ffd.cFileName);	
	}
	while (FindNextFile(hFind, &ffd) != 0);
	filenames_negatives.erase(filenames_negatives.begin(),filenames_negatives.begin()+2);
	FindClose( hFind );

	// Based on the filenames, read in the data and put into the matrix vector
	for(int i = 0; i < filenames_positives.size(); i++){
		Mat temp = imread(filenames_positives[i]);
		input_elements.push_back(temp);
	}
	for(int i = 0; i < filenames_negatives.size(); i++){
		Mat temp = imread(filenames_negatives[i]);
		input_elements.push_back(temp);
	}

	// Switch inputs from bgr to cmy color space
	for(int i = 0; i < 10; i++){
		Mat temp = bgr_to_cmy(input_elements[i]);
		input_elements_cmy.push_back(temp);
	}
	
	// Pick the yellow channel of each sample, create a feature representation of it and return to the naïve bayes classifier
	for(int i = 0; i < input_elements.size(); i++){
		Mat temp_feature = create_feature_cmy(input_elements_cmy[i]);
		features.push_back(temp_feature);
	}

	// Create feature matrix based on retrieved vector with the single features
	Mat input_features = convert_input_vector(features);
	// Create according feature label matrix
	Mat label_features(10,1,CV_8UC1);
	for(int i = 0; i < filenames_positives.size(); i++){
		label_features.push_back(1);
	}
	for(int i = 0; i < filenames_negatives.size(); i++){
		label_features.push_back(0);
	}

	// Train the normal bayes classifier model and store in into an xml file so that it can be reused
	// Training data and labels need to be of the CV_32FC1 format!
	input_features.convertTo(input_features, CV_32FC1);
	label_features.convertTo(label_features, CV_32FC1);
	classifier.train(input_features, label_features);

	// Save the normal bayes classifer model
	string output_location = main_folder + "naive_bayes_traffic_signs.xml";
	classifier.save(main_folder.c_str());

	return 0;
}
Ejemplo n.º 4
0
void Model::Train_nbayes( const SampleSet& samples )
{
	CvNormalBayesClassifier* model = (CvNormalBayesClassifier*)m_pModel;
	//void* para = (void*)m_trainPara;
	model->train(samples.Samples(), samples.Labels());
}
Ejemplo n.º 5
0
void Naivebayesopencv::test()
{
	//testPCA();
	//number of taining samples to be used
	int numImages = 1000;
	CvMat *trainingVectors = 0;
	CvMat *trainingLabels = 0;
	extractTrainingData(numImages, trainingVectors, trainingLabels);
	//train the data with Naivebayes
	
	// Perform a PCA:
	
	PCA pca(Mat(trainingVectors), Mat(), CV_PCA_DATA_AS_ROW, 300);

	Mat newTraining = pca.project(Mat(trainingVectors));

	CvNormalBayesClassifier Naivebayes;
	Naivebayes.train(newTraining, trainingLabels, Mat(), Mat(), false);
	/*Naivebayes.train(trainingVectors, trainingLabels, Mat(), Mat(), false);*/


	cvReleaseMat(&trainingVectors);
	cvReleaseMat(&trainingLabels);

	//Recognition: Using Naivebayes

	//test number to be used
	numImages = 1000;//for tesing number
	CvMat *testVectors = 0;
	CvMat *actualLabels = 0;
	extractTestingData(numImages, testVectors,  actualLabels);

	CvMat *testLabels = cvCreateMat(numImages, 1, CV_32FC1);

	PCA pca2(Mat(testVectors), Mat(), CV_PCA_DATA_AS_ROW, 300);

	Mat newTesignVector = pca2.project(Mat(testVectors));

	CvMat cvMatnewTestingvector = newTesignVector;
	Naivebayes.predict(&cvMatnewTestingvector, testLabels);
	//Naivebayes.predict(testVectors, testLabels);

	
	
	//Get the error rate and print the result out
	int totalCorrect = 0;
	for (int i = 0; i < numImages; i++){
		if (testLabels->data.fl[i] == actualLabels->data.fl[i])
		{
			totalCorrect++;
		}
		else{
			printf("\n Error: image id=%d number %f was mistaken as %f", i, actualLabels->data.fl[i], testLabels->data.fl[i]);
		}
			
	}
	printf("\nError Rate: %.1f%%", 
		(double)100- (double)totalCorrect * 100 / (double)numImages);

	cvReleaseMat(&testVectors);
	cvReleaseMat(&testLabels);
	cvReleaseMat(&actualLabels);
	return;



}
Ejemplo n.º 6
0
// Thin wrapper: classifies a single sample and returns its label as an int.
int predict(const CvNormalBayesClassifier& _bayes, const cv::Mat& _mat) {
    const float response = _bayes.predict(_mat);
    return static_cast<int>(response);
}
Ejemplo n.º 7
0
// Fits a fresh normal Bayes classifier on the given data/labels and
// persists the trained model to "model.yml".
void train(cMatRef _training_data, cMatRef _label ) {
    CvNormalBayesClassifier classifier;
    classifier.train(_training_data, _label);
    classifier.save("model.yml");
}