Example #1
static model_t* svm_train(svm_model_factory_t* factory, dataset_t* d)
{
    int num_rows = training_set_size(d->num_rows); /* project helper: presumably caps the row count used for training */

    CodeGeneratingLinearSVM svm(d);
    CvSVMParams params = CvSVMParams(CvSVM::C_SVC, CvSVM::LINEAR,
                                     /*degree*/0, /*gamma*/1, /*coef0*/0, /*C*/1,
                                     /*nu*/0, /*p*/0, /*class_weights*/0,
                                     cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, FLT_EPSILON));
    CvMat* input;
    CvMat* response;
    /* project helper: fills the input/response CvMat pair from the dataset */
    make_ml_multicolumn(d, &input, &response, num_rows, false);

    model_t* m = 0;
    /* try a 5-fold cross-validated parameter search first,
       then fall back to a single fixed-parameter fit */
    if(svm.train_auto(input, response, 0, 0, params, 5)) {
        m = model_new(d);
        m->code = svm.get_program();
    } else if(svm.train(input, response, 0, 0, params)) {
        m = model_new(d);
        m->code = svm.get_program();
    }

    cvReleaseMat(&input);
    cvReleaseMat(&response);
    return m;
}
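CodeGeneratingLinearSVM, model_t and make_ml_multicolumn above are project-specific wrappers; the control flow itself is plain OpenCV 1.x. A minimal sketch of the same try-train_auto-then-fall-back pattern with the stock CvSVM class, reusing the matrix and parameter names from the snippet:

CvSVM svm;
if(!svm.train_auto(input, response, 0, 0, params, 5)) /* 5-fold parameter search */
    svm.train(input, response, 0, 0, params);         /* fixed-parameter fallback */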
Example #2
int svmtest(Mat& label, Mat& traindata) {

	cout << "training" << '\n';

	CvSVM svm;
	CvSVMParams param;
	CvTermCriteria criteria;
	criteria = cvTermCriteria(CV_TERMCRIT_EPS, 1000, FLT_EPSILON);
	param = CvSVMParams(CvSVM::C_SVC, CvSVM::LINEAR, 10.0, 0.09, 1.0, 10.0, 0.5, 1.0, NULL, criteria);

	svm.train(traindata, label, Mat(), Mat(), param);
	svm.save("SVM_DATA.xml");

	cout << "done" << '\n';

	cout << "predict" << '\n';
	// no prediction is performed here; see the sketch below

	return 0;
}
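Example #2 prints "predict" but never actually predicts. A minimal sketch of that missing step, assuming a CV_32FC1 matrix testdata whose columns match traindata:

	CvSVM loaded;
	loaded.load("SVM_DATA.xml");                 // reload the model saved above
	float cls = loaded.predict(testdata.row(0)); // class label for one sample
	cout << "predicted class: " << cls << '\n';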
Example #3
//Support Vector Machine
void svm(Mat& trainingData, Mat& trainingClasses, Mat& testData, Mat& testClasses) {
	CvSVMParams param = CvSVMParams();
	param.svm_type = CvSVM::C_SVC;
	param.kernel_type = CvSVM::RBF; // CvSVM::RBF, CvSVM::LINEAR ...
	param.degree = 0; // for poly
	param.gamma = 20; // for poly/rbf/sigmoid
	param.coef0 = 0; // for poly/sigmoid
	param.C = 7; // for CV_SVM_C_SVC, CV_SVM_EPS_SVR and CV_SVM_NU_SVR
	param.nu = 0.0; // for CV_SVM_NU_SVC, CV_SVM_ONE_CLASS, and CV_SVM_NU_SVR
	param.p = 0.0; // for CV_SVM_EPS_SVR
	param.class_weights = NULL; // for CV_SVM_C_SVC
	param.term_crit.type = CV_TERMCRIT_ITER + CV_TERMCRIT_EPS;
	param.term_crit.max_iter = 1000;
	param.term_crit.epsilon = 1e-6;
	// SVM training (use train_auto for OpenCV >= 2.0)
	CvSVM svm(trainingData, trainingClasses, Mat(), Mat(), param);
	Mat predicted(testClasses.rows, 1, CV_32F);
	for(int i = 0; i < testData.rows; i++) {
		Mat sample = testData.row(i);
		predicted.at<float>(i, 0) = svm.predict(sample);
	}
	cout << "Accuracy_{SVM} = " << evaluate(predicted, testClasses) << endl;
	plot_binary(testData, predicted, "Predictions SVM");
	// plot support vectors
	if(plotSupportVectors) {
		Mat plot_sv(size, size, CV_8UC3);
		plot_sv.setTo(Scalar(255.0, 255.0, 255.0));
		int svec_count = svm.get_support_vector_count();
		for(int vecNum = 0; vecNum < svec_count; vecNum++) {
			const float* vec = svm.get_support_vector(vecNum);
			circle(plot_sv, Point(vec[0]*size, vec[1]*size), 3, CV_RGB(0, 0, 0));
		}
		imshow("Support Vectors", plot_sv);
	}
}
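Example #3 relies on helpers (evaluate, plot_binary) and globals (plotSupportVectors, size) defined elsewhere in its source. As one illustration, evaluate is assumed to compute plain accuracy over sign-encoded binary labels; a minimal sketch under that assumption:

// assumed helper: fraction of predictions whose sign matches the true label
float evaluate(Mat& predicted, Mat& actual) {
	assert(predicted.rows == actual.rows);
	int correct = 0;
	for(int i = 0; i < actual.rows; i++) {
		float p = predicted.at<float>(i, 0);
		float a = actual.at<float>(i, 0);
		if((p >= 0 && a >= 0) || (p < 0 && a < 0))
			correct++;
	}
	return float(correct) / actual.rows;
}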
Example #4
/** 
 * @author     	JIA Pei
 * @version    	2009-10-04
 * @brief      	Train the currently selected classifier
 * @param      	data     		Input - training data, one sample per row
 * @param		categories		Input - column vector of class labels
 * @return		void
*/
void CClassificationAlgs::Training(const Mat_<float>& data, const Mat_<int>& categories)
{
	unsigned int NbOfSamples = data.rows;
	set<int> ClassSet;
	for(int i = 0; i < categories.rows; i++)
	{
		ClassSet.insert(categories(i, 0));
	}
	this->m_iNbOfCategories = ClassSet.size();
	
	switch(this->m_iClassificationMethod)
	{
		case CClassificationAlgs::DecisionTree:
			this->m_CVDtree.train( 	data,
									CV_ROW_SAMPLE,
									categories,
									Mat(),
									Mat(),
									Mat(),
									Mat(),
									CvDTreeParams( INT_MAX, 2, 0, false, this->m_iNbOfCategories, 0, false, false, 0 ) );
		break;
		case CClassificationAlgs::Boost:
		    this->m_CVBoost.train( 	data,
									CV_ROW_SAMPLE,
									categories,
									Mat(),
									Mat(),
									Mat(),
									Mat(),
									CvBoostParams(CvBoost::DISCRETE, 50, 0.95, INT_MAX, false, 0),
									false );
		break;
		case CClassificationAlgs::RandomForest:
			this->m_CVRTrees.train( data, 
									CV_ROW_SAMPLE,
									categories,
									Mat(),
									Mat(),
									Mat(),
									Mat(),
									CvRTParams( INT_MAX, 2, 0, false, this->m_iNbOfCategories, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ) );
		break;
		case CClassificationAlgs::ExtremeRandomForest:
			this->m_CVERTrees.train(data,
									CV_ROW_SAMPLE,
									categories,
									Mat(),
									Mat(),
									Mat(),
									Mat(),
									CvRTParams( INT_MAX, 2, 0, false, this->m_iNbOfCategories, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ) );
		break;
		case CClassificationAlgs::SVM:
			this->m_CVSVM.train(	data,
									categories,
									Mat(),
									Mat(),
									CvSVMParams(CvSVM::C_SVC, CvSVM::RBF,
									0, 1, 0,
									1, 0, 0,
									NULL, cvTermCriteria(CV_TERMCRIT_ITER, 1000, 1E-6) ) );
		break;
	}
}
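A hypothetical call site for the class above (the matrix sizes are placeholders, and how the selector m_iClassificationMethod gets set, e.g. via a constructor argument or setter, is outside the snippet):

	const int nSamples = 100, nFeatures = 8;
	Mat_<float> data(nSamples, nFeatures);  // feature rows, filled elsewhere
	Mat_<int> labels(nSamples, 1);          // one class id per row
	CClassificationAlgs classifier;         // assumed default-constructible
	classifier.Training(data, labels);      // trains whichever method is selected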
Example #5
int main()
{
    const int train_sample_count = 300;
    bool is_regression = false;

    const char* filename = "data/waveform.data";
    int response_idx = 21;

    CvMLData data;

    CvTrainTestSplit spl( train_sample_count );
    
    if(data.read_csv(filename) != 0)
    {
        printf("couldn't read %s\n", filename);
        exit(1); /* exit with a failure code, not 0 */
    }

    data.set_response_idx(response_idx);
    data.change_var_type(response_idx, CV_VAR_CATEGORICAL);
    data.set_train_test_split( &spl );

    const CvMat* values = data.get_values();
    const CvMat* response = data.get_responses();
    const CvMat* missing = data.get_missing();
    const CvMat* var_types = data.get_var_types();
    const CvMat* train_sidx = data.get_train_sample_idx();
    const CvMat* var_idx = data.get_var_idx();
    CvMat*response_map;
    CvMat*ordered_response = cv_preprocess_categories(response, var_idx, response->rows, &response_map, NULL);
    int num_classes = response_map->cols;
    
    CvDTree dtree;
    printf("======DTREE=====\n");
    CvDTreeParams cvd_params( 10, 1, 0, false, 16, 0, false, false, 0);
    dtree.train( &data, cvd_params);
    print_result( dtree.calc_error( &data, CV_TRAIN_ERROR), dtree.calc_error( &data, CV_TEST_ERROR ), dtree.get_var_importance() );

#if 0
    /* boosted trees are only implemented for two classes */
    printf("======BOOST=====\n");
    CvBoost boost;
    boost.train( &data, CvBoostParams(CvBoost::DISCRETE, 100, 0.95, 2, false, 0));
    print_result( boost.calc_error( &data, CV_TRAIN_ERROR ), boost.calc_error( &data, CV_TEST_ERROR), 0 );
#endif

    printf("======RTREES=====\n");
    CvRTrees rtrees;
    rtrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );

    printf("======ERTREES=====\n");
    CvERTrees ertrees;
    ertrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
    print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );

    printf("======GBTREES=====\n");
    CvGBTrees gbtrees;
    CvGBTreesParams gbparams;
    gbparams.loss_function_type = CvGBTrees::DEVIANCE_LOSS; // classification, not regression
    gbtrees.train( &data, gbparams);
    
    //gbt_print_error(&gbtrees, values, response, response_idx, train_sidx);
    print_result( gbtrees.calc_error( &data, CV_TRAIN_ERROR), gbtrees.calc_error( &data, CV_TEST_ERROR ), 0);

    printf("======KNEAREST=====\n");
    CvKNearest knearest;
    //bool CvKNearest::train( const Mat& _train_data, const Mat& _responses,
    //                const Mat& _sample_idx, bool _is_regression,
    //                int _max_k, bool _update_base )
    bool is_classifier = var_types->data.ptr[var_types->cols-1] == CV_VAR_CATEGORICAL;
    assert(is_classifier);
    int max_k = 10;
    knearest.train(values, response, train_sidx, is_regression, max_k, false);

    CvMat* new_response = cvCreateMat(response->rows, 1, values->type);
    //print_types();

    //const CvMat* train_sidx = data.get_train_sample_idx();
    knearest.find_nearest(values, max_k, new_response, 0, 0, 0);

    print_result(knearest_calc_error(values, response, new_response, train_sidx, is_regression, CV_TRAIN_ERROR),
                 knearest_calc_error(values, response, new_response, train_sidx, is_regression, CV_TEST_ERROR), 0);

    printf("======== RBF SVM =======\n");
    //printf("indexes: %d / %d, responses: %d\n", train_sidx->cols, var_idx->cols, values->rows);
    CvMySVM svm1;
    CvSVMParams params1 = CvSVMParams(CvSVM::C_SVC, CvSVM::RBF,
                                     /*degree*/0, /*gamma*/1, /*coef0*/0, /*C*/1,
                                     /*nu*/0, /*p*/0, /*class_weights*/0,
                                     cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, FLT_EPSILON));
    //svm1.train(values, response, train_sidx, var_idx, params1);
    svm1.train_auto(values, response, var_idx, train_sidx, params1);
    svm_print_error(&svm1, values, response, response_idx, train_sidx);

    printf("======== Linear SVM =======\n");
    CvMySVM svm2;
    CvSVMParams params2 = CvSVMParams(CvSVM::C_SVC, CvSVM::LINEAR,
                                     /*degree*/0, /*gamma*/1, /*coef0*/0, /*C*/1,
                                     /*nu*/0, /*p*/0, /*class_weights*/0,
                                     cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, FLT_EPSILON));
    //svm2.train(values, response, train_sidx, var_idx, params2);
    svm2.train_auto(values, response, var_idx, train_sidx, params2);
    svm_print_error(&svm2, values, response, response_idx, train_sidx);

    printf("======NEURONAL NETWORK=====\n");

    int num_layers = 3;
    CvMat layers = cvMat(1, num_layers, CV_32SC1, calloc(1, sizeof(double)*num_layers*1));
    cvmSetI(&layers, 0, 0, values->cols-1);
    cvmSetI(&layers, 0, 1, num_classes);
    cvmSetI(&layers, 0, 2, num_classes);
    CvANN_MLP ann(&layers, CvANN_MLP::SIGMOID_SYM, 0.0, 0.0);
    CvANN_MLP_TrainParams ann_params;
    //ann_params.train_method = CvANN_MLP_TrainParams::BACKPROP;
    CvMat ann_response = cvmat_make_boolean_class_columns(response, num_classes);

    CvMat values2 = cvmat_remove_column(values, response_idx);
    ann.train(&values2, &ann_response, NULL, train_sidx, ann_params, 0x0000);
    //ann.train(values, &ann_response, NULL, train_sidx, ann_params, 0x0000);

    ann_print_error(&ann, values, num_classes, &ann_response, response, response_idx, train_sidx);

#if 0 /* slow */

    printf("======== Polygonal SVM =======\n");
    //printf("indexes: %d / %d, responses: %d\n", train_sidx->cols, var_idx->cols, values->rows);
    CvMySVM svm3;
    CvSVMParams params3 = CvSVMParams(CvSVM::C_SVC, CvSVM::POLY,
                                     /*degree*/2, /*gamma*/1, /*coef0*/0, /*C*/1,
                                     /*nu*/0, /*p*/0, /*class_weights*/0,
                                     cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, FLT_EPSILON));
    //svm3.train(values, response, train_sidx, var_idx, params3);
    svm3.train_auto(values, response, var_idx, train_sidx, params3);
    svm_print_error(&svm3, values, response, response_idx, train_sidx);
#endif

    return 0;
}
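main() above leans on project-local code that is not part of OpenCV: cv_preprocess_categories, cvmat_make_boolean_class_columns, cvmat_remove_column, cvmSetI, the *_print_error functions and the CvMySVM subclass. As one illustration, cvmSetI is assumed to be a trivial integer setter along these lines:

/* assumed helper: set one element of a CV_32SC1 matrix */
static void cvmSetI(CvMat* mat, int row, int col, int value)
{
    CV_MAT_ELEM(*mat, int, row, col) = value;
}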
CvSVM* HoGProcessor::trainSVM(CvMat* pos_mat, CvMat* neg_mat, char *savexml, char *pos_file, char *neg_file) 
{    
	/* Read the feature vectors for positive samples */
	if (pos_file != NULL) 
	{
		printf("positive loading...\n");
		pos_mat = (CvMat*) cvLoad(pos_file);
		printf("positive loaded\n");
	}

	/* Read the feature vectors for negative samples */
	if (neg_file != NULL)
	{
		neg_mat = (CvMat*) cvLoad(neg_file);
		printf("negative loaded\n");
	}

	int n_positive, n_negative;
	n_positive = pos_mat->rows;
	n_negative = neg_mat->rows;
	int feature_vector_length = pos_mat->cols;
	int total_samples;
	
	total_samples = n_positive + n_negative;

	CvMat* trainData = cvCreateMat(total_samples, feature_vector_length, CV_32FC1);

	CvMat* trainClasses = cvCreateMat(total_samples, 1, CV_32FC1 );

	CvMat trainData1, trainData2, trainClasses1, trainClasses2;

	printf("Number of positive Samples : %d\n",
	pos_mat->rows);

	/* Copy the positive feature vectors into the training data */
	cvGetRows(trainData, &trainData1, 0, n_positive);
	cvCopy(pos_mat, &trainData1);
	cvReleaseMat(&pos_mat);

	/* Copy the negative feature vectors into the training data */
	cvGetRows(trainData, &trainData2, n_positive, total_samples);

	cvCopy(neg_mat, &trainData2);
	cvReleaseMat(&neg_mat);

	printf("Number of negative Samples : %d\n",	trainData2.rows);

	/* Form the training classes for positive and negative samples.
	   Positive samples belong to class 1, negative samples to class 2. */
	cvGetRows(trainClasses, &trainClasses1, 0, n_positive);
	cvSet(&trainClasses1, cvScalar(1));

	cvGetRows(trainClasses, &trainClasses2, n_positive, total_samples);
	cvSet(&trainClasses2, cvScalar(2));

	/* Train a linear support vector machine on the training data.
	   The parameters may be experimented with to see their effects. */

	/*
	CvMat* class_weight = cvCreateMat(1, 1, CV_32FC1);
	(*(float*)CV_MAT_ELEM_PTR(*class_weight, 0, 0)) = 0;
	//(*(float*)CV_MAT_ELEM_PTR(*class_weight, 0, 1)) = 10;
	//(*(float*)CV_MAT_ELEM_PTR(*class_weight, 1, 0)) = 100;
	//(*(float*)CV_MAT_ELEM_PTR(*class_weight, 1, 1)) = 0;
	*/

	CvSVM* svm = new CvSVM(trainData, trainClasses, 0, 0,
		CvSVMParams(CvSVM::C_SVC, CvSVM::LINEAR, 0, 0, 0, 2,
		0, 0, 0, cvTermCriteria(CV_TERMCRIT_EPS, 0, 0.01)));

	printf("SVM Training Complete!!\n");

	/*Save the learnt model*/
	if (savexml != NULL) 
	{
		svm->save(savexml);
	}

	cvReleaseMat(&trainClasses);
	cvReleaseMat(&trainData);

	return svm;
}
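A hypothetical call to trainSVM (file names are placeholders): when feature files are passed, the matrices are loaded from disk via cvLoad, so the matrix arguments can be NULL:

	HoGProcessor hog;
	CvSVM* model = hog.trainSVM(NULL, NULL,
	        (char*)"hog_svm.xml",   /* where the trained model is saved */
	        (char*)"positive.xml",  /* positive HoG feature vectors */
	        (char*)"negative.xml"); /* negative HoG feature vectors */
	delete model;                   /* caller owns the returned CvSVM */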