Example #1
void LibSVMRunner::arma_prediction(SVMConfiguration& config) {
	struct svm_model* m;
	struct svm_node ** train;
	svm_parameter *params;
	int training_examples = config.getDataExamplesNumber();

	params = configuration_to_problem(config);
	m = load_model_from_config(config, params);

//	TODO: READ MODEL FROM PARAMETERS
	if(config.isSparse()) {
		train = ArmaSpMatToSvmNode(config.sparse_data);
	} else {
		train = armatlib(config.data);
	}
	double* ret = Malloc(double, training_examples);

	for (int i = 0; i < training_examples; i++)
		ret[i] = svm_predict(m, train[i],config.log);

	arma::vec ret_vec(ret, training_examples);
	config.result = ret_vec;
	/* TODO: CLEAN MEMORY IN BETTER WAY THINK OF OTHER PARAMETERS
	 * Clean memory:
	 * -array matrix
	 * -model
	 */
	for (int i = 0; i < training_examples; i++)
		free(train[i]);
	free(train);
	//TODO: THIS SHOULD WORK WITH PREDICTIONS 2X, now it's not working
//	svm_free_and_destroy_model(&m);
	svm_destroy_param(params,config.log);
	free(ret);
}
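For reference, a minimal cleanup sketch against stock libsvm (whose svm_predict and svm_destroy_param take no logging argument, unlike the fork above); the function and parameter names here are illustrative only:
#include "svm.h"
#include <cstdlib>

// Predict every example, then release the node rows, the model and the
// parameters. `rows` holds n pointers, each to a -1-terminated svm_node row.
void predict_and_release(struct svm_model **model, struct svm_parameter *param,
                         struct svm_node **rows, int n, double *out)
{
	for (int i = 0; i < n; i++)
		out[i] = svm_predict(*model, rows[i]);

	for (int i = 0; i < n; i++)          // free each row, then the row array itself
		free(rows[i]);
	free(rows);

	svm_free_and_destroy_model(model);   // frees the model and sets *model to NULL
	svm_destroy_param(param);            // frees the weight_label / weight arrays
}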
Example #2
double cv(struct svm_parameter *param,struct svm_problem *prob){
	struct svm_problem prob2;
	struct svm_model *model;
	int i,j;
	double predict_label,target_label;
	prob2.l=prob->l;
	prob2.y = Malloc(double,prob->l);
	prob2.x = Malloc(struct svm_node *,prob->l);
	double error = 0;
	int total = 0;

	prob2.l--;


	for(i=0;i<prob->l;i++){
		for(j=0;j<i;j++){
			prob2.x[j]=prob->x[j];
			prob2.y[j]=prob->y[j];
		}
		for(;j<prob2.l;j++){
			prob2.x[j]=prob->x[j+1];
			prob2.y[j]=prob->y[j+1];
		}
		model = svm_train(&prob2,param);		
		predict_label = svm_predict(model,prob->x[i]);
		svm_free_and_destroy_model(&model);
		
		printf("predict_label:%lf prob->y[i]:%lf\n",predict_label,prob->y[i]);
		target_label=prob->y[i];
		error += (predict_label-target_label)*(predict_label-target_label);
		++total;
	}
	free(prob2.x);   // release the scratch copies of the pointer and label arrays
	free(prob2.y);
	return error/(double)total;
}
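The same leave-one-out estimate can also be obtained with libsvm's built-in svm_cross_validation by using prob->l folds; a minimal sketch (function name is illustrative):
#include "svm.h"
#include <cstdlib>

// Mean squared error via libsvm's own cross-validation routine;
// nr_fold == prob->l gives leave-one-out, matching the hand-rolled loop above.
double cv_builtin(const struct svm_problem *prob, const struct svm_parameter *param)
{
	double *target = (double *) malloc(prob->l * sizeof(double));
	svm_cross_validation(prob, param, prob->l, target);

	double error = 0;
	for (int i = 0; i < prob->l; i++) {
		double d = target[i] - prob->y[i];
		error += d * d;
	}
	free(target);
	return error / prob->l;
}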
Example #3
double svm_test_acc(const std::vector<feature_t> &feats, const std::vector<int> &labels, const svm_model * model)
{
	double success = 0;
	assert(feats.size() == labels.size());
	int length = labels.size();
	for(int i = 0; i < length; i++)
	{
		int len = 0;
		for(auto k : feats[i])
		{
			if(k != 0)
				len++;
		}
		svm_node * x = (svm_node *)malloc(sizeof(svm_node) * (len+1));
		int idx = 0;
		for(auto k : feats[i])
			if(k != 0)
			{
				x[idx].value = k;
				x[idx].index = idx + 1; 
				idx++;
			}
		x[len].index = -1;
		auto y = svm_predict(model, x);
		if(y == (double)labels[i])
			success++;
		free(x);
	}
	return success / length;
}
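Most of the examples below repeat the same conversion step, so here is a stand-alone, hypothetical helper (not from any of the projects shown) that builds the 1-indexed, -1-terminated svm_node array libsvm expects:
#include "svm.h"
#include <cstddef>
#include <vector>

// Dense feature vector -> libsvm sparse representation:
// 1-based indices, zero entries skipped, terminated by index = -1.
std::vector<svm_node> to_svm_nodes(const std::vector<double> &feat)
{
	std::vector<svm_node> nodes;
	for (std::size_t i = 0; i < feat.size(); ++i) {
		if (feat[i] != 0.0) {
			svm_node n;
			n.index = static_cast<int>(i) + 1;
			n.value = feat[i];
			nodes.push_back(n);
		}
	}
	svm_node end;
	end.index = -1;
	end.value = 0.0;
	nodes.push_back(end);
	return nodes;
}

// Usage: double y = svm_predict(model, to_svm_nodes(feat).data());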
Example #4
double SVM::predict(vector<double> x)
{
	svm_node *x_node;
	int count = 0;
	for (int i = 0; i < x.size(); i++)
	{
		if (x[i] > 10e6 || x[i] < 10e-6)
			count++;
	}
	x_node = Malloc(svm_node, count+1);
	int index = 0;
	for (int i = 0; i < x.size(); i++)
	{
		if (x[i] > 10e6 || x[i] < 10e-6)
		{
			x_node[index].index = i+1;
			x_node[index].value = x[i];
			index++;
		}
	}
	x_node[index].index = -1;

	double value = svm_predict(model, x_node);
	free(x_node);   // x_node was allocated with Malloc (a malloc wrapper)

	return value;
}
Example #5
int SupportVectorMachine::predict_age(Abalone a, svm_model *model)
{
	svm_node *nodes = new svm_node[9];
	svm_node node1; node1.index = 1; node1.value = a.get_Sex();
	nodes[0] = node1;
	svm_node node2; node2.index = 2; node2.value = a.get_Diameter();
	nodes[1] = node2;
	svm_node node3; node3.index = 3; node3.value = a.get_Height();
	nodes[2] = node3;
	svm_node node4; node4.index = 4; node4.value = a.get_Length();
	nodes[3] = node4;
	svm_node node5; node5.index = 5; node5.value = a.get_Shell_weight();
	nodes[4] = node5;
	svm_node node6; node6.index = 6; node6.value = a.get_Shucked_weight();
	nodes[5] = node6;
	svm_node node7; node7.index = 7; node7.value = a.get_Viscera_weight();
	nodes[6] = node7;
	svm_node node8; node8.index = 8; node8.value = a.get_Whole_weight();
	nodes[7] = node8;
	svm_node node9; node9.index = -1; node9.value = '?';
	nodes[8] = node9;
	double result = svm_predict(model, nodes);
	delete[] nodes;   // nodes was allocated with new[], so release it here
	return result;
}
Example #6
bool SVM::predictSVM(VectorDouble &inputVector){

		if( !trained || inputVector.size() != numInputDimensions ) return false;

		svm_node *x = NULL;

		//Copy the input data into the SVM format
		x = new svm_node[numInputDimensions+1];
		for(UINT j=0; j<numInputDimensions; j++){
			x[j].index = (int)j+1;
			x[j].value = inputVector[j];
		}
		//The last value in the input vector must be set to -1
		x[numInputDimensions].index = -1;
		x[numInputDimensions].value = 0;

		//Scale the input data if required
		if( useScaling ){
			for(UINT i=0; i<numInputDimensions; i++)
				x[i].value = scale(x[i].value,ranges[i].minValue,ranges[i].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
		}

		//Perform the SVM prediction
		double predict_label = svm_predict(model,x);

        //We can't do null rejection without the probabilities, so just set the predicted class
        predictedClassLabel = (UINT)predict_label;

		//Clean up the memory
		delete[] x;

		return true;
}
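The scale() call above belongs to the surrounding GRT class; assuming it is the usual linear range mapping, an equivalent stand-alone helper would be:
// Maps x linearly from [minSource, maxSource] to [minTarget, maxTarget];
// assumed to match the behaviour of the scale() helper used above.
inline double scaleValue(double x, double minSource, double maxSource,
                         double minTarget, double maxTarget)
{
	return (x - minSource) / (maxSource - minSource) * (maxTarget - minTarget) + minTarget;
}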
Example #7
/**
 * @brief Predict with SVM for a given vector.
 * @param type Type of feature vector: Type of svm-model (1, 2, ...)
 * @param vec Feature vector for the SVM.
 * @param prob Probability of correct prediction for each class.
 * @return Returns the prediction label
 */
bool SVMPredictorSingle::process(int type, std::vector<double> &vec, std::vector<double> &prob)
{
  int svm_type = svm_get_svm_type(model);
  int nr_class = svm_get_nr_class(model);
  double *prob_estimates = NULL;
  int j;

  if(predict_probability) {
    if (svm_type == NU_SVR || svm_type == EPSILON_SVR)
      printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n", svm_get_svr_probability(model));
    else
      prob_estimates = (double *) malloc(nr_class*sizeof(double));
  }

  // we copy now the feature vector
  double predict_label;
  for(unsigned idx = 0; idx < vec.size(); idx++) {
    node[idx].index = idx+1;
    node[idx].value = vec[idx];
    node[idx+1].index = -1;
  }

  if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC)) {
    predict_label = svm_predict_probability(model, node, prob_estimates);
    for(j=0;j<nr_class;j++)
      prob.push_back(prob_estimates[j]);
  }
  else
    predict_label = svm_predict(model, node);

  if(predict_probability)
    free(prob_estimates);
  
  return predict_label;
}
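When probability output is optional, stock libsvm also offers svm_check_probability_model to verify that the model was trained with probability information; a minimal sketch (names are illustrative):
#include "svm.h"
#include <vector>

// Returns the predicted label; fills probs only when the model was trained
// with probability estimates (param.probability = 1).
double predict_with_optional_probs(const svm_model *model, const svm_node *x,
                                   std::vector<double> &probs)
{
	if (svm_check_probability_model(model)) {
		probs.resize(svm_get_nr_class(model));
		return svm_predict_probability(model, x, probs.data());
	}
	probs.clear();
	return svm_predict(model, x);
}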
Example #8
File: svmrbf.cpp Project: infnty/ys
    virtual void Classify(IDataSet* data) const {
        vector< vector<double> > features = PreprocessFeatures(data);
        svm_problem_xx test_prob(features, vector<int>(data->GetObjectCount(), 0));

        for (int i = 0; i < data->GetObjectCount(); i++)
            data->SetTarget(i, svm_predict(model->model, test_prob.x[i]));
    }
Example #9
bool SvmClassifier::Predict(const Mat &feats, vector<int> *labels,
        vector<float> *probs) const
{
    if (labels == nullptr)
    {
        return false;
    }

    int m = feats.cols;
    int n = feats.rows;
    labels->clear();
    if (probs != nullptr)
    {
        probs->clear();
    }

    // Normalize the features
    Mat feats_norm;
    Normalize(feats, &feats_norm);

    // Predict using SVM
    svm_node *x = static_cast<svm_node*>(malloc((m + 1) * sizeof(svm_node)));
    for (int i = 0; i < n; ++i)
    {
        for (int j = 0; j < m; ++j)
        {
            x[j].index = j + 1;
            x[j].value = feats_norm.at<float>(i, j);
        }
        x[m].index = -1;

        if (probs == nullptr)
        {
            labels->push_back(svm_predict(svm_model_, x));
        }
        else
        {
            double *prob = static_cast<double*>(
                    malloc(svm_model_->nr_class * sizeof(double)));
            int label = svm_predict_probability(svm_model_, x, prob);
            labels->push_back(label);
            int idx = 0;
            for (int k = 0; k < svm_model_->nr_class; ++k)
            {
                if (label == svm_model_->label[k])
                {
                    idx = k;
                    break;
                }
            }
            probs->push_back(prob[idx]);
            free(prob);   // prob was allocated with malloc, so release it with free
        }
    }
    free(x);

    return true;
}
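The label-to-probability-index lookup above can also be done through svm_get_labels instead of reading svm_model fields directly; a small hypothetical helper:
#include "svm.h"
#include <vector>

// Position of `label` in the model's class ordering, i.e. the index of the
// matching entry in the array filled by svm_predict_probability.
int probability_index_for_label(const svm_model *model, int label)
{
	int nr_class = svm_get_nr_class(model);
	std::vector<int> labels(nr_class);
	svm_get_labels(model, labels.data());
	for (int k = 0; k < nr_class; ++k)
		if (labels[k] == label)
			return k;
	return -1;   // label not present in the model
}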
Example #10
//for real time prediction
int svm_rt_predict(Pair* p, int size){
	for(int i=0; i< size; i++){
		x[i].index = p[i].index;
		x[i].value = p[i].value;
	}
	x[size].index = -1; //last one
	int predict_label = svm_predict(imp_model,x);
	return predict_label;
}
Example #11
int gcm::patchRun(vector<SLR_ST_Skeleton> vSkeletonData, vector<Mat> vDepthData, vector<IplImage*> vColorData, 
				  int *rankIndex, double *rankScore)
{
	int kernelFeatureDim = NClass*NTrainSample;
	//clock_t startT=clock();
	oriData2Feature(vSkeletonData, vDepthData, vColorData);
	//cout<<"=======Time========="<<clock()-startT<<endl;
 	gcmSubspace();

	x[0].index = 0;
	for (int j=0; j<kernelFeatureDim; j++)
	{
		subMatrix(subFeaAll_model, subFea1, 0, featureDim, j*subSpaceDim, subSpaceDim);
		x[j+1].value = myGcmKernel.Frobenius(subFea1, gcm_subspace, featureDim, subSpaceDim);
		x[j+1].index=j+1;
	}
	x[kernelFeatureDim+1].index=-1;

	//int testID = svm_predict_probability(myModel, x, prob_estimates);
	int testID_noPro = svm_predict(myModel, x);

	int testID = svm_predict_probability(myModel_candi, x, prob_estimates);

	//Sort and get the former 5 ranks. 
	vector<scoreAndIndex> rank;
	for (int i=0; i<myModel->nr_class; i++)
	{
		scoreAndIndex temp;
		temp.index = myModel->label[i];
		temp.score = prob_estimates[i];
		rank.push_back(temp);
	}
	sort(rank.begin(),rank.end(),comp);

	
	rankIndex[0] = testID_noPro;
	rankScore[0] = 1.0;
	int candiN = 0;
	//for (int i=1; i<5; i++)
	int seqCandiN = 1;
	while(seqCandiN<5)
	{
		if (rank[candiN].index == testID_noPro)
		{
			candiN++;
			continue;
		}
		rankIndex[seqCandiN] = rank[candiN].index;
		rankScore[seqCandiN] = rank[candiN].score;
		candiN++;
		seqCandiN++;
	}
	releaseResource();
	return rankIndex[0];
}
Example #12
double svm_test_acc(const svm_problem * prob, const svm_model * model)
{
	double success = 0;
	for(int i = 0; i < prob->l; i++)
	{
		double y = svm_predict(model, prob->x[i]);
		if(y == prob->y[i])
			success++;
	}
	success /= prob->l;
	return success;
}
Example #13
	int Predict( const std::vector<T>& x ) {
		int dimension = scale_info_.dimension();
		assert( static_cast<int>( x.size() ) == dimension );
		std::vector<double> scaled_x;
		scale_info_.Scale( x, scaled_x );
		struct svm_node* nodes;
		nodes = (struct svm_node *) malloc( ( dimension + 1 ) * sizeof(struct svm_node) );
		GetSVMNodes( scaled_x, nodes );
		double predict_label = svm_predict( libsvm_model_, nodes );
		free( nodes );
		return static_cast<int>( predict_label );
	}
Example #14
string ImgProcessing::classifyImg(svm_node* vector){

	double res = svm_predict(ImgProcessing::model, vector);
	string str;

	if (res > 0 && res < NB_CLASS)
		str = classLbls[(unsigned int) res];
	else
		str = "No matching found, there's an error somewhere";

	return str;
}
Example #15
int predict(double **values, int **indices, int rowNum, int *arrayNcol, int *labels, double **prob_estimates, int isProb)
{
    int svm_type=svm_get_svm_type(model);
    int nr_class=svm_get_nr_class(model);
    int j;
    LOGD("isProb:%d", isProb);
    if(isProb)
    {
        if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
            LOGD("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
        else
        {
            int *labels=(int *) malloc(nr_class*sizeof(int));
            svm_get_labels(model,labels);
            //	fprintf(output,"labels");
            //	for(j=0;j<nr_class;j++)
            //		fprintf(output," %d",labels[j]);
            //	fprintf(output,"\n");
            //	free(labels);
        }
    }

    // each record will receive
    // a predicted label and a [nClass]-D probability estimate array
    for (int i = 0; i < rowNum; i++)
    {
        int nCol = arrayNcol[i];
        double target_label = 0;
        int predict_label=0;
        x = (struct svm_node *) realloc(x,(nCol+1)*sizeof(struct svm_node));

        for (int j = 0; j < nCol; j++)
        {
            x[j].index = indices[i][j];
            x[j].value = values[i][j];
        }
        x[nCol].index = -1;

        // Probability prediction
        if (isProb && (svm_type==C_SVC || svm_type==NU_SVC))
        {
            // prob_estimate[rowNum][nClass]
            labels[i] = svm_predict_probability(model,x,prob_estimates[i]);
        }
        // without probability
        else
        {
            labels[i] = svm_predict(model,x);
        }
    } // For

    return 0;
}
Example #16
bool racewalk_svm_predict(u_char *data){
    int ret = racewalk_count_frequency(data, insn_freq);
    if( ret != 0)
        return false;

    racewalk_svm_scale(insn_freq);
    for(int j = 0; j < NUM_INSTRUCTION_TYPES; j++){
        node[j].index = j + 1;
        node[j].value = insn_freq[j];
    }
    node[NUM_INSTRUCTION_TYPES].index = -1;
    int predict_label = svm_predict(model, node);

    return predict_label == LABEL_SLED;
}
Example #17
void CSvmModel::Predict( const REAL* prInputs, REAL* prOutputs )
{
	//write the inputs into a temporary test file
/*	{
		ofstream ofg(TEST_FILE);
		vector<REAL> vcInputs( prInputs, prInputs+GetInputs() );
		vector<REAL> vcOutputs;
		TransformSvmLine( ofg, vcInputs, vcOutputs, 0 );
	}*/

	//predict for each model and put each output into prOutputs[i]
	for( int i=0; i<GetOutputs(); i++ ){
		svm_predict( m_vcModels[i], prInputs, GetInputs(), &prOutputs[i] );
//		RunSvmPredict( TEST_FILE, m_vcModelFiles[i], prOutputs[i] );
	}
}
Example #18
 string SVMClassifier::classifyPoint(const std::vector<double> point)
 {
     //Copy the point to be classified into an svm_node
     int dims = point.size();
     svm_node* test_pt = new svm_node[dims+1];
     for(int i=0; i<dims; i++){
         test_pt[i].index = i;
         //Scale the point using the training scaling values
         test_pt[i].value = (point[i]-scaling_factors[i][0]) / scaling_factors[i][1];
     }
     test_pt[dims].index = -1;
     
     //Classify the point using the currently trained SVM
     int label_n = svm_predict(trained_model, test_pt);
     return label_int_to_str[label_n];
 }
Example #19
    //predict the expected value using the trained svm and the input
    double ML2::predictML( double velocity[])
    {
	    double error = 0;
	    
        //get info about svm using the model
	    int svm_type=svm_get_svm_type(model);
	    int nr_class=svm_get_nr_class(model);
	    double *prob_estimates=NULL;
	    int j;
        int predict_probability = param.probability;

	cout<<"svm type : "<<svm_type<<endl<<flush;
	
	cout<<"nr class : "<<nr_class<<endl<<flush;
	
	cout<<"predict probability : "<<predict_probability<<endl<<flush;
	    if(predict_probability)
	    {
		    if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			    cout<<"Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="<<svm_get_svr_probability(model)<<endl<<flush;
	
	    }
        
	    int i = 0;
	    double target_label, predict_label;
	    //allocate space for x
        int max_nr_attr = datacols + 1;
         x = Malloc(struct svm_node,max_nr_attr);
        //store each of the velocity parameter
        for( i = 0 ; i < datacols; i++){
           x[i].index = i+1;
	       x[i].value = velocity[i];
        }
        
        //end of the x
	    x[i].index = -1;
        //predict the value
	    predict_label = svm_predict(model,x);
        //free x
        free(x);
	    cout<<"prediction "<<predict_label <<endl<<flush;

	    if(predict_probability)
		    free(prob_estimates);

        return predict_label;
    }
Example #20
double SVM::classify(struct svm_node *data, char* filename_model)
{
	double predict_label;
	double *prob_estimates = NULL;
	int svm_type;
	int nr_class;

	// load feature file only once
	if(!this->_bFeatureFileLoaded)
	{
		if((this->_svmModel = svm_load_model(filename_model))==0)
		{
			printf("can't open model file %s\n",filename_model);
			return -99;
		}

		this->_bFeatureFileLoaded = true;
	}

	svm_type = svm_get_svm_type(this->_svmModel);
	nr_class = svm_get_nr_class(this->_svmModel);

	prob_estimates = (double *) malloc(nr_class*sizeof(double));

	if (svm_type==C_SVC || svm_type==NU_SVC)
	{
		predict_label = svm_predict_probability(this->_svmModel,data,prob_estimates);

	//	printf("%g",predict_label);

		//for(int k=0; k < nr_class; k++)
			//printf(" %g",prob_estimates[k]);

		//printf("\n");
	}
	else
	{
		predict_label = svm_predict(this->_svmModel, data);

		printf("%g\n",predict_label);
	}

	free(data);
	free(prob_estimates);

	return predict_label;
}
Example #21
int predict(float **values, int **indices, int rowNum, int colNum, int *labels, double *prob_estimates, int isProb)
{
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	int j;
	
	if(isProb)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			LOGD("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
		//	fprintf(output,"labels");		
		//	for(j=0;j<nr_class;j++)
		//		fprintf(output," %d",labels[j]);
		//	fprintf(output,"\n");
		//	free(labels);
		}
	}

        for (int i = 0; i < rowNum; i++)
        {
            double target_label, predict_label=0;
            x = (struct svm_node *) realloc(x,(colNum+1)*sizeof(struct svm_node));

            for (int j = 0; j < colNum; j++)
            {
                x[j].index = indices[i][j];
                x[j].value = values[i][j];
            }
            x[colNum].index = -1;

            // Probability prediction 
            if (isProb && (svm_type==C_SVC || svm_type==NU_SVC))
            {
                    predict_label = svm_predict_probability(model,x,prob_estimates);
                    labels[i] = (int) predict_label;   // store the label for row i, not always slot 0
            }
            else { labels[i] = svm_predict(model,x); }
	} // For

        return 0;
}
Example #22
Label SVMClassifier::classify(const Descriptor &descriptor) const{
  svm_node *nodes = constructNode(descriptor);
  //print::print_svm_nodes(nodes, descriptor.size());
  double   result = svm_predict(model, nodes);
  //vector<double> value_per_class = getValues(nodes, model);
  /*
  typedef Descriptor::const_iterator desit;
  cout << "descriptor: ";
  for(desit i = descriptor.begin(); i != descriptor.end(); ++i){
    if((i - descriptor.begin()) % 10 == 0)
      cout << endl;
    cout << *i << " ";
  }
  cout << endl << "result: " << result << endl;
  */
  delete [] nodes;
  return result;
}
Example #23
/*
 * Predict using model.
 *
 *  It will return -1 if we run out of memory.
 */
int copy_predict(char *predict, struct svm_model *model, npy_intp *predict_dims,
                 char *dec_values)
{
    double *t = (double *) dec_values;
    struct svm_node *predict_nodes;
    npy_intp i;

    predict_nodes = dense_to_libsvm((double *) predict, predict_dims);

    if (predict_nodes == NULL)
        return -1;
    for(i=0; i<predict_dims[0]; ++i) {
        *t = svm_predict(model, &predict_nodes[i]);
        ++t;
    }
    free(predict_nodes);
    return 0;
}
Example #24
Real32 predict_sample(const char *test_sample_name) {
	Word16 correct = 0;
	FILE *input = fopen(test_sample_name, "r");
	Word16 i = 0, j = 0;
	Word16 n = -1;
	fscanf(input, "%hd", &n);
	Real32 temp;
	printf("{\n");
	for (i = 0; i < n; i++) {
		Word16 label = -1;
		fscanf(input, "%hd", &label);
		// if (i < n - 1) {
		// 	printf("%hd,", label);
		// }
		// else {
		// 	printf("%hd", label);
		// }
		
		//printf("{");
		for (j = 0; j < NR_FEATURE; j++) {
			fscanf(input, FORMAT, &temp);
			//printf("%lf\n", temp);
			test_sample.data[j].value = round_real(temp);
			test_sample.data[j].index = (j + 1);

			// if (j < NR_FEATURE - 1) {
			// 	printf("%hd,", test_sample.data[j].value);
			// }
			// else {
			// 	printf("%hd", test_sample.data[j].value);
			// }
		}
		//printf("},\n");
		Word16 predict = svm_predict(test_sample);
		//printf("%d\n", predict);
		if (predict == label) {
			correct++;
		}
	}
	printf("}\n");
	fclose(input);
	printf("%hd %hd\n", correct, n);
	return (Real32)correct / (Real32)n;
}
Example #25
int svm_classify (svm_classifier_t *svm, mx_real_t *instance) {
    int i;
    mx_real_t best_class;
    struct svm_node *x;

    x = (struct svm_node *) rs_malloc((svm->feature_dim+1)*sizeof(struct svm_node),"Feature vector representation for svm");
    for (i=0;i<svm->feature_dim;i++) {
	x[i].index=i+1;
	x[i].value=instance[i];
    }
    x[i].index=-1;

    _scale_instance(&x,svm->feature_dim,svm->max,svm->min);
    
    best_class = svm_predict(svm->model,x);
    rs_free(x);
			
    return (int) best_class;
}
Example #26
bool CmySvmArth::Sim(double* res , int& len)
{
	if(model==NULL||res==NULL)
		return false;
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	len = m_nSimDataLen;
	if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
	{
		prob_estimates = new double[nr_class];
		*res = svm_predict_probability(model,m_pTestdata,prob_estimates);
		delete[] prob_estimates;   // allocated with new[], so use array delete
	}
	else
	{
		*res = svm_predict(model,m_pTestdata);
	}
	return true;
}
Example #27
		Image::Candidate::Assessments SvmOneVsAll::match(const Image::Candidate& query) const
		{
			LibSVM::NodeArray node_list = buildNodeArray(getDescriptor(query));
			LibSVM::scale(scaling, node_list);

			OmpStream(cout) << "matching query" << endl;
			
			Image::Candidate::Assessments assessments;
			for (const auto& model_group : models_by_name)
			{
				const auto& model = get<0>(model_group.second);
				double svm_out = svm_predict(model, node_list.getPtr());

				Image::Candidate::Assessment assessment;
				assessment.name = model_group.first;
				assessment.score = - svm_out;
				assessments.push_back(assessment);
			}
			return assessments;
		}
Example #28
		Label PredictModel::CPredictModel::predict(const Array<std::pair<int32, double>>& vector) const
		{
			if (!m_model)
			{
				return Math::NaN;
			}

			Array<svm_node> node(vector.size() + 1);

			for (int32 i = 0; i < static_cast<int32>(vector.size()); ++i)
			{
				node[i].index = vector[i].first;

				node[i].value = vector[i].second;
			}

			node.back().index = -1;

			return svm_predict(m_model, node.data());
		}
Example #29
static void predictAndCount(svm_model *model, WindowFile &file, int &nA, int &nB)
{
    const qint32 samples = getNumSamples(file);
    float *buf = new float[samples];
    SVMNodeList nodelist(samples);

    nA = nB = 0;

    while(file.nextChannel()) {
        assert(file.getEventSamples() == samples);
        file.read((char*)buf, samples*sizeof(float));
        nodelist.fill(buf);
        if(svm_predict(model, nodelist) > 0)
            ++nA;
        else
            ++nB;
    }

    delete[] buf;
    file.rewind();
}
Example #30
int Clasificador::saida_svm(struct svm_model *svm, float *x, float *media, float *desv) 
{
	unsigned int  i;
	int y;
	struct svm_node  t[1 + N_ENTRADAS];

	for(i = 0; i < N_ENTRADAS; i++) {
		t[i].index = i; 
		if(desv[i]) {
			t[i].value = (x[i] - media[i])/desv[i];
			//cout<<x[i]<<" ";
		} else {
			t[i].value = x[i];
		}
	}
	//cout<<endl;
	t[N_ENTRADAS].index = -1;
	y = svm_predict(svm, t);

	return(y);
}