Example #1
/**
 * @brief Predict with SVM for a given vector.
 * @param type Type of feature vector, i.e. which SVM model to use (1, 2, ...)
 * @param vec Feature vector for the SVM.
 * @param prob Probability of correct prediction for each class.
 * @return Returns the prediction label
 */
bool SVMPredictorSingle::process(int type, std::vector<double> &vec, std::vector<double> &prob)
{
  int svm_type = svm_get_svm_type(model);
  int nr_class = svm_get_nr_class(model);
  double *prob_estimates = NULL;
  int j;

  if(predict_probability) {
    if (svm_type == NU_SVR || svm_type == EPSILON_SVR)
      printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n", svm_get_svr_probability(model));
    else
      prob_estimates = (double *) malloc(nr_class*sizeof(double));
  }

  // copy the feature vector into the svm_node array (1-based indices, terminated by index = -1)
  double predict_label;
  for(unsigned idx = 0; idx < vec.size(); idx++) {
    node[idx].index = idx+1;
    node[idx].value = vec[idx];
    node[idx+1].index = -1;
  }

  if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC)) {
    predict_label = svm_predict_probability(model, node, prob_estimates);
    for(j=0;j<nr_class;j++)
      prob.push_back(prob_estimates[j]);
  }
  else
    predict_label = svm_predict(model, node);

  if(predict_probability)
    free(prob_estimates);
  
  return predict_label;
}
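The same calling pattern recurs in most of the examples that follow: fill an svm_node array with 1-based indices, terminate it with index = -1, and call svm_predict_probability only for classification models trained with probability estimates. A minimal, self-contained sketch of that pattern (assuming the model was already loaded with svm_load_model; the names below are illustrative, not taken from the example above):

#include <vector>
#include "svm.h"

// Sketch: predict a single feature vector with an already-loaded libsvm model.
double predict_one(const svm_model *model, const std::vector<double> &features,
                   std::vector<double> &prob)
{
  // libsvm expects 1-based indices and a terminating node with index = -1.
  std::vector<svm_node> nodes(features.size() + 1);
  for (size_t i = 0; i < features.size(); ++i) {
    nodes[i].index = static_cast<int>(i) + 1;
    nodes[i].value = features[i];
  }
  nodes[features.size()].index = -1;

  int svm_type = svm_get_svm_type(model);
  int nr_class = svm_get_nr_class(model);

  // Probability estimates only make sense for classification models that
  // were trained with probability information (-b 1).
  if ((svm_type == C_SVC || svm_type == NU_SVC) && svm_check_probability_model(model)) {
    prob.resize(nr_class);
    return svm_predict_probability(model, nodes.data(), prob.data());
  }
  return svm_predict(model, nodes.data());
}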
  	virtual int C() {
#ifdef HAVE_LIBSVM
  		return svm_get_nr_class(model);
#else
  		return 0;
#endif	
	  }
// We want to compute the average accuracy across categories,
// i.e. avarage of diagonal entries in the confusion matrix
// This is a helper function for this purpose, by Jianxin Wu
int FindLabel(const int v,const int* labels)
{
	int i;
	for(i=0;i<svm_get_nr_class(model);i++)
		if(labels[i]==v)
			return i;
    printf("There are error(s) in the data (unknown category name).");
    exit(-1);
}
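Example #17 below uses FindLabel to turn the raw class label of each test example into an index into the model's label array, so per-class hits can be tallied. A condensed sketch of that bookkeeping (the vector names are illustrative; the per-class logic mirrors Example #17, and FindLabel is the helper defined above):

#include <vector>
#include "svm.h"

// Sketch: average accuracy across categories, i.e. the mean of the per-class
// accuracies (diagonal of the confusion matrix), as computed in Example #17.
double average_accuracy(const svm_model *model,
                        const std::vector<double> &targets,
                        const std::vector<double> &predictions)
{
	int nr_class = svm_get_nr_class(model);
	std::vector<int> labels(nr_class);
	svm_get_labels(model, labels.data());

	std::vector<int> correct_sub(nr_class, 0), total_sub(nr_class, 0);
	for (size_t i = 0; i < targets.size(); ++i) {
		int idx = FindLabel((int)targets[i], labels.data()); // index of this label in the model
		total_sub[idx]++;
		if (predictions[i] == targets[i])
			correct_sub[idx]++;
	}

	double sum = 0.0;
	int nonempty = 0;
	for (int c = 0; c < nr_class; ++c)
		if (total_sub[c] > 0) {
			sum += (double)correct_sub[c] / total_sub[c];
			++nonempty;
		}
	return nonempty ? sum / nonempty : 0.0;
}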
Example #4
int cLibsvmLiveSink::myFinaliseInstance()
{
  int ap=0;

  int ret = cDataSink::myFinaliseInstance();
  if (ret==0) return 0;
  
  // TODO: binary model files...
  // load model
  SMILE_MSG(2,"loading LibSVM model for instance '%s' ...",getInstName()); 
  if((model=svm_load_model(modelfile))==0) {
    COMP_ERR("can't open libSVM model file '%s'",modelfile);
  }

  nClasses = svm_get_nr_class(model);
  svmType = svm_get_svm_type(model);

  if(predictProbability) {
    if ((svmType==NU_SVR) || (svmType==EPSILON_SVR)) {
      nClasses = 0;
      SMILE_MSG(2,"LibSVM prob. model (regression) for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g",svm_get_svr_probability(model));
    } else {
      labels=(int *) malloc(nClasses*sizeof(int));
      svm_get_labels(model,labels);

      SMILE_MSG(3,"LibSVM %i labels in model '%s':",nClasses,modelfile);
      int j;
      for(j=0;j<nClasses;j++)
        SMILE_MSG(3,"  Label[%i] : '%d'",j,labels[j]);
	}
  }

  //?? move this in front of above if() block ?
  if ((predictProbability)&&(nClasses>0))
    probEstimates = (double *) malloc(nClasses*sizeof(double));

  // load scale
  if((scale=svm_load_scale(scalefile))==0) {
	COMP_ERR("can't open libSVM scale file '%s'",scalefile);
  }

  // load selection
  loadSelection(fselection);

  //TODO: check compatibility of getLevelN() (possibly after selection), number of features in model, and scale
  
  if (nClasses>0) {
    // load class mapping
    loadClasses(classes);
  } else {
    if (classes != NULL) SMILE_IWRN(2,"not loading given class mapping file for regression SVR model (there are no classes...)!");
  }

  return ret;
}
int predict(double **values, int **indices, int rowNum, int *arrayNcol, int *labels, double **prob_estimates, int isProb)
{
    int svm_type=svm_get_svm_type(model);
    int nr_class=svm_get_nr_class(model);
    int j;
    LOGD("isProb:%d", isProb);
    if(isProb)
    {
        if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
            LOGD("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
        else
        {
            int *labels=(int *) malloc(nr_class*sizeof(int));
            svm_get_labels(model,labels);
            //	fprintf(output,"labels");
            //	for(j=0;j<nr_class;j++)
            //		fprintf(output," %d",labels[j]);
            //	fprintf(output,"\n");
            free(labels); // the label array is only needed for the (commented-out) dump above
        }
    }

    // each record will receive
    // a predicted label and a [nClass]-D probability estimate array
    for (int i = 0; i < rowNum; i++)
    {
        int nCol = arrayNcol[i];
        double target_label = 0;
        int predict_label=0;
        x = (struct svm_node *) realloc(x,(nCol+1)*sizeof(struct svm_node));

        for (int j = 0; j < nCol; j++)
        {
            x[j].index = indices[i][j];
            x[j].value = values[i][j];
        }
        x[nCol].index = -1;

        // Probability prediction
        if (isProb && (svm_type==C_SVC || svm_type==NU_SVC))
        {
            // prob_estimate[rowNum][nClass]
            labels[i] = svm_predict_probability(model,x,prob_estimates[i]);
        }
        // without probability
        else
        {
            labels[i] = svm_predict(model,x);
        }
    } // For

    return 0;
}
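A caller of the batch predict() above has to pre-allocate one label slot and one nr_class-sized probability row per record. A hypothetical driver (all names and sizes below are illustrative, not from the original source) could look like this:

#include <cstdlib>
#include "svm.h"

extern struct svm_model *model;   // loaded elsewhere, as in the example above
int predict(double **values, int **indices, int rowNum, int *arrayNcol,
            int *labels, double **prob_estimates, int isProb);

// Hypothetical driver: two records with two sparse features each,
// probability output enabled.
void run_batch_prediction()
{
    const int rowNum = 2;
    int nr_class = svm_get_nr_class(model);

    int arrayNcol[rowNum] = {2, 2};
    int idx0[] = {1, 2}, idx1[] = {1, 3};
    double val0[] = {0.5, 1.0}, val1[] = {0.1, 2.0};
    int *indices[rowNum] = {idx0, idx1};
    double *values[rowNum] = {val0, val1};

    int labels[rowNum];
    double *prob_estimates[rowNum];
    for (int i = 0; i < rowNum; ++i)
        prob_estimates[i] = (double *) malloc(nr_class * sizeof(double));

    predict(values, indices, rowNum, arrayNcol, labels, prob_estimates, /*isProb=*/1);

    for (int i = 0; i < rowNum; ++i)
        free(prob_estimates[i]);
}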
Example #6
JNIEXPORT jint JNICALL Java_LibSVMTest_modelInitialize
        (JNIEnv *env, jclass cls, jstring modelFiles) {
    const char *modelFile = env->GetStringUTFChars(modelFiles, NULL);
    if ((model_F = svm_load_model(modelFile)) == 0) {
        fprintf(stderr, "can't open model file %s\n", modelFile);
        return -1;
    }
    env->ReleaseStringUTFChars(modelFiles, modelFile);
    env->DeleteLocalRef(modelFiles);

    svm_type=svm_get_svm_type(model_F);
    nr_class=svm_get_nr_class(model_F);
    printf("SVM Model Loaded\n");
    return 0;
}
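Example #6 loads the model into a global model_F but never shows cleanup. A matching teardown entry point might look like the sketch below (assuming libsvm 3.0 or newer, which provides svm_free_and_destroy_model; the JNI method name is illustrative):

#include <jni.h>
#include "svm.h"

extern struct svm_model *model_F;   // loaded in modelInitialize above

// Sketch of a cleanup entry point matching the loader in Example #6.
JNIEXPORT void JNICALL Java_LibSVMTest_modelRelease(JNIEnv *env, jclass cls) {
    if (model_F != NULL) {
        // Frees the model's contents and sets model_F back to NULL.
        svm_free_and_destroy_model(&model_F);
    }
}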
Example #7
double SVM::classify(struct svm_node *data, char* filename_model)
{
	double predict_label;
	double *prob_estimates = NULL;
	int svm_type;
	int nr_class;

	// load feature file only once
	if(!this->_bFeatureFileLoaded)
	{
		if((this->_svmModel = svm_load_model(filename_model))==0)
		{
			printf("can't open model file %s\n",filename_model);
			return -99;
		}

		this->_bFeatureFileLoaded = true;
	}

	svm_type = svm_get_svm_type(this->_svmModel);
	nr_class = svm_get_nr_class(this->_svmModel);

	prob_estimates = (double *) malloc(nr_class*sizeof(double));

	if (svm_type==C_SVC || svm_type==NU_SVC)
	{
		predict_label = svm_predict_probability(this->_svmModel,data,prob_estimates);

	//	printf("%g",predict_label);

		//for(int k=0; k < nr_class; k++)
			//printf(" %g",prob_estimates[k]);

		//printf("\n");
	}
	else
	{
		predict_label = svm_predict(this->_svmModel, data);

		printf("%g\n",predict_label);
	}

	free(data);
	free(prob_estimates);

	return predict_label;
}
Example #8
    //predict the expected value using the trained svm and the input
    double ML2::predictML( double velocity[])
    {
	    double error = 0;
	    
        //get info about svm using the model
	    int svm_type=svm_get_svm_type(model);
	    int nr_class=svm_get_nr_class(model);
	    double *prob_estimates=NULL;
	    int j;
        int predict_probability = param.probability;

	cout<<"svm type : "<<svm_type<<endl<<flush;
	
	cout<<"nr class : "<<nr_class<<endl<<flush;
	
	cout<<"predict probability : "<<predict_probability<<endl<<flush;
	    if(predict_probability)
	    {
		    if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			    cout<<"Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="<<svm_get_svr_probability(model)<<endl<<flush;
	
	    }
        
	    int i = 0;
	    double target_label, predict_label;
	    //allocate space for x
        int max_nr_attr = datacols + 1;
         x = Malloc(struct svm_node,max_nr_attr);
        //store each of the velocity parameter
        for( i = 0 ; i < datacols; i++){
           x[i].index = i+1;
	       x[i].value = velocity[i];
        }
        
        //end of the x
	    x[i].index = -1;
        //predict the value
	    predict_label = svm_predict(model,x);
        //free x
        free(x);
	    cout<<"prediction "<<predict_label <<endl<<flush;

	    if(predict_probability)
		    free(prob_estimates);

        return predict_label;
    }
Example #9
int predict(float **values, int **indices, int rowNum, int colNum, int *labels, double *prob_estimates, int isProb)
{
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	int j;
	
	if(isProb)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			LOGD("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
		//	fprintf(output,"labels");		
		//	for(j=0;j<nr_class;j++)
		//		fprintf(output," %d",labels[j]);
		//	fprintf(output,"\n");
			free(labels); // the label array is only needed for the (commented-out) dump above
		}
	}

        for (int i = 0; i < rowNum; i++)
        {
            double target_label, predict_label=0;
            x = (struct svm_node *) realloc(x,(colNum+1)*sizeof(struct svm_node));

            for (int j = 0; j < colNum; j++)
            {
                x[j].index = indices[i][j];
                x[j].value = values[i][j];
            }
            x[colNum].index = -1;

            // Probability prediction 
            if (isProb && (svm_type==C_SVC || svm_type==NU_SVC))
            {
                    predict_label = svm_predict_probability(model,x,prob_estimates);
                    labels[i] = (int)predict_label; // store the result for record i (not always slot 0)

            }
            else { labels[i] = svm_predict(model,x); }
	} // For

        return 0;
}
Example #10
void classifyInput(FILE *f, char *modelfile) {
	struct svm_model *model;
	struct svm_node features[NOSAMPS+1];
	int samp[NOSAMPS];
	int rmin[NOSAMPS];
	int rmax[NOSAMPS];
	double *decVals;
	int x, noLabels, noDecVals;
	double r;
	char buff[1024];

	strcpy(buff, modelfile);
	strcat(buff, ".range");
	loadRangeFile(buff, rmin, rmax);

	strcpy(buff, modelfile);
	strcat(buff, ".model");
	model=svm_load_model(buff);
	if (model==NULL) {
		printf("Couldn't load model!\n");
		exit(0);
	}
	noLabels=svm_get_nr_class(model);
	noDecVals=noLabels*(noLabels-1)/2;
	decVals=malloc(sizeof(double)*noDecVals);

	while(1) {
		getSamples(f, samp);
		for (x=0; x<NOSAMPS; x++) {
			features[x].index=x+1;	// libsvm feature indices are 1-based
			//rescale to [-1, 1]
			r=(samp[x]-rmin[x+1]);
			r=r/(rmax[x+1]-rmin[x+1]);
			r=(r*2)-1;
			features[x].value=r;	// keep index and value in the same node
//			printf("%f ", features[x].value);
		}
//		printf("\n");
		features[x].index=-1;
		r=svm_predict_values(model,features,decVals);
		printf("prediction value: %f\n", r);
//		for (x=0; x<noDecVals; x++) printf("%f ", decVals[x]);
//		printf("\n");
	}
}
Example #11
int main(){
	auto test_data = read_data("1_3");
	svm_model* model = svm_load_model("1_3_b_model");
	int nr_class = svm_get_nr_class(model);

	for (auto svmData : test_data){
		svm_node* nodes = AssignSVMNode(svmData);
		double* probEstimates = new double[nr_class];
		svm_predict_probability(model, nodes, probEstimates);
		for (int i = 0; i < nr_class; ++i){
			std::cout<<i<<":"<<probEstimates[i]<<"  ";
		}
		std::cout<<std::endl;
		delete[] probEstimates; // release the per-sample probability buffer
	}

	getchar();

	return 0;
}
Example #12
bool CmySvmArth::Sim(double* res , int& len)
{
	if(model==NULL||res==NULL)
		return false;
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	len = m_nSimDataLen;
	if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
	{
		prob_estimates = new double[nr_class];
		*res = svm_predict_probability(model,m_pTestdata,prob_estimates);
		delete[] prob_estimates; // allocated with new[], so release with delete[]
	}
	else
	{
		*res = svm_predict(model,m_pTestdata);
	}
	return true;
}
HandTranslating::HandTranslating(QObject *parent) :
    QThread(parent)
{
    databaseFolderPath = "../Database/Main";
    modelFile = "databaseMain.model";
    string model_file_name = databaseFolderPath + "/" + modelFile;
    if((model=svm_load_model(model_file_name.c_str()))==0)
    {
        STOP = true;
        cout << "ko mo duoc model\n";
    } else {
        svm_type = svm_get_svm_type(model);
        nr_class = svm_get_nr_class(model);
        labels=(int *) malloc(nr_class*sizeof(int));
        svm_get_labels(model,labels);
        prob_estimates = (double *) malloc(nr_class * sizeof(double));
        STOP = false;
        enableToTranslate = false;
    }
}
Example #14
int SVMPredict2(struct svm_model* model, float *descript_vector, double *prob_est, int size)// prob_est is a pointer to array
{
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type = svm_get_svm_type(model);
	int nr_class = svm_get_nr_class(model);
	double *prob_estimates = NULL;
	int j;

	int i = 0;
	double target_label, predict_label;
	char *idx, *val, *label, *endptr;
	int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

	int n = 1;
	while (n < size + 1)	// leave room for the terminating node written at x[size]
		n *= 2;
	struct svm_node* x = (struct svm_node *) malloc(n*sizeof(struct svm_node));

	for (int i = 0; i < size; i++)
	{
		x[i].index = i + 1;
		x[i].value = descript_vector[i];
	}
	x[size].index = -1;
	if (prob_est != NULL)
		predict_label = svm_predict_probability(model, x, prob_est);
	else
		predict_label = svm_predict(model, x);

	if (prob_est != NULL)
		*prob_est = -1;
	free(x);
	return predict_label;
}
Example #15
void predict(FILE *input, FILE *output, FILE *output_w2, FILE *output_prob,int predict_probability, FILE *addr, int Ncol, int Nlig, long Npolar)
{
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	int i,j,k,l,num_block,p,n,count;
	int Nband[Npolar];
	long n_addr = 0,m=0, n_total_band, offset=0;
	long max_num_pixel, num_rest_pixel, n_img_pixel = (long)Ncol * (long)Nlig;
	float buff_float, zero = 0;
	float **V_pol_rest, **V_pol_block, *v_pol_buff;
	long int *tab_addr, buff_long_int;
	float *output_label;
	double *w2_predict;
	float **w2_predict_out, *mean_dist, *max_prob, **prob_estimates_out;
	
	
	max_num_pixel = (long)floor(memory_pixel /((long)4 * Npolar));
	
	for(i=0; i< n_img_pixel; i++){// We initialize the output classification file with '0' values
		fwrite(&zero, sizeof(float), 1, output);
	}
	rewind(output);
	
/*	for(i=0; i< n_img_pixel*(nr_class*(nr_class-1)/2 +1) ; i++){// We initiaite the output classification file with '0' value
		fwrite(&zero, sizeof(float), 1, output_w2);
	}
	rewind(output_w2);
*/
	if(predict_probability)
	{
		printf("Predict prob!!!!\n");
		for(i=0; i< n_img_pixel*(nr_class + 1) ; i++){// We initialize the output classification file with '0' values
			fwrite(&zero, sizeof(float), 1, output_prob);
		}
		rewind(output_prob);
		
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			//fprintf(output,"labels");		
			//for(j=0;j<nr_class;j++)
			//	printf(" %d\n",labels[j]);
				//fprintf(output," %d",labels[j]);
			//fprintf(output,"\n");
			free(labels);
		}
	}

	while(fread(&buff_long_int, sizeof(long int), 1, addr)!=0){ // We read the addr file to know the number of pixels to be classified
		n_addr++;
	}
	n_addr = n_addr - Npolar;
	rewind(addr);
	tab_addr = (long int *) malloc((long int) (n_addr) * sizeof(long int));

	for(i=0; i<Npolar;i++){
		fread(&buff_long_int, sizeof(long int), 1, addr);
		Nband[i] = (int)buff_long_int;
	}
	i=0;


	while(fread(&tab_addr[i], sizeof(long int), 1, addr)!=0){ // We read the addr of each classified pixel
//hde
//printf("tab_addr[%d]:%d\n",i,tab_addr[i]);

		i++;

	}
//hde
int min,max;
min=tab_addr[0];
max=min;
for (i=0;i<n_addr;i++)
{
	if (tab_addr[i]>max) {max=tab_addr[i];}
	if (tab_addr[i]<min) {min=tab_addr[i];}
}
printf("min:%d; max:%d\n",min,max);
// end HDE

	// We compute the necessary number of block and remaining pixel
	num_block = (int)floor(n_addr /max_num_pixel);
	num_rest_pixel = n_addr - num_block * max_num_pixel;
	printf("num_block: %i\n",num_block);

	///////////////////////////////////////////////// Add a loop reading the float input to compute the
	// initial total number of bands, so we can read by band blocks and then extract the bands of interest
	while(fread(&buff_float, sizeof(float), 1, input)!=0){ 
		m++;
	}
	rewind(input);
	
	if(num_block > 0){//Loop on the block
		V_pol_block = matrix_float((int)max_num_pixel,(int)Npolar);
		output_label = vector_float((int)max_num_pixel);
	//	mean_dist = vector_float((int)max_num_pixel);
	//	w2_predict_out = matrix_float((int)max_num_pixel,nr_class*(nr_class-1)/2);
		prob_estimates_out = matrix_float((int)max_num_pixel,nr_class);
		max_prob = vector_float((int)max_num_pixel);
		for (i=0; i<num_block; i++){	// process by blocks to optimize disk access
			for (j=0; j< max_num_pixel; j++){
				for (k=0; k<Npolar; k++){	
					// point to pixel (col, lig) in band k
					offset = (long)(sizeof(float)) * (Nband[k] * n_img_pixel + tab_addr[i*max_num_pixel + j]);
					fseek(input, offset, SEEK_SET); // move the file pointer to the offset computed above
					fread(&V_pol_block[j][k], sizeof(float), 1, input);

				}
			}
			for (j=0; j< max_num_pixel; j++){
				for (k=0; k<Npolar; k++){
					if(k>=max_nr_attr-1)	// need one more for index = -1
					{
						max_nr_attr *= 2;
						x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
					}
					x[k].index = k+1;
					x[k].value = scale(k, (double)V_pol_block[j][k]);
				}
				x[k].index = -1;
				double predict_label;
				
				if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
				{
					//printf("Predict prob if block!!!!\n");
					predict_label = svm_predict_probability(model,x,prob_estimates);
					
					w2_predict = svm_w2(model,x);				
					
					output_label[j] = (float)predict_label;
					
	//				mean_dist[j] = 0.;
					count = 0;
					l=0;
					max_prob[j] = 0.;
					for(p=0;p<nr_class;p++){
						for(n=p+1;n<nr_class;n++){
		//					w2_predict_out[j][l] =(float)w2_predict[l];
						//	printf("w2_predict[%i] : %g\n",w2_predict[l]);
							if(p+1 == output_label[j] || n+1 == output_label[j]){
			//					mean_dist[j] = mean_dist[j] + w2_predict_out[j][l];
								count = count + 1;
							}
							l++;
						}
						prob_estimates_out[j][p] = (float)prob_estimates[p];
						if(max_prob[j] < prob_estimates_out[j][p] ){
							max_prob[j] = prob_estimates_out[j][p];
						}
					}
		//			mean_dist[j] = mean_dist[j] / (float)count;
				}
				else
				{
					predict_label = svm_predict(model,x);
					
					w2_predict = svm_w2(model,x);
					
					
					
					output_label[j] = (float)predict_label;
					
		//			mean_dist[j] = 0.;
					count = 0;
					l=0;
					for(p=0;p<nr_class;p++){
						for(n=p+1;n<nr_class;n++){
			//				w2_predict_out[j][l] =(float)w2_predict[l];
							
							if(p+1 == output_label[j] || n+1 == output_label[j]){
				//			mean_dist[j] = mean_dist[j] + w2_predict_out[j][l];
								count = count + 1;
							}
							l++;
						}
					}
		//			mean_dist[j] = mean_dist[j] / (float)count;
						
				
						
				}
			}/* pixels block */

			for (j=0; j< max_num_pixel; j++){ // write the classification file
				offset = (long)(sizeof(float)) * tab_addr[i*max_num_pixel + j];
					fseek (output, offset, SEEK_SET);
					fwrite(&output_label[j], sizeof(float), 1, output);
					
					
			}
/*			for (l=0; l< nr_class*(nr_class-1)/2; l++){ // On ecrit les cartes de distances
				for (j=0; j< max_num_pixel; j++){
					offset = (long)(sizeof(float)) * tab_addr[i*max_num_pixel + j];
					fseek(output_w2, offset + n_img_pixel * sizeof(float) * l, SEEK_SET);
					fwrite(&w2_predict_out[j][l],  sizeof(float), 1, output_w2);		
				}			
			}
			
			for (j=0; j< max_num_pixel; j++){ // On ecrit la distance moyenne
				offset = (long)(sizeof(float)) * tab_addr[i*max_num_pixel + j];
				fseek(output_w2, offset + n_img_pixel * sizeof(float) * (nr_class*(nr_class-1)/2), SEEK_SET);// on ecrit la distance moyenne parmi les classif bianaire renvoyant le label final
				fwrite(&mean_dist[j],  sizeof(float), 1, output_w2);
			}
*/			
			if(predict_probability){
				//printf("Predict prob fin if block ecriture!!!!\n");
				for (l=0; l< nr_class; l++){ // write the probability maps
					for (j=0; j< max_num_pixel; j++){
						offset = (long)(sizeof(float)) * tab_addr[i*max_num_pixel + j];
						fseek(output_prob, offset + n_img_pixel * sizeof(float) * l, SEEK_SET);
						fwrite(&prob_estimates_out[j][l],  sizeof(float), 1, output_prob);		
					}			
				}
				
				for (j=0; j< max_num_pixel; j++){
						offset = (long)(sizeof(float)) * tab_addr[i*max_num_pixel + j];
						fseek(output_prob, offset + n_img_pixel * sizeof(float) * nr_class, SEEK_SET);
						fwrite(&max_prob[j],  sizeof(float), 1, output_prob);		
					}	
			}

		}/* Loop over the blocks*/
		free_matrix_float(V_pol_block, max_num_pixel);
	//	free_matrix_float(w2_predict_out, max_num_pixel);
		free_matrix_float(prob_estimates_out, max_num_pixel);
		free_vector_float(output_label);
	//	free_vector_float(mean_dist);
		free(w2_predict);
		free_vector_float(max_prob);
		printf("FREE block: %i\n",num_block);
	}else{
		printf("PAS DE BLOCK\n");
	}
	if(num_rest_pixel > 0){// Loop on the remaining pixel
		V_pol_rest = matrix_float((int)num_rest_pixel,(int)Npolar);/* Vector of polarimetrics indicators  for the remaining pixel */
		output_label = vector_float((int)num_rest_pixel);
	//	w2_predict_out = matrix_float((int)num_rest_pixel,nr_class*(nr_class-1)/2);
	//	mean_dist = vector_float((int)num_rest_pixel);
		prob_estimates_out = matrix_float((int)num_rest_pixel,nr_class);
		max_prob = vector_float((int)num_rest_pixel); /* sized like the other per-pixel buffers in this branch */
		for (j=0; j< num_rest_pixel;j++){
			for (k=0; k<Npolar; k++){	
				offset = (long)(sizeof(float)) * (Nband[k] * n_img_pixel + tab_addr[num_block*max_num_pixel + j]);
				fseek (input, offset , SEEK_SET);
				fread(&V_pol_rest[j][k], sizeof(float), 1, input);
				
//hde
//printf("valeur pixel a adresse %d et pour bande %d: %d\n", tab_addr[num_block*max_num_pixel + j],k,V_pol_rest[j][k]);
			}
		}
		for (j=0; j< num_rest_pixel;j++){
			for (k=0; k<Npolar; k++){
				if(k>=max_nr_attr-1)	// need one more for index = -1
				{
					max_nr_attr *= 2;
					x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
				}
				x[k].index = k+1;
				x[k].value = scale(k, (double)V_pol_rest[j][k]);
			}
			x[k].index = -1;
			double predict_label;
			if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
			{
				//printf("Predict prob if fin apres block!!!!\n");
				predict_label = svm_predict_probability(model,x,prob_estimates);
				 w2_predict = svm_w2(model,x);				
					
				output_label[j] = (float)predict_label;
					
		//		mean_dist[j] = 0.;
				count = 0;
				l=0;
				max_prob[j] = 0.;
				for(p=0;p<nr_class;p++){
					for(n=p+1;n<nr_class;n++){
				//		w2_predict_out[j][l] =(float)w2_predict[l];
					//	printf("w2_predict[%i] : %g\n",l,w2_predict[l]);
					//	printf("w2_predict_out[%i][%i] : %g\n",j,l,w2_predict_out[j][l]);
						if(p+1 == output_label[j] || n+1 == output_label[j]){
			//				mean_dist[j] = mean_dist[j] + w2_predict_out[j][l];
							count = count + 1;
						}
						l++;
					}
					prob_estimates_out[j][p] = (float)prob_estimates[p];
					if(max_prob[j] < prob_estimates_out[j][p] ){
							max_prob[j] = prob_estimates_out[j][p];
						}
				}
		//		mean_dist[j] = mean_dist[j] / (float)count;
				//printf("mean_dist[%i],%f\n",j,mean_dist[j]);
			}
			else
			{
				predict_label = svm_predict(model,x);
				
				w2_predict = svm_w2(model,x);
					
				
				
				output_label[j] = (float)predict_label;
					
		//		mean_dist[j] = 0.;
				count = 0;
				l=0;
				for(p=0;p<nr_class;p++){
					for(n=p+1;n<nr_class;n++){
			//			w2_predict_out[j][l] =(float)w2_predict[l];
						
						if(p+1 == output_label[j] || n+1 == output_label[j]){
		//					mean_dist[j] = mean_dist[j] + w2_predict_out[j][l];
							count = count + 1;
						}
						l++;
					}
				}
		//		mean_dist[j] = mean_dist[j] / (float)count;
			}
		}
		for (j=0; j< num_rest_pixel; j++){
				offset = (long)(sizeof(float)) * (tab_addr[num_block*max_num_pixel + j]);
					fseek (output, offset, SEEK_SET);
					fwrite(&output_label[j], sizeof(float), 1, output);
					
					
			}
/*			for (l=0; l< nr_class*(nr_class-1)/2; l++){
				for (j=0; j< num_rest_pixel; j++){
					offset = (long)(sizeof(float)) * (tab_addr[num_block*max_num_pixel + j]);
					fseek(output_w2, offset + n_img_pixel * sizeof(float) * l, SEEK_SET);
					fwrite(&w2_predict_out[j][l],  sizeof(float), 1, output_w2);		
				}			
			}
			
			for (j=0; j< num_rest_pixel; j++){
				offset = (long)(sizeof(float)) * (tab_addr[num_block*max_num_pixel + j]);
				fseek(output_w2, offset + n_img_pixel * sizeof(float) * (nr_class*(nr_class-1)/2), SEEK_SET);// on ecrit la distance moyenne parmi les classif bianaire renvoyant le label final
				fwrite(&mean_dist[j],  sizeof(float), 1, output_w2);
			}
*/			
			
			if(predict_probability){
				//printf("Predict prob if fin apres blockeCRITUR!!!!!!!\n");
				for (l=0; l< nr_class; l++){ // write the probability maps
					for (j=0; j< num_rest_pixel; j++){
						offset = (long)(sizeof(float)) * (tab_addr[num_block*max_num_pixel + j]);
						fseek(output_prob, offset + n_img_pixel * sizeof(float) * l, SEEK_SET);
						fwrite(&prob_estimates_out[j][l],  sizeof(float), 1, output_prob);	
						//printf("prob_estimates_out[%i][%i] : %f\n", j, l, prob_estimates_out[j][l]);	
					}			
				}
				for (j=0; j< num_rest_pixel; j++){
						offset = (long)(sizeof(float)) * (tab_addr[num_block*max_num_pixel + j]);
						fseek(output_prob, offset + n_img_pixel * sizeof(float) * nr_class, SEEK_SET);
						fwrite(&max_prob[j],  sizeof(float), 1, output_prob);	
					}	
				
			}

		free_matrix_float(V_pol_rest, num_rest_pixel);
	//	free_matrix_float(w2_predict_out, num_rest_pixel);
		free_matrix_float(prob_estimates_out, num_rest_pixel);
		free_vector_float(output_label);
		free(w2_predict);
	//	free_vector_float(mean_dist);
		free_vector_float(max_prob);
	}/* remaining pixels */
	free(tab_addr);	
	
	if(predict_probability)
		free(prob_estimates);
}
Example #16
void predict(FILE *input, FILE *output)
{
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	int j;

	if(predict_probability)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			fprintf(output,"labels");		
			for(j=0;j<nr_class;j++)
				fprintf(output," %d",labels[j]);
			fprintf(output,"\n");
			free(labels);
		}
	}

	max_line_len = 1024;
	line = (char *)malloc(max_line_len*sizeof(char));
	while(readline(input) != NULL)
	{
		int i = 0;
		double target_label, predict_label;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

		label = strtok(line," \t");
		target_label = strtod(label,&endptr);
		if(endptr == label)
			exit_input_error(total+1);

		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;
			errno = 0;
			x[i].index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || x[i].index <= inst_max_index)
				exit_input_error(total+1);
			else
				inst_max_index = x[i].index;

			errno = 0;
			x[i].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
				exit_input_error(total+1);

			++i;
		}
		x[i].index = -1;

		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
		{
			predict_label = svm_predict_probability(model,x,prob_estimates);
			fprintf(output,"%g",predict_label);
			for(j=0;j<nr_class;j++)
				fprintf(output," %g",prob_estimates[j]);
			fprintf(output,"\n");
		}
		else
		{
			predict_label = svm_predict(model,x);
			fprintf(output,"%g\n",predict_label);
		}

		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		printf("Mean squared error = %g (regression)\n",error/total);
		printf("Squared correlation coefficient = %g (regression)\n",
		       ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
		       ((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
		       );
	}
	else
		printf("Accuracy = %g%% (%d/%d) (classification)\n",
		       (double)correct/total*100,correct,total);
	if(predict_probability)
		free(prob_estimates);
}
Example #17
void predict(FILE *input, FILE *output)
{
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	int j;
    
    // This block by Jianxin Wu, for average accuracy computation
    int ii,label_index;
    // number of correct predictions in each category
	int* correct_sub = (int *)malloc(nr_class*sizeof(int));
	for(ii=0;ii<nr_class;ii++) correct_sub[ii] = 0;
    // number of testing examples in each category
	int* total_sub = (int *)malloc(nr_class*sizeof(int));
	for(ii=0;ii<nr_class;ii++) total_sub[ii] = 0;
	int* labels_avg = (int*)malloc(nr_class*sizeof(int));
	svm_get_labels(model,labels_avg);

	if(predict_probability)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			fprintf(output,"labels");		
			for(j=0;j<nr_class;j++)
				fprintf(output," %d",labels[j]);
			fprintf(output,"\n");
			free(labels);
		}
	}

	max_line_len = 1024;
	line = (char *)malloc(max_line_len*sizeof(char));
	while(readline(input) != NULL)
	{
		int i = 0;
		double target_label, predict_label;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

		label = strtok(line," \t");
		target_label = strtod(label,&endptr);
		if(endptr == label)
			exit_input_error(total+1);

		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;
			errno = 0;
			x[i].index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || x[i].index <= inst_max_index)
				exit_input_error(total+1);
			else
				inst_max_index = x[i].index;

			errno = 0;
			x[i].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
				exit_input_error(total+1);

			++i;
		}
		x[i].index = -1;

		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
		{
			predict_label = svm_predict_probability(model,x,prob_estimates);
			fprintf(output,"%g",predict_label);
			for(j=0;j<nr_class;j++)
				fprintf(output," %g",prob_estimates[j]);
			fprintf(output,"\n");
		}
		else
		{
			predict_label = svm_predict(model,x);
			fprintf(output,"%g\n",predict_label);
		}

        // This block by Jianxin Wu, for average accuracy
        label_index = FindLabel((int)target_label,labels_avg);
		total_sub[label_index]++;
		if(predict_label == target_label) correct_sub[label_index]++;

		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		printf("Mean squared error = %g (regression)\n",error/total);
		printf("Squared correlation coefficient = %g (regression)\n",
		       ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
		       ((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
		       );
	}
	else
		printf("Accuracy = %g%% (%d/%d) (classification)\n",
		       (double)correct/total*100,correct,total);
	if(predict_probability)
		free(prob_estimates);
        
    // This block (till end of function) by Jianxin Wu
    // Print per-category accuracy and average accuracy of categories
    double sub_score = 0;
    int nonempty_category = 0;
	for(ii=0;ii<nr_class;ii++)
	{
		if(total_sub[ii]>0)
        {
            sub_score += (correct_sub[ii]*1.0/total_sub[ii]);
            nonempty_category++;
        }
	}
    printf("-----------\n");
    for(ii=0;ii<nr_class;ii++)
    {
        printf("%d / %d (Category %d)\n",correct_sub[ii],total_sub[ii],labels_avg[ii]);
    }
    printf("-----------\n");
	printf("Mean Accuray across classes = %g%%\n",sub_score*100.0/nonempty_category);
	free(correct_sub);
	free(total_sub);
	free(labels_avg);

}
Example #18
double binary_class_cross_validation(const svm_problem *prob, const svm_parameter *param, int nr_fold)
{
	dvec_t dec_values;
	ivec_t ty;
	int *labels;

	if (nr_fold > 1)
	{
		int i;
		int *fold_start = Malloc(int,nr_fold+1);
		int l = prob->l;
		int *perm = Malloc(int,l);
		
		for(i=0;i<l;i++) perm[i]=i;
		for(i=0;i<l;i++)
		{
			int j = i+rand()%(l-i);
			std::swap(perm[i],perm[j]);
		}
		for(i=0;i<=nr_fold;i++)
			fold_start[i]=i*l/nr_fold;

		for(i=0;i<nr_fold;i++)
		{
			int                begin   = fold_start[i];
			int                end     = fold_start[i+1];
			int                j,k;
			struct svm_problem subprob;

			subprob.l = l-(end-begin);
			subprob.x = Malloc(struct svm_node*,subprob.l);
			subprob.y = Malloc(double,subprob.l);

			k=0;
			for(j=0;j<begin;j++)
			{
				subprob.x[k] = prob->x[perm[j]];
				subprob.y[k] = prob->y[perm[j]];
				++k;
			}
			for(j=end;j<l;j++)
			{
				subprob.x[k] = prob->x[perm[j]];
				subprob.y[k] = prob->y[perm[j]];
				++k;
			}
			struct svm_model *submodel = svm_train(&subprob,param);
			int svm_type = svm_get_svm_type(submodel);
	
			if(svm_type == NU_SVR || svm_type == EPSILON_SVR){
				fprintf(stderr, "wrong svm type");
				exit(1);
			}

			labels = Malloc(int, svm_get_nr_class(submodel));
			svm_get_labels(submodel, labels);

			if(svm_get_nr_class(submodel) > 2) 
			{
				fprintf(stderr,"Error: the number of class is not equal to 2\n");
				exit(-1);
			}

			dec_values.resize(end);
			ty.resize(end);

			for(j=begin;j<end;j++) {
				svm_predict_values(submodel,prob->x[perm[j]], &dec_values[j]);
				ty[j] = (prob->y[perm[j]] > 0)? 1: -1;
			}


			if(labels[0] <= 0) {
				for(j=begin;j<end;j++)
					dec_values[j] *= -1;
			}
	
			svm_free_and_destroy_model(&submodel);
			free(subprob.x);
			free(subprob.y);
			free(labels);
		}		

		free(perm);
		free(fold_start);
	}
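The snippet above is truncated right after the decision values and ±1 labels are collected for each fold. A typical continuation (this is a sketch of one common use, not the original author's code) ranks the decision values to compute the area under the ROC curve:

#include <algorithm>
#include <cstddef>
#include <vector>

// Sketch: ROC AUC from per-example decision values and +/-1 labels,
// assuming dvec_t / ivec_t are std::vector<double> / std::vector<int>.
static double auc_from_decision_values(const std::vector<double> &dec_values,
                                       const std::vector<int> &ty)
{
	std::vector<size_t> order(dec_values.size());
	for (size_t i = 0; i < order.size(); ++i) order[i] = i;
	// Sort example indices by decreasing decision value.
	std::sort(order.begin(), order.end(),
	          [&](size_t a, size_t b) { return dec_values[a] > dec_values[b]; });

	double tp = 0, fp = 0, ordered_pairs = 0;
	for (size_t k = 0; k < order.size(); ++k) {
		if (ty[order[k]] > 0)
			tp += 1;                 // one more positive ranked so far
		else {
			fp += 1;
			ordered_pairs += tp;     // positives ranked above this negative
		}
	}
	return (tp > 0 && fp > 0) ? ordered_pairs / (tp * fp) : 0.0;
}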
Example #19
int predict(double *coef, int target_label, int fv_size)
{
    int correct = 0;
    int total = 0;
    double error = 0;
    double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;
    int i;
    int predict_label;
    struct svm_node *x;
    //struct svm_model *model,*createmodel();

    //model=svm_load_model("training_data.model");
    //model = createmodel();
    int svm_type=svm_get_svm_type(model);
    int nr_class=svm_get_nr_class(model);
    double *prob_estimates=NULL;
    int j;

    x = (struct svm_node *) malloc((fv_size+1)*sizeof(struct svm_node));

    for (i=0; i<fv_size; i++) {
        x[i].value = coef[i];
        x[i].index = i+1;
    }
    x[i].index = -1;


    predict_label = svm_predict(model,x);


    if(predict_label == target_label)
        ++correct;
    error += (predict_label-target_label)*(predict_label-target_label);
    sump += predict_label;
    sumt += target_label;
    sumpp += predict_label*predict_label;
    sumtt += target_label*target_label;
    sumpt += predict_label*target_label;
    ++total;

    if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
    {
        //info("Mean squared error = %g (regression)\n",error/total);
        //info("Squared correlation coefficient = %g (regression)\n",
        //	((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
        //	((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
        //	);
    }
    else
    {
        //info("Accuracy = %g%% (%d/%d) (classification)\n",
        //(double)correct/total*100,correct,total);
    }

    if(predict_probability)
        free(prob_estimates);

    //for(i=0; i<(model->l); i++){
    //        free(model->SV[i]);
    //        model->SV[i]=NULL;
    //}

    //svm_free_and_destroy_model(&model);
    free(x);

    return(predict_label);
}
Example #20
static VALUE cModel_classes(VALUE obj) 
{
  const struct svm_model *model;
  Data_Get_Struct(obj, struct svm_model, model);
  return INT2NUM(svm_get_nr_class(model));
}
Example #21
void predict(int nlhs, mxArray *plhs[], const mxArray *prhs[], struct svm_model *model)
{
	int feature_number, testing_instance_number;
	int instance_index;
	double *ptr_instance; 
	double *ptr_prob_estimates;
	
	mxArray *tplhs; // temporary storage for plhs[]

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);

	// prhs[0] = testing instance matrix
	feature_number = (int)mxGetM(prhs[0]);
	testing_instance_number = (int)mxGetN(prhs[0]);

	ptr_instance = mxGetPr(prhs[0]);

	tplhs = mxCreateDoubleMatrix(testing_instance_number, nr_class, mxREAL);

	ptr_prob_estimates = mxGetPr(tplhs);    
    
    int freeInstance = 0;
    
	#pragma omp parallel
    {
    struct svm_node *x = (struct svm_node*)malloc((feature_number+1)*sizeof(struct svm_node) );
    double *prob_estimates = (double *) malloc(nr_class*sizeof(double));
    int i,base;
    double predict_label; 
    
    int instance_index;
    while (true) {
        #pragma omp critical 
        {
          instance_index = freeInstance;
          freeInstance++;
        }

        if (instance_index >= testing_instance_number)
            break;    
        
        base = feature_number*instance_index;
        for(i=0;i<feature_number;i++)
        {
            x[i].index = i+1;
            x[i].value = ptr_instance[base + i];
        }
        x[feature_number].index = -1;
        
        predict_label = svm_predict_probability(model, x, prob_estimates);

        for(i=0;i<nr_class;i++)
            ptr_prob_estimates[instance_index + i * testing_instance_number] = prob_estimates[i];
    }
    free(x);
    free(prob_estimates);
    }
		
    plhs[0] = tplhs;

}
Example #22
double * predict(FILE *input, int &size)
{
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	size = 0;
	int tmp;
	while ( (tmp=fgetc(input)) != EOF) {
				if (tmp == '\n')
				  size++;
	}
	rewind(input);
	double * res = (double *) malloc (size*sizeof(double));
	target = (double *) malloc (size*sizeof(double));


	max_line_len = 1024;
	line = (char *)malloc(max_line_len*sizeof(char));
	int j = 0;
	while(readline(input) != NULL)
	{
		int i = 0;
		double target_label, predict_label;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

		label = strtok(line," \t\n");
		if(label == NULL) // empty line
			exit_input_error(total+1);

		target_label = strtod(label,&endptr);
		if(endptr == label || *endptr != '\0')
			exit_input_error(total+1);

		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;
			errno = 0;
			x[i].index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || x[i].index <= inst_max_index)
				exit_input_error(total+1);
			else
				inst_max_index = x[i].index;

			errno = 0;
			x[i].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
				exit_input_error(total+1);

			++i;
		}
		x[i].index = -1;

		predict_label = svm_predict(model,x);
		res[j] = predict_label;
		target[j] = target_label;
		j++;
	}
	return res;
}
Example #23
//
// Called from javascript to do prediction
//  This method will call local prediction function
//
void SVMPredict::predict(const v8::FunctionCallbackInfo<v8::Value>& args){

  //v8::Isolate* isolate = args.GetIsolate();//v8::Isolate::GetCurrent();
  //v8::EscapableHandleScope scope(isolate);
  // arguments: 1
  //    Json: { id:"", value:{1:0.1, 2:3.5, ...} }
  // instance of SVMPredict
  //
  if (args.Length()!=1) {
    args.GetIsolate()->ThrowException(
                                      v8::String::NewFromUtf8(args.GetIsolate(), "Wrong arguments"));
    //return;
  }
  // Check arguments type: Null or undefined
  if (args[0]->IsNull() || args[0]->IsUndefined()) {
    args.GetIsolate()->ThrowException(
                                      v8::String::NewFromUtf8( args.GetIsolate(),
                                                              "The Input variable is Null or Undefined"));
    //return;
  }
  v8::Local<v8::Object> lobj = args.Holder();
  if (lobj.IsEmpty()) {
    std::cout<<"Input arguments is empty" <<std::endl;
  }
  SVMPredict* predictor =
  node::ObjectWrap::Unwrap<SVMPredict>(lobj);

  v8::Isolate* isolate = predictor->GetIsolate();
  v8::EscapableHandleScope scope(isolate);

#if defined(PROCESS_DEBUG)
  log(" is called.");
  //CW_ASSERT(&lobj==NULL);
  //CW_ASSERT(predictor==NULL);
#endif

  if (predictor==NULL) {
    std::cout<<"Failed to unwrap predictor: NULL"<<std::endl;
    return;
  }
#if defined(PROCESS_DEBUG)
  std::cout<<"Function:"<<__FUNCTION__<<" Success to unwrap predictor: "
  <<predictor->isModelLoaded<<std::endl;
#endif

  if (!predictor->isModelLoaded) {
    // Model file is not loaded.
    ThrowError("SVM Model is not loaded.");
    //return;
  }
#if defined(PROCESS_DEBUG)
  if (predictor->isModelLoaded) {
    log(" svm model is ready.");
  }
#endif
  struct svm_model* model = predictor->model;
  int nr_class=svm_get_nr_class(predictor->model);
  //svm_save_model(std::string("model").c_str(),predictor->model);
  if (args[0]->IsObject()){
    // single
    v8::Local<v8::Object> obj = v8::Local<v8::Object>::Cast(args[0]);
    // Data conversion
    struct svm_node *prob_node = predictor->FromJsonObj(obj);
    if (prob_node != nullptr){
      //Check field: id
      v8::Local<v8::Value> id = obj->Get(ToV8<std::string>(std::string("id")));
      //predict
      //predictor->print_node(prob_node);
      v8::Local<v8::Object> Result =
      predictor->predict_single(model, prob_node,id,nr_class);

      // free memory
      //free(prob_node);<-- V8 will take it easy.
      //free(labels);
      //free(probab_estimate);
      args.GetReturnValue().Set(Result);
      //scope.Close(id);
      //scope.Close(Result);
    }else{
      ThrowError("Object does not have property id or value.");
      //scope.Escape(obj);
    }

  }else if (args[0]->IsArray()) {
    // Array of Object
    v8::Local<v8::Array> obj = v8::Local<v8::Array>::Cast(args[0]);
    // Data conversion
    struct svm_node **prob_node = nullptr;
    ///////////////////////////
    // Not finished yet
    ///////////////////////////
    //scope.Escape(obj);
  }else{
    ThrowError("Wrong arguments!");
    //scope.Escape(v8::Undefined(isolate));
  }

};
Example #24
int SVM::test(char* data_filename, char* filename_model, double &dTestError)
{
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;
	struct svm_model* svmModel;
	int svm_type;
	int nr_class;
	double *prob_estimates=NULL;
	FILE *fInputTestData = NULL;
	int max_line_len = 0;
	char *line = NULL;
	int max_nr_attr = 64;
	struct svm_node *x = NULL;

	fInputTestData = fopen(data_filename,"r");
	if(fInputTestData == NULL)
	{
		fprintf(stderr,"can't open input file %s\n",data_filename);
		return -1;
	}

	if((svmModel=svm_load_model(filename_model))==0)
	{
		fprintf(stderr,"can't open model file %s\n",filename_model);
		return -3;
	}

	svm_type=svm_get_svm_type(svmModel);
	nr_class=svm_get_nr_class(svmModel);

	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
		printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(svmModel));
	else
	{
		int *labels=(int *) malloc(nr_class*sizeof(int));
		svm_get_labels(svmModel,labels);
		prob_estimates = (double *) malloc(nr_class*sizeof(double));
		//printf("labels");
		//for(j=0;j<nr_class;j++)
		//printf(" %d",labels[j]);
		//printf("\n");
		free(labels);
	}

	max_line_len = 1024;

	while((line = this->read_line(fInputTestData, max_line_len)) != NULL)
	{
		int i = 0;
		double target_label, predict_label;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

		label = strtok(line," \t");
		target_label = strtod(label,&endptr);
		if(endptr == label)
		{
			fprintf(stderr,"Wrong input format at line %d\n", total+1);
			return -2;
		}

		x = (struct svm_node *) malloc(max_nr_attr*sizeof(struct svm_node));

		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;
			errno = 0;

			x[i].index = (int) strtol(idx,&endptr,10);

			if(endptr == idx || errno != 0 || *endptr != '\0' || x[i].index <= inst_max_index)
			{
				fprintf(stderr,"Wrong input format at line %d\n", total+1);
				return -2;
			}
			else
				inst_max_index = x[i].index;

			errno = 0;
			x[i].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
			{
				fprintf(stderr,"Wrong input format at line %d\n", total+1);
				return -2;
			}

			++i;
		}
		x[i].index = -1;

		if (svm_type==C_SVC || svm_type==NU_SVC)
		{
			predict_label = svm_predict_probability(svmModel,x,prob_estimates);
			//printf("%g",predict_label);
			//for(j=0;j<nr_class;j++)
			//	printf(" %g",prob_estimates[j]);
			//printf("\n");
		}
		else
		{
			predict_label = svm_predict(svmModel,x);
			//	printf("%g\n",predict_label);
		}

		if(predict_label == target_label)
			++correct;

		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;

		free(line);

		free(x);
	}
	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		printf("Mean squared error = %g (regression)\n",error/total);
		printf("Squared correlation coefficient = %g (regression)\n",
			   ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			   ((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			   );
	}
	else
	{
		printf("Classification Accuracy = %g%% (%d/%d)\n",
			   (double)correct/total*100,correct,total);
	}

	dTestError = 1-((double)correct/(double)total);


	fclose(fInputTestData);
	free(prob_estimates);
	svm_destroy_model(svmModel);

	return 0;

}
void predict(FILE *input, FILE *output)
{
	int correct = 0;
	int total = 0;
	double error = 0, brier_score = 0, log_likelihood = 0;
	double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	int *labels=(int *) malloc(nr_class*sizeof(int));
	double *prob_estimates=NULL;
	int j;

	if(predict_probability)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			printf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
		{
			svm_get_labels(model,labels);
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			fprintf(output,"labels");		
			for(j=0;j<nr_class;j++)
				fprintf(output," %d",labels[j]);
			fprintf(output,"\n");
		}
	}
	while(1)
	{
		int i = 0;
		int c;
		double target,v;

		if (fscanf(input,"%lf",&target)==EOF)
			break;

		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			do {
				c = getc(input);
				if(c=='\n' || c==EOF) goto out2;
			} while(isspace(c));
			ungetc(c,input);
			fscanf(input,"%d:%lf",&x[i].index,&x[i].value);
			++i;
		}	

out2:
		x[i++].index = -1;

		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
		{
			v = svm_predict_probability(model,x,prob_estimates, predict_probability);
			fprintf(output,"%g ",v);
			for(j=0;j<nr_class;j++)
				fprintf(output,"%g ",prob_estimates[j]);
			fprintf(output,"\n");
		}
		else
		{
			v = svm_predict(model,x);
			fprintf(output,"%g\n",v);
		}

		if(v == target)
			++correct;
		error += (v-target)*(v-target);
		sumv += v;
		sumy += target;
		sumvv += v*v;
		sumyy += target*target;
		sumvy += v*target;
		++total;

		//Brier Score and log likelihood
		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC))
		{
			int j;
			double bscore=0;
			for( j=0; j<nr_class && target != labels[j]; j++)
				;
			if(j < nr_class)
			{
				for(int i=0; i<nr_class; i++)
					bscore += prob_estimates[i]*prob_estimates[i];
				bscore = (bscore - 2 * prob_estimates[j] + 1)/nr_class;
				brier_score += bscore;
				log_likelihood += log(prob_estimates[j]);
			}
		}
		
	}
	printf("Accuracy = %g%% (%d/%d) (classification)\n",
		(double)correct/total*100,correct,total);
	printf("Mean squared error = %g (regression)\n",error/total);
	printf("Squared correlation coefficient = %g (regression)\n",
		((total*sumvy-sumv*sumy)*(total*sumvy-sumv*sumy))/
		((total*sumvv-sumv*sumv)*(total*sumyy-sumy*sumy))
		);
	if(predict_probability)
	{
		printf("Brier score = %g\n", brier_score/total);
		printf("Normalized log likelihood = %g\n", log_likelihood/total);
		free(prob_estimates);
	}
	free(labels); // labels is allocated unconditionally above, so free it unconditionally
}
Example #26
int main(int argc, char *argv[]) {

    float labels[4] = {1.0, 1.0, -1.0, -1.0};
    cv::Mat labelsMat(4, 1, CV_32FC1, labels);
    float trainingData[4][2] = {{501, 10}, {255, 10},
                                {501, 255}, {10, 501}};
    cv::Mat trainingDataMat(4, 2, CV_32FC1, trainingData);
    
    svm_parameter param;
    param.svm_type = C_SVC;
    param.kernel_type = LINEAR;
    param.degree = 3;
    param.gamma = 0;
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 100;
    param.C = 1;
    param.eps = 1e-6;
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 1;
    param.nr_weight = 0;
    param.weight_label = NULL;
    param.weight = NULL;
    
    svm_problem svm_prob_vector = libSVMWrapper(
       trainingDataMat, labelsMat, param);
    struct svm_model *model = new svm_model;
    if (svm_check_parameter(&svm_prob_vector, &param)) {
       std::cout << "ERROR" << std::endl;
    } else {
       model = svm_train(&svm_prob_vector, &param);
    }

    bool is_compute_probability = true;
    std::string model_file_name = "svm";
    bool save_model = true;
    if (save_model) {
       try {
          svm_save_model(model_file_name.c_str(), model);
          std::cout << "Model file Saved Successfully..." << std::endl;
       } catch(std::exception& e) {
          std::cout << e.what() << std::endl;
       }
    }


    bool is_probability_model = svm_check_probability_model(model);
    int svm_type = svm_get_svm_type(model);
    int nr_class = svm_get_nr_class(model);  // number of classes
    double *prob_estimates = new double[nr_class];

    cv::Vec3b green(0, 255, 0);
    cv::Vec3b blue(255, 0, 0);
    int width = 512, height = 512;
    cv::Mat image = cv::Mat::zeros(height, width, CV_8UC3);
    for (int i = 0; i < image.rows; ++i) {
       for (int j = 0; j < image.cols; ++j) {
          cv::Mat sampleMat = (cv::Mat_<float>(1, 2) << j, i);
              
          int dims = sampleMat.cols;
          svm_node* test_pt = new svm_node[dims + 1];  // +1 for the terminating node set below
          for (int k = 0; k < dims; k++) {
             test_pt[k].index = k + 1;
             test_pt[k].value = static_cast<double>(sampleMat.at<float>(0, k));
          }
          test_pt[dims].index = -1;

          float response = 0.0f;
          if (is_probability_model && is_compute_probability) {
             response = svm_predict_probability(model, test_pt, prob_estimates);
          } else {
             response = svm_predict(model, test_pt);
          }
          
          /*
          std::cout << "Predict: " << prob << std::endl;
          for (int y = 0; y < nr_class; y++) {
             std::cout << prob_estimates[y] << "  ";
          }std::cout <<  std::endl;
          */
          
          if (prob_estimates[0] > 0.5 || response == 1) {
             image.at<cv::Vec3b>(i, j)  = green;
          } else if (prob_estimates[1] >= 0.5 || response == -1) {
             image.at<cv::Vec3b>(i, j)  = blue;
          }
          delete[] test_pt;   // avoid leaking one node array per pixel
       }
    }
    cv::imshow("image", image);
    cv::waitKey(0);
    return 0;
}
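The helper libSVMWrapper used in Example #26 is not shown. Converting a dense cv::Mat of samples plus a label column into an svm_problem typically looks like the following sketch (illustrative only; the buffers must stay alive while svm_train runs):

#include <opencv2/core/core.hpp>
#include "svm.h"

// Sketch: build an svm_problem from a dense CV_32FC1 data matrix and a label column.
svm_problem buildProblem(const cv::Mat &data, const cv::Mat &labels)
{
    svm_problem prob;
    prob.l = data.rows;
    prob.y = new double[prob.l];
    prob.x = new svm_node *[prob.l];

    for (int r = 0; r < data.rows; ++r) {
        prob.y[r] = static_cast<double>(labels.at<float>(r, 0));
        prob.x[r] = new svm_node[data.cols + 1];     // +1 for the terminating node
        for (int c = 0; c < data.cols; ++c) {
            prob.x[r][c].index = c + 1;              // libsvm indices are 1-based
            prob.x[r][c].value = static_cast<double>(data.at<float>(r, c));
        }
        prob.x[r][data.cols].index = -1;
    }
    return prob;
}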
Example #27
void predict(mxArray *plhs[], const mxArray *prhs[], struct svm_model *model, const int predict_probability)
{
	int label_vector_row_num, label_vector_col_num;
	int feature_number, testing_instance_number;
	int instance_index;
	double *ptr_instance, *ptr_label, *ptr_predict_label; 
	double *ptr_prob_estimates, *ptr_dec_values, *ptr;
	struct svm_node *x;
	mxArray *pplhs[1]; // transposed instance sparse matrix

	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;

	// prhs[1] = testing instance matrix
	feature_number = (int)mxGetN(prhs[1]);
	testing_instance_number = (int)mxGetM(prhs[1]);
	label_vector_row_num = (int)mxGetM(prhs[0]);
	label_vector_col_num = (int)mxGetN(prhs[0]);

	if(label_vector_row_num!=testing_instance_number)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		fake_answer(plhs);
		return;
	}
	if(label_vector_col_num!=1)
	{
		mexPrintf("label (1st argument) should be a vector (# of column is 1).\n");
		fake_answer(plhs);
		return;
	}

	ptr_instance = mxGetPr(prhs[1]);
	ptr_label    = mxGetPr(prhs[0]);

	// transpose instance matrix
	if(mxIsSparse(prhs[1]))
	{
		if(model->param.kernel_type == PRECOMPUTED)
		{
			// precomputed kernel requires dense matrix, so we make one
			mxArray *rhs[1], *lhs[1];
			rhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
			{
				mexPrintf("Error: cannot full testing instance matrix\n");
				fake_answer(plhs);
				return;
			}
			ptr_instance = mxGetPr(lhs[0]);
			mxDestroyArray(rhs[0]);
		}
		else
		{
			mxArray *pprhs[1];
			pprhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, pplhs, 1, pprhs, "transpose"))
			{
				mexPrintf("Error: cannot transpose testing instance matrix\n");
				fake_answer(plhs);
				return;
			}
		}
	}

	if(predict_probability)
	{
		if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
			mexPrintf("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
	}

	plhs[0] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
	if(predict_probability)
	{
		// prob estimates are in plhs[2]
		if(svm_type==C_SVC || svm_type==NU_SVC)
			plhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class, mxREAL);
		else
			plhs[2] = mxCreateDoubleMatrix(0, 0, mxREAL);
	}
	else
	{
		// decision values are in plhs[2]
		if(svm_type == ONE_CLASS ||
		   svm_type == EPSILON_SVR ||
		   svm_type == NU_SVR)
			plhs[2] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
		else
			plhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class*(nr_class-1)/2, mxREAL);
	}

	ptr_predict_label = mxGetPr(plhs[0]);
	ptr_prob_estimates = mxGetPr(plhs[2]);
	ptr_dec_values = mxGetPr(plhs[2]);
	x = (struct svm_node*)malloc((feature_number+1)*sizeof(struct svm_node) );
	for(instance_index=0;instance_index<testing_instance_number;instance_index++)
	{
		int i;
		double target_label, predict_label;

		target_label = ptr_label[instance_index];

		if(mxIsSparse(prhs[1]) && model->param.kernel_type != PRECOMPUTED) // prhs[1]^T is still sparse
			read_sparse_instance(pplhs[0], instance_index, x);
		else
		{
			for(i=0;i<feature_number;i++)
			{
				x[i].index = i+1;
				x[i].value = ptr_instance[testing_instance_number*i+instance_index];
			}
			x[feature_number].index = -1;
		}

		if(predict_probability)
		{
			if(svm_type==C_SVC || svm_type==NU_SVC)
			{
				predict_label = svm_predict_probability(model, x, prob_estimates);
				ptr_predict_label[instance_index] = predict_label;
				for(i=0;i<nr_class;i++)
					ptr_prob_estimates[instance_index + i * testing_instance_number] = prob_estimates[i];
			} else {
				predict_label = svm_predict(model,x);
				ptr_predict_label[instance_index] = predict_label;
			}
		}
		else
		{
			predict_label = svm_predict(model,x);
			ptr_predict_label[instance_index] = predict_label;

			if(svm_type == ONE_CLASS ||
			   svm_type == EPSILON_SVR ||
			   svm_type == NU_SVR)
			{
				double res;
				svm_predict_values(model, x, &res);
				ptr_dec_values[instance_index] = res;
			}
			else
			{
				double *dec_values = (double *) malloc(sizeof(double) * nr_class*(nr_class-1)/2);
				svm_predict_values(model, x, dec_values);
				for(i=0;i<(nr_class*(nr_class-1))/2;i++)
					ptr_dec_values[instance_index + i * testing_instance_number] = dec_values[i];
				free(dec_values);
			}
		}

		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		mexPrintf("Mean squared error = %g (regression)\n",error/total);
		mexPrintf("Squared correlation coefficient = %g (regression)\n",
			((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			);
	}
	/* else */
	/*   mexPrintf("Accuracy = %g%% (%d/%d) (classification)\n", */
	/* 	    (double)correct/total*100,correct,total); */

	// return accuracy, mean squared error, squared correlation coefficient
	plhs[1] = mxCreateDoubleMatrix(3, 1, mxREAL);
	ptr = mxGetPr(plhs[1]);
	ptr[0] = (double)correct/total*100;
	ptr[1] = error/total;
	ptr[2] = ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
				((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt));

	free(x);
	if(prob_estimates != NULL)
		free(prob_estimates);
}
Example #28
int predict(FILE *input , FILE *output)
{
	int correct =0;
	int total = 0;
	double error = 0;	// accumulate the squared error as a double
	double sump = 0, sumt = 0, sumpt = 0, sumpp = 0, sumtt = 0;
	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimate = NULL;
	int j;
	if(predict_probability)
	{
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
			fprintf(output,"labels");
			for(j=0;j<nr_class;j++)
				fprintf(output," %d",labels[j]);
			fprintf(output,"\n");
			free(labels);
			prob_estimate = (double *) malloc(nr_class*sizeof(double)); // needed by svm_predict_probability below
	}
	max_line_len = 1024;
	line = (char *)malloc(max_line_len*sizeof(char));

	while(readline(input) != NULL)
	{
		int i =0;
		double target_label, predict_label,idVal;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1;
		label = strtok(line, " \t\n");
		if(label == NULL)
		{
			exit_input_error_predict(total+1);
		}
		target_label = strtod(label, &endptr);
		//if(*endptr =='\0')
		if(endptr == label|| *endptr !='\0')
		{
			exit_input_error_predict(total +1);
		}
		while(1)
		{
			if(i >= max_nr_attr-1)
			{
				max_nr_attr *=2;
				x =(struct svm_node *) realloc(x, max_nr_attr*sizeof(struct svm_node));

			}
			//LOGD("line = %s ",line);
			//idVal = strtod(line,&endptr);
			//LOGD("endptr = %s", endptr);
			idx = strtok(NULL,":");
			val = strtok(NULL," \t");
			if(val == NULL)
			{
				break;
			}
			errno = 0;
			x[i].index = (int) strtol(idx,&endptr,10);
			//if(endptr ==idx ||errno !=0 || *endptr =='\0'||x[i].index <=inst_max_index)
			if(endptr ==idx ||errno !=0 || *endptr !='\0'||x[i].index <=inst_max_index)
			{
				exit_input_error_predict(total +1);
			}

			else
			{
				inst_max_index = x[i].index;
			}
			errno = 0;
			x[i].value = strtod(val, &endptr);
			//if(endptr == val || errno !=0||(*endptr =='\0'&& !isspace(*endptr)))
			if(endptr == val || errno !=0||(*endptr !='\0'&& !isspace(*endptr)))
			{
				exit_input_error_predict(total +1);
			}
			++i;
		}
		x[i].index = -1;
		if(predict_probability &&(svm_type == C_SVC || svm_type == NU_SVC))
		{
			predict_label = svm_predict_probability(model, x, prob_estimate);
			fprintf(output,"%g", predict_label);
			for (j = 0; j < nr_class; j++)
			{
				fprintf(output,"%g",prob_estimate[j]);
			}
			fprintf(output,"\n");
		}
		else
		{
			//LOGD(" CAI DECK");
			predict_label = svm_predict(model,x);
			fprintf(output,"%g\n",predict_label);
		}
		if(predict_label == target_label)
		{
			++correct;
		}
		error +=(predict_label - target_label)*(predict_label - target_label);
		sump += predict_label;
		sumt +=target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label * target_label;
		++total;
	}
		if(predict_probability)
			free(prob_estimate);
		return 0;	// the function is declared int
}