Example #1
void SVMTrainModel::setNoPrint(bool _no_print)
{
  if(_no_print)
  {
    void (*print_func)(const char*) = &print_null;
    svm_set_print_string_function(print_func);
  }
  else
  {
    void (*print_func)(const char*) = NULL;       // default printing to stdout
    svm_set_print_string_function(print_func);
  }
}
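This example, like several below, installs a print_null handler that is not shown on this page. A minimal sketch of such a handler, assuming the conventional no-op definition used by libSVM's own svm-train.c (the exact body in these projects may differ):

// Assumed definition: a no-op handler matching the void(*)(const char*) signature
// that svm_set_print_string_function expects; it simply discards every message
// libSVM would otherwise write to stdout during training.
static void print_null(const char *s) {}

Passing such a function to svm_set_print_string_function silences training output, while passing NULL (as in the else branch above) restores the default printing to stdout.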
Example #2
bool SvmClassifier::Train(const Mat &feats, const vector<int> &labels)
{
    if (svm_model_ != NULL)
    {
        svm_free_and_destroy_model(&svm_model_);
    }

    // Calculate the normalization parameters
    TrainNormalize(feats, &normA_, &normB_);

    // Normalize the features
    Mat feats_norm;
    Normalize(feats, &feats_norm);

    // Prepare the input for SVM
    svm_parameter param;
    svm_problem problem;
    svm_node* x_space = NULL;

    PrepareParameter(feats_norm.cols, &param);
    PrepareProblem(feats_norm, labels, &problem, x_space);

    // Train the SVM model
    svm_set_print_string_function(&PrintNull);  // Suppress libSVM's training output
    svm_model_ = svm_train(&problem, &param);

    // Release the parameters for training
    svm_destroy_param(&param);
    return true;
}
Example #3
SVM::SVM(void (*f) (const char*)) {
	problem.l = 0;

	main_equation = NULL;
	model = NULL;

	param.svm_type = C_SVC;
	param.kernel_type = LINEAR;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	//	param.C = 1;
	param.C = DBL_MAX;
	//param.C = 1000;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	if (f != NULL)
		svm_set_print_string_function(f);

	for (int i = 0; i < 2 * max_items; i++)
		training_label[i] = -1;
	problem.x = (svm_node**)(training_set);
	problem.y = training_label;
}
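In this constructor, as in several examples below, param.gamma = 0 is a placeholder that the comment says stands for 1/num_features. The examples never show that substitution; a minimal sketch of how a caller might resolve it before training, in the style of libSVM's svm-train.c (the helper name and the num_features argument are assumptions, not part of the code above):

#include "svm.h"

// Hypothetical helper: apply the "gamma = 0 means 1/num_features" convention
// before handing the problem to libSVM, the same default svm-train.c applies.
static svm_model* train_with_default_gamma(svm_problem* problem,
                                            svm_parameter* param,
                                            int num_features)
{
	if (param->gamma == 0 && num_features > 0)
		param->gamma = 1.0 / num_features;   // 1/num_features default
	return svm_train(problem, param);
}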
Example #4
/****************************************
Get the SVM results of classifying activation vectors of two categories for every voxel;
the linear kernel of libSVM is applied here.
input: the average activation matrix array, the blocks (trials), the number of blocks, the number of test samples, the number of folds in the cross validation, the location info
output: a list of voxels' scores in terms of SVM accuracies
*****************************************/
VoxelScore* GetSearchlightSVMPerformance(RawMatrix** avg_matrices, Trial* trials, int nTrials, int nTests, int nFolds, Point* pts)  //classifiers for a r_matrix array
{
  svm_set_print_string_function(&print_null);
  int row = avg_matrices[0]->row;  // assume all matrices in the avg_matrices array have the same number of rows
  VoxelScore* score = new VoxelScore[row];  // get step voxels classification accuracy here
  int i, j;
  for (i=0; i<row; i++)
  {
    SVMProblem* prob = GetSearchlightSVMProblem(avg_matrices, trials, i, nTrials-nTests, pts);
    if (i%1000==0) cout<<i<<" ";
    //if (me==0) PrintSVMProblem(prob);
    SVMParameter* param = SetSVMParameter(0); // 0 selects the linear kernel (2 would select RBF)
    (score+i)->vid = i;
    (score+i)->score = DoSVM(nFolds, prob, param);
    delete param;
    delete[] prob->y;
    for (j=0; j<nTrials-nTests; j++)
    {
      delete[] prob->x[j];
    }
    delete[] prob->x;
    delete prob;
  }
  //if (me == 0)
  //{
  //  cout<<endl;
  //}
  return score;
}
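GetSearchlightSVMPerformance returns one VoxelScore (a voxel id plus its cross-validation accuracy) per voxel. A short, illustrative sketch of how a caller might rank those results, assuming only the score field shown above needs to be compared (the RankVoxelScores helper is not part of the original code):

#include <algorithm>

// Illustrative helper: order the searchlight results so that the voxels with the
// highest SVM accuracy come first.
static void RankVoxelScores(VoxelScore* score, int row)
{
  std::sort(score, score + row,
            [](const VoxelScore& a, const VoxelScore& b) { return a.score > b.score; });
}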
Example #5
SVMTrainModel::SVMTrainModel()
{
  // default values
  param.svm_type = C_SVC;
  param.kernel_type = RBF;
  param.degree = 3;
  param.gamma = 0;        // 1/num_features
  param.coef0 = 0;
  param.nu = 0.5;
  param.cache_size = 100;
  param.C = 1;
  param.eps = 1e-3;
  param.p = 0.1;
  param.shrinking = 1;
  param.probability = 0;
  param.nr_weight = 0;
  param.weight_label = NULL;
  param.weight = NULL;
  cross_validation = 0;

  void (*print_func)(const char*) = NULL;       // default printing to stdout
  svm_set_print_string_function(print_func);

  have_input_file_name = false;
  have_model_file_name = false;
}
/* provide convenience wrapper */
void set_verbosity(int verbosity_flag){
	if (verbosity_flag)
# if LIBSVM_VERSION < 291
		svm_print_string = &print_string_stdout;
	else
		svm_print_string = &print_null;
# else
		svm_set_print_string_function(&print_string_stdout);
	else
		svm_set_print_string_function(&print_null);
# endif
}
Example #7
/***************************************
Predict a new sample based on a trained SVM model and varying numbers of top voxels. If correlation, assume that it's a self-correlation, so only one mask file is enough.
input: the raw activation matrix array, the average activation matrix array, the number of subjects, the number of blocks (trials), the blocks, the number of test samples, the task type, the file to store the results, the mask file
output: the results are displayed on the screen
****************************************/
void SVMPredict(RawMatrix** r_matrices, RawMatrix** avg_matrices, int nSubs, int nTrials, Trial* trials, int nTests, int taskType, const char* topVoxelFile, const char* mask_file, int is_quiet_mode)
{
  RawMatrix** masked_matrices=NULL;
  int row = 0;
  int col = 0;
  svm_set_print_string_function(&print_null);
  VoxelScore* scores = NULL;
  int tops[] = {10, 20, 50, 100, 200, 500, 1000, 2000, 5000};//, 10000, 20000, 40000};
  int maxtops = sizeof(tops)/sizeof((tops)[0]);
  int ntops;
  switch (taskType)
  {
    case 0:
    case 1:
      if (mask_file!=NULL)
        masked_matrices = GetMaskedMatrices(r_matrices, nSubs, mask_file);
      else
        masked_matrices = r_matrices;
      row = masked_matrices[0]->row;
      col = masked_matrices[0]->col;
      scores = ReadTopVoxelFile(topVoxelFile, row);
      RearrangeMatrix(masked_matrices, scores, row, col, nSubs);
      ntops = getNumTopIndices(tops, maxtops, row);
      if (ntops > 0)
          CorrelationBasedClassification(tops, ntops, nSubs, nTrials, trials, nTests, masked_matrices, is_quiet_mode);
      else
          cerr<<"less than "<<tops[0]<<" voxels!"<<endl;
      break;
    case 2:
      if (mask_file!=NULL)
        masked_matrices = GetMaskedMatrices(avg_matrices, nSubs, mask_file);
      else
        masked_matrices = avg_matrices;
      row = masked_matrices[0]->row;
      col = masked_matrices[0]->col;
      scores = ReadTopVoxelFile(topVoxelFile, row);
      RearrangeMatrix(masked_matrices, scores, row, col, nSubs);
      ntops = getNumTopIndices(tops, maxtops, row);
      if (ntops > 0)
          ActivationBasedClassification(tops, ntops, nTrials, trials, nTests, masked_matrices, is_quiet_mode);
      else
          cerr<<"less than "<<tops[0]<<" voxels!"<<endl;
      break;
    default:
      FATAL("Unknown task type");
  }
  delete[] scores;
}
Example #8
/// <summary>
/// use a kernel matrix and solve the svm problem
/// </summary>
/// <param name="Labels">vector of all Labels</param>
/// <param name="C">regularisation parameter</param>
libSVMWrapper::libSVMWrapper(Eigen::VectorXd Labels, double C)
{
	svm_set_print_string_function(print_null);
	Y = Labels;

	// we need these parameters
	libSVM_Parameter.svm_type = C_SVC;
	libSVM_Parameter.kernel_type = PRECOMPUTED;
	libSVM_Parameter.C = C;

	// dummy values
	libSVM_Parameter.degree = 3;
	libSVM_Parameter.gamma = 0;
	libSVM_Parameter.coef0 = 0;
	libSVM_Parameter.nu = 0.5;
	libSVM_Parameter.cache_size = 100;
	libSVM_Parameter.eps = 1e-3;
	libSVM_Parameter.p = 0.1;
	libSVM_Parameter.shrinking = 1;
	libSVM_Parameter.probability = 0;
	libSVM_Parameter.nr_weight = 0;
	libSVM_Parameter.weight_label = NULL;
	libSVM_Parameter.weight = NULL;

	// the labels do not change
	NumberOfData = Labels.rows();
	int sc = Labels.rows() + 1;
	libSVM_Problem.y = Malloc(double,NumberOfData);
	libSVM_Problem.l = NumberOfData;

	for (int i = 0; i < NumberOfData; i++)
		libSVM_Problem.y[i] = Labels[i];

	// get space for kernel matrix (fixed size) only once
	libSVM_Problem.x = Malloc(struct svm_node *,NumberOfData);
	libSVM_x_space = Malloc(struct svm_node, NumberOfData* (sc+1));

	// get space only once
	for (int i = 0; i < libSVM_Problem.l; i++)
	{
		libSVM_Problem.x[i] = &libSVM_x_space[i * (sc + 1)];
		libSVM_Problem.y[i] = Labels[i];
	}

}
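The constructor above only reserves libSVM_x_space; with kernel_type = PRECOMPUTED, each row still has to be filled with kernel values before svm_train can run. In libSVM's precomputed-kernel convention, node 0 of row i holds the sample's 1-based serial number, nodes 1..l hold K(i, j), and index -1 terminates the row. A hedged sketch of such a fill step, assuming the class declares a member like this (the SetKernel name and the Eigen matrix argument are assumptions, not part of the wrapper shown above):

// Hypothetical member: copy a precomputed kernel matrix K (NumberOfData x NumberOfData)
// into the svm_node layout reserved by the constructor.
void libSVMWrapper::SetKernel(const Eigen::MatrixXd &K)
{
	int sc = NumberOfData + 1;                       // same row stride as the constructor
	for (int i = 0; i < NumberOfData; i++)
	{
		svm_node *row = &libSVM_x_space[i * (sc + 1)];
		row[0].index = 0;                            // "0:i" node: 1-based sample number
		row[0].value = i + 1;
		for (int j = 0; j < NumberOfData; j++)
		{
			row[j + 1].index = j + 1;                // "j:K(i,j)" nodes
			row[j + 1].value = K(i, j);
		}
		row[NumberOfData + 1].index = -1;            // terminator
	}
}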
Example #9
// nrhs should be 3
int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
{
	int i, argc = 1;
	char cmd[CMD_LEN];
	char *argv[CMD_LEN/2];
	void (*print_func)(const char *) = print_string_matlab;	// default printing to matlab display

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	cross_validation = 0;

	if(nrhs <= 1)
		return 1;

	if(nrhs > 2)
	{
		// put options in argv[]
		mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
		if((argv[argc] = strtok(cmd, " ")) != NULL)
			while((argv[++argc] = strtok(NULL, " ")) != NULL)
				;
	}

	// parse options
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		++i;
		if(i>=argc && argv[i-1][1] != 'q')	// since option -q has no parameter
			return 1;
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'q':
				print_func = &print_null;
				i--;
				break;
			case 'v':
				cross_validation = 1;
				nr_fold = atoi(argv[i]);
				if(nr_fold < 2)
				{
					mexPrintf("n-fold cross validation: n must be >= 2\n");
					return 1;
				}
				break;
			case 'w':
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			default:
				mexPrintf("Unknown option -%c\n", argv[i-1][1]);
				return 1;
		}
	}

	svm_set_print_string_function(print_func);

	return 0;
}
Example #10
void parse_command_line(int argc, char **argv, char *input_file_name, char *model_file_name)
{
	int i;
	void (*print_func)(const char*) = NULL;	// default printing to stdout

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	cross_validation = 0;

	// parse options
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		if(++i>=argc)
			exit_with_help();
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'q':
				print_func = &print_null;
				i--;
				break;
			case 'v':
				cross_validation = 1;
				nr_fold = atoi(argv[i]);
				if(nr_fold < 2)
				{
					fprintf(stderr,"n-fold cross validation: n must be >= 2\n");
					exit_with_help();
				}
				break;
			case 'w':
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			case 'W':
				weight_file = argv[i];
				break;
			default:
				fprintf(stderr,"Unknown option: -%c\n", argv[i-1][1]);
				exit_with_help();
		}
	}

	svm_set_print_string_function(print_func);

	// determine filenames

	if(i>=argc)
		exit_with_help();

	strcpy(input_file_name, argv[i]);

	if(i<argc-1)
		strcpy(model_file_name,argv[i+1]);
	else
	{
		char *p = strrchr(argv[i],'/');
		if(p==NULL)
			p = argv[i];
		else
			++p;
		sprintf(model_file_name,"%s.model",p);
	}
}
Example #11
/* provide convenience wrapper */
void set_verbosity(int verbosity_flag){
	if (verbosity_flag)
		svm_set_print_string_function(&print_string_stdout);
	else
		svm_set_print_string_function(&print_null);
}
void parse_command_line(int argc, char **argv, char *input_file_name, char *model_file_name)
{
	int i;
	void (*print_func)(const char*) = NULL;	// default printing to stdout

	// default values
	param.scale = 1.0;
	param.kMeansperLabel = 10;
	param.kNN = 20;
	param.svmIterations = 500;
	param.distanceCoefficient = 0.1;
	param.jointClustering = 0;

	walltime = -1;
	savetime = -1;
	subsamplingAmount = -1;

	// parse options
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		if(++i>=argc)
			exit_with_help();
		switch(argv[i-1][1])
		{
			case 's':
				param.scale = atof(argv[i]);
				break;
			case 'k':
				param.kMeansperLabel = atoi(argv[i]);
				break;
			case 'n':
				param.kNN = atoi(argv[i]);
				break;
			case 'i':
				param.svmIterations = atoi(argv[i]);
				break;
			case 'd':
				param.distanceCoefficient = atof(argv[i]);
				break;
			case 'j':
				param.jointClustering = atoi(argv[i]);
				break;
			case 'a':
				savetime = atoi(argv[i]);
				break;
			case 'l':
				walltime = atoi(argv[i]);
				break;
			default:
				fprintf(stderr,"Unknown option: -%c\n", argv[i-1][1]);
				exit_with_help();
		}
	}

	svm_set_print_string_function(print_func);

	// determine filenames

	if(i>=argc)
		exit_with_help();

	strcpy(input_file_name, argv[i]);

	if(i<argc-1)
		strcpy(model_file_name,argv[i+1]);
	else
	{
		char *p = strrchr(argv[i],'/');
		if(p==NULL)
			p = argv[i];
		else
			++p;
		sprintf(model_file_name,"%s.model",p);
	}
}
Example #13
int main(void){
	list_t passengerList,testPassengerList;
	FILE *fp;
	int i;


	svm_set_print_string_function(print_null);

	memset(&passengerList,0,sizeof(passengerList));
	memset(&testPassengerList,0,sizeof(testPassengerList));

	char buf[1024];
	char **token;

	dfopen(fp,"data/train.csv","r",exit(EXIT_FAILURE));
	fgets(buf,sizeof(buf),fp);	/* skip the CSV header line */

	while(fgets(buf,sizeof(buf),fp)){		
		int n=explode(&token,buf,',');
		passenger *human=Calloc(passenger,1);
		human->passengerId=atoi(token[0]);
		human->survived=atoi(token[1]);
		human->sex=(strcmp("male",token[4])==0) ? 0 : 1;
		human->age=(*token[5]=='\0') ? -1 : atoi(token[5]);
		human->rank=atoi(token[2]);
		human->fare=atoi(token[9]);
		human->name=parseName(token[3]);
		human->ticketNo=strdup(token[8]);
		human->cabin=strdup(token[10]);
		addList(&passengerList,human);
		for(i=0;i<n;i++){
			free(token[i]);
		}
		free(token);
	}
	fclose(fp);
	sameTicketScale(&passengerList);

	dfopen(fp,"data/test.csv","r",exit(EXIT_FAILURE));
	fgets(buf,sizeof(buf),fp);	/* skip the CSV header line */
	while(fgets(buf,sizeof(buf),fp)){
		int n=explode(&token,strdup(buf),',');
		passenger *human=Calloc(passenger,1);
		human->passengerId=atoi(token[0]);
		human->sex=(strcmp("male",token[3])==0) ? 0 : 1;
		human->age=(*token[4]=='\0') ? -1 : atoi(token[4]);
		human->rank=atoi(token[1]);
		human->fare=atoi(token[8]);
		human->name=parseName(token[2]);
		human->ticketNo=strdup(token[7]);
		human->cabin=strdup(token[9]);
		addList(&testPassengerList,human);
		for(i=0;i<n;i++){
			free(token[i]);
		}
		free(token);
	}
	fclose(fp);

	sameTicketScale(&testPassengerList);

	fillAge(&passengerList,&testPassengerList);
	checkAlone(&passengerList,&testPassengerList);
	ML_bayesianNetwork(&passengerList,&testPassengerList);
	exit(0);
	
	return 0;
}
int main(int argc, char** argv)
{
	if (argc < 1) {
		std::cout << "Arguments less than 2.\n";
		exit(-1);
	}	
	if (argc >= 3) {
		minv = atoi(argv[1]);
		maxv = atoi(argv[2]);
	}
	struct svm_parameter param;
	param.svm_type = C_SVC;
	param.kernel_type = LINEAR;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	//	param.C = 1;
	param.C = DBL_MAX;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	svm_set_print_string_function(print_null);


	int rnd = 1;
	srand(time(NULL));
#ifdef _TEST0_
	std::cout << "[1]******************************************************" << std::endl;
	std::cout << "\t(1) running programs... [" << inputs_init <<"]" << std::endl;
#endif
	for (int i = 0; i < inputs_init; i++) {
		for (int j = 0; j < vars; j++) {
			inputs[j] = rand() % (maxv - minv + 1) + minv;
		}
		before_loop();
		m(inputs);
		after_loop();
	}


start_processing:	
	if (positive_set_changed) { 
		qsort(positive_set, positive_idx, sizeof(node), node::compare);
		positive_set_changed = false;
	}
	if (negative_set_changed) { 
		qsort(negative_set, negative_idx, sizeof(node), node::compare);
		negative_set_changed = false;
	}
	//	nice_set_print();

#ifdef _TEST0_
	std::cout << "\t(2) converting data into svm format..." << std::endl;
#endif
	svm_linker sl;
	sl.add_node_set(positive_set, positive_idx, 1);
	sl.add_node_set(negative_set, negative_idx, -1);

#ifdef _TEST0_
	std::cout << "\t(3) svm training...[" << sl.l << "]" << std::endl;
#endif
	struct svm_model* model = svm_train((const struct svm_problem *)&sl, &param);
//	svm_save_model("model_file", model);
	struct coef co;
	svm_model_visualization(model, &co);
	printf(" %.16g [0]", co.theta[0]);
	for (int j = 1; j < vars; j++)
		printf ("  +  %.16g [%d]", co.theta[j], j);
	printf (" >= %.16g\n", -co.theta0);

	print_svm_samples((const struct svm_problem*)&sl);
	svm_free_and_destroy_model(&model);

	rnd++;
	if (rnd <= max_iter) {
#ifdef _TEST0_
		std::cout << "[" << rnd << "]*********************************************************" << std::endl;
		std::cout << "\t(1) running programs...[" << inputs_aft << "]" << std::endl;
#endif
		for (int i = 0; i < inputs_aft; i++) {
			linear_solver(co, inputs);
			before_loop();
			m(inputs);
			after_loop();
		}
		goto start_processing;

	}


	return 0;
}
/***************************************
Get two parts of the brain to compute the correlation and then use the correlation vectors to predict.
input: the raw activation matrix array, the number of subjects, the first mask file, the second mask file, the number of blocks (trials), the blocks, the number of test samples
output: the results are displayed on the screen and returned
****************************************/
int SVMPredictCorrelationWithMasks(RawMatrix** r_matrices, int nSubs, const char* maskFile1, const char* maskFile2, int nTrials, Trial* trials, int nTests, int is_quiet_mode)
{
  int i, j;
  svm_set_print_string_function(&print_null);
  RawMatrix** masked_matrices1=NULL;
  RawMatrix** masked_matrices2=NULL;
  if (maskFile1!=NULL)
    masked_matrices1 = GetMaskedMatrices(r_matrices, nSubs, maskFile1);
  else
    masked_matrices1 = r_matrices;
  if (maskFile2!=NULL)
    masked_matrices2 = GetMaskedMatrices(r_matrices, nSubs, maskFile2);
  else
    masked_matrices2 = r_matrices;
  cout<<"masked matrices generating done!"<<endl;
  cout<<"#voxels for mask1: "<<masked_matrices1[0]->row<<" #voxels for mask2: "<<masked_matrices2[0]->row<<endl;
  float* simMatrix = new float[nTrials*nTrials];
  int corrRow = masked_matrices1[0]->row;
  //int corrCol = masked_matrices2[0]->row; // no use here
  memset((void*)simMatrix, 0, nTrials*nTrials*sizeof(float));
  int sr = 0, rowLength = 100;
  int result = 0;
  while (sr<corrRow)
  {
    if (rowLength >= corrRow - sr)
    {
      rowLength = corrRow - sr;
    }
    float* tempSimMatrix = GetPartialInnerSimMatrixWithMasks(nSubs, nTrials, sr, rowLength, trials, masked_matrices1, masked_matrices2);
    for (i=0; i<nTrials*nTrials; i++) simMatrix[i] += tempSimMatrix[i];
    delete[] tempSimMatrix;
    sr += rowLength;
  }
  SVMParameter* param = SetSVMParameter(4); // precomputed
  SVMProblem* prob = GetSVMTrainingSet(simMatrix, nTrials, trials, nTrials-nTests);
  struct svm_model *model = svm_train(prob, param);
  int nTrainings = nTrials-nTests;
  SVMNode* x = new SVMNode[nTrainings+2];
  double predict_distances[nTrials-nTrainings];
  bool predict_correctness[nTrials-nTrainings];
  for (i=nTrainings; i<nTrials; i++)
  {
    x[0].index = 0;
    x[0].value = i-nTrainings+1;
    for (j=0; j<nTrainings; j++)
    {
      x[j+1].index = j+1;
      x[j+1].value = simMatrix[i*nTrials+j];
    }
    x[j+1].index = -1;
    predict_distances[i-nTrainings] = svm_predict_distance(model, x);
    int predict_label = predict_distances[i-nTrainings]>0?0:1;
    if (trials[i].label == predict_label)
    {
      result++;
      predict_correctness[i-nTrainings] = true;
    }
    else
    {
      predict_correctness[i-nTrainings] = false;
    }
  }
  if (!is_quiet_mode)
  {
    cout<<"blocking testing confidence:"<<endl;
    for (i=nTrainings; i<nTrials; i++)
    {
      cout<<fabs(predict_distances[i-nTrainings])<<" (";
      if (predict_correctness[i-nTrainings])
      {
        cout<<"Correct) ";
      }
      else
      {
        cout<<"Incorrect) ";
      }
    }
    cout<<endl;
  }
  svm_free_and_destroy_model(&model);
  delete[] x;
  delete[] prob->y;
  for (i=0; i<nTrainings; i++)
  {
    delete[] prob->x[i];
  }
  delete[] prob->x;
  delete prob;
  svm_destroy_param(param);
  delete[] simMatrix;
  for (i=0; i<nSubs; i++)
  {
    if (maskFile1!=NULL) delete[] masked_matrices1[i]->matrix;
    if (maskFile2!=NULL) delete[] masked_matrices2[i]->matrix;
  }
  if (maskFile1!=NULL) delete[] masked_matrices1;
  if (maskFile2!=NULL) delete[] masked_matrices2;
  return result;
}
/***************************************
Get one part of the brain to compute the averaged activation and then use the normalized activation vectors to predict.
input: the averaged activation matrix array, the number of subjects, the ROI mask file, the number of blocks (trials), the blocks, the number of test samples
output: the results are displayed on the screen and returned
****************************************/
int SVMPredictActivationWithMasks(RawMatrix** avg_matrices, int nSubs, const char* maskFile, int nTrials, Trial* trials, int nTests, int is_quiet_mode)
{
  int i, j;
  int nTrainings = nTrials-nTests;
  SVMParameter* param = SetSVMParameter(0); // linear
  SVMProblem* prob = new SVMProblem();
  prob->l = nTrainings;
  prob->y = new double[nTrainings];
  prob->x = new SVMNode*[nTrainings];
  svm_set_print_string_function(&print_null);
  
  RawMatrix** masked_matrices=NULL;
  if (maskFile!=NULL)
    masked_matrices = GetMaskedMatrices(avg_matrices, nSubs, maskFile);
  else
    masked_matrices = avg_matrices;
  cout<<"masked matrices generating done!"<<endl;
  cout<<"#voxels for mask "<<masked_matrices[0]->row<<endl;
  int nVoxels = masked_matrices[0]->row;
  for (i=0; i<nTrainings; i++)
  {
    int sid = trials[i].sid;
    prob->y[i] = trials[i].label;
    prob->x[i] = new SVMNode[nVoxels+1];
    for (j=0; j<nVoxels; j++)
    {
      prob->x[i][j].index = j+1;
      int col = masked_matrices[sid]->col;
      int offset = trials[i].tid_withinsubj;
      prob->x[i][j].value = masked_matrices[sid]->matrix[j*col+offset];
    }
    prob->x[i][j].index = -1;
  }
  struct svm_model *model = svm_train(prob, param);
  SVMNode* x = new SVMNode[nVoxels+1];
  double predict_distances[nTrials-nTrainings];
  bool predict_correctness[nTrials-nTrainings];
  int result = 0;
  for (i=nTrainings; i<nTrials; i++)
  {
    int sid = trials[i].sid;
    for (j=0; j<nVoxels; j++)
    {
      x[j].index = j+1;
      int col = masked_matrices[sid]->col;
      int offset = trials[i].tid_withinsubj;
      x[j].value = masked_matrices[sid]->matrix[j*col+offset];
    }
    x[j].index = -1;
    predict_distances[i-nTrainings] = svm_predict_distance(model, x);
    int predict_label = predict_distances[i-nTrainings]>0?0:1;
    if (trials[i].label == predict_label)
    {
      result++;
      predict_correctness[i-nTrainings] = true;
    }
    else
    {
      predict_correctness[i-nTrainings] = false;
    }
  }
  if (!is_quiet_mode)
  {
    cout<<"blocking testing confidence:"<<endl;
    for (i=nTrainings; i<nTrials; i++)
    {
      cout<<fabs(predict_distances[i-nTrainings])<<" (";
      if (predict_correctness[i-nTrainings])
      {
        cout<<"Correct) ";
      }
      else
      {
        cout<<"Incorrect) ";
      }
    }
    cout<<endl;
  }
  svm_free_and_destroy_model(&model);
  delete[] x;
  delete[] prob->y;
  for (i=0; i<nTrainings; i++)
  {
    delete[] prob->x[i];
  }
  delete[] prob->x;
  delete prob;
  svm_destroy_param(param);
  if (maskFile!=NULL)
  {
    for (i=0; i<nSubs; i++)
    {
      delete[] masked_matrices[i]->matrix;
    }
    delete[] masked_matrices;
  }
  return result;
}