bool SvmClassifier::Train(const Mat &feats, const vector<int> &labels) {
    if (svm_model_ != NULL) {
        svm_free_and_destroy_model(&svm_model_);
    }
    // Calculate the normalization parameters
    TrainNormalize(feats, &normA_, &normB_);
    // Normalize the features
    Mat feats_norm;
    Normalize(feats, &feats_norm);
    // Prepare the input for SVM
    svm_parameter param;
    svm_problem problem;
    svm_node* x_space = NULL;
    PrepareParameter(feats_norm.cols, &param);
    PrepareProblem(feats_norm, labels, &problem, x_space);
    // Train the SVM model
    svm_set_print_string_function(&PrintNull);  // Suppress LibSVM's training output
    svm_model_ = svm_train(&problem, &param);
    // Release the parameters used for training
    svm_destroy_param(&param);
    return true;
}
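PrepareParameter and PrepareProblem are project helpers that are not shown here. As a rough, non-authoritative sketch of what a PrepareProblem-style routine typically does, the following packs a CV_32F cv::Mat (one sample per row) into LibSVM's svm_node layout; the FillProblem name and its signature are assumptions for illustration only.

#include <vector>
#include <opencv2/core.hpp>
#include "svm.h"

// Hypothetical sketch of a PrepareProblem-style helper: packs a CV_32F cv::Mat
// (one sample per row) and its labels into a LibSVM svm_problem. The name and
// signature are assumptions, not the original SvmClassifier code.
static void FillProblem(const cv::Mat &feats, const std::vector<int> &labels,
                        svm_problem *problem, svm_node **x_space_out) {
    const int n = feats.rows, dim = feats.cols;
    problem->l = n;
    problem->y = new double[n];
    problem->x = new svm_node*[n];
    // One extra node per sample for the index = -1 terminator LibSVM expects.
    svm_node *x_space = new svm_node[static_cast<size_t>(n) * (dim + 1)];
    for (int i = 0; i < n; ++i) {
        problem->y[i] = labels[i];
        problem->x[i] = &x_space[static_cast<size_t>(i) * (dim + 1)];
        for (int j = 0; j < dim; ++j) {
            problem->x[i][j].index = j + 1;               // LibSVM feature indices are 1-based
            problem->x[i][j].value = feats.at<float>(i, j);
        }
        problem->x[i][dim].index = -1;                    // end-of-vector marker
    }
    *x_space_out = x_space;   // caller frees this after the model is no longer needed
}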
int CV_MLBaseTest::train( int testCaseIdx )
{
    bool is_trained = false;
    FileNode modelParamsNode =
        validationFS.getFirstTopLevelNode()["validation"][modelName][dataSetNames[testCaseIdx]]["model_params"];

    if( !modelName.compare(CV_NBAYES) )
        is_trained = nbayes_train( nbayes, &data );
    else if( !modelName.compare(CV_KNEAREST) )
    {
        assert( 0 );
        //is_trained = knearest->train( &data );
    }
    else if( !modelName.compare(CV_SVM) )
    {
        string svm_type_str, kernel_type_str;
        modelParamsNode["svm_type"] >> svm_type_str;
        modelParamsNode["kernel_type"] >> kernel_type_str;
        CvSVMParams params;
        params.svm_type = str_to_svm_type( svm_type_str );
        params.kernel_type = str_to_svm_kernel_type( kernel_type_str );
        modelParamsNode["degree"] >> params.degree;
        modelParamsNode["gamma"] >> params.gamma;
        modelParamsNode["coef0"] >> params.coef0;
        modelParamsNode["C"] >> params.C;
        modelParamsNode["nu"] >> params.nu;
        modelParamsNode["p"] >> params.p;
        is_trained = svm_train( svm, &data, params );
    }
int main(int argc, char **argv)
{
    char input_file_name[1024];
    char model_file_name[1024];
    const char *error_msg;

    parse_command_line(argc, argv, input_file_name, model_file_name);
    read_problem(input_file_name);
    error_msg = svm_check_parameter(&prob, &param);

    if(error_msg)
    {
        fprintf(stderr,"Error: %s\n",error_msg);
        exit(1);
    }

    if(cross_validation)
    {
        do_cross_validation();
    }
    else
    {
        model = svm_train(&prob, &param);
        svm_save_model(model_file_name, model);
        svm_destroy_model(model);
    }
    svm_destroy_param(&param);
    free(prob.y);
    free(prob.x);
    free(x_space);

    return 0;
}
static void cmd_optim(WindowFile &trainA, WindowFile &trainB,
                      WindowFile &crossA, WindowFile &crossB,
                      double cStart, double cStop, double cStep,
                      double gStart, double gStop, double gStep)
{
    SVMProblem problem(trainA, trainB);
    SVMParam param;

    const int totalWins = crossA.getNumEventsAndNumWins().second + crossB.getNumEventsAndNumWins().second;
    int bestErrors = totalWins;
    double bestg = 0., bestc = 0.;
    const double eps = std::numeric_limits<float>::epsilon();

    for(double g = gStart; g <= gStop; g += gStep) {
        for(double c = cStart; c <= cStop; c += cStep) {
            fprintf(stderr, "=> Trying c=%.1f, g=%.1f\n", c, g);
            param.setcg(c, g);
            svm_model *model = svm_train(&problem, &param);
            const int errors = countErrors(model, crossA, crossB);
            svm_free_and_destroy_model(&model);
            fprintf(stderr, "=> Errors: %d\n", errors);
            if((errors < bestErrors) || (errors == bestErrors && fabs(g-bestg)<=eps && c<bestc)) {
                bestErrors = errors;
                bestc = c;
                bestg = g;
            }
        }
    }

    fprintf(stderr, "\n\n=> Best: c=%.1f, g=%.1f\n", bestc, bestg);
    fprintf(stderr, "=> Errors: %d (%.2f%%)\n", bestErrors, (100.*bestErrors)/totalWins);
}
double cv(struct svm_parameter *param, struct svm_problem *prob)
{
    struct svm_problem prob2;
    struct svm_model *model;
    int i,j;
    double predict_label,target_label;

    prob2.l = prob->l;
    prob2.y = Malloc(double,prob->l);
    prob2.x = Malloc(struct svm_node *,prob->l);

    double error = 0;
    int total = 0;
    prob2.l--;

    for(i=0;i<prob->l;i++){
        for(j=0;j<i;j++){
            prob2.x[j]=prob->x[j];
            prob2.y[j]=prob->y[j];
        }
        for(;j<prob2.l;j++){
            prob2.x[j]=prob->x[j+1];
            prob2.y[j]=prob->y[j+1];
        }
        model = svm_train(&prob2,param);
        predict_label = svm_predict(model,prob->x[i]);
        svm_free_and_destroy_model(&model);
        printf("predict_label:%lf prob->y[i]:%lf\n",predict_label,prob->y[i]);
        target_label=prob->y[i];
        error += (predict_label-target_label)*(predict_label-target_label);
        ++total;
    }
    return error/(double)total;
}
void svm_train_and_test(const std::vector<feature_t> &feats, const std::vector<int> &labels,
                        const char * model_file, std::ofstream &ofs)
{
    svm_model * model;
    if(_access(model_file, 0) == -1)
    {
        auto param = svm_fill_parameter();
        auto prob = svm_fill_problem(feats, labels);
        model = svm_train(prob, param);
        svm_save_model(model_file, model);
        auto acc = svm_test_acc(prob, model);
        svm_destroy_param(param);
        svm_free_problem(prob);
        std::cout<<model_file<<" acc: "<<acc*100<<std::endl;
        ofs<<model_file<<" acc: "<<acc*100<<std::endl;
    }
    else
    {
        model = svm_load_model(model_file);
        auto acc = svm_test_acc(feats, labels, model);
        std::cout<<model_file<<" acc: "<<acc*100<<std::endl;
        ofs<<model_file<<" acc: "<<acc*100<<std::endl;
    }
    //free
    svm_free_and_destroy_model(&model);
}
void TrainableOneClassSvmClassifier::train() {
    problem = move(createProblem());
    const char* message = svm_check_parameter(problem.get(), param.get());
    if (message != 0)
        throw invalid_argument(string("invalid SVM parameters: ") + message);
    model.reset(svm_train(problem.get(), param.get()));
    updateSvmParameters();
}
inline void cv_dist_vector_GrassBC::train(int tr_scale, int tr_shift )
{
    distances(tr_scale, tr_shift);
    svm_train();
}
inline void cv_dist_vector_LogEucl::train(int tr_scale, int tr_shift )
{
    distances(tr_scale, tr_shift);
    svm_train();
}
PredictModel Problem::CProblem::trainAndCreateModel(const Paramter& param) const
{
    if (!m_hasData)
    {
        return PredictModel();
    }
    return PredictModel(std::make_unique<svm_model*>(svm_train(&m_problem, &param)));
}
static void cmd_train(const char *modelfile, double cParam, double gParam,
                      WindowFile &trainA, WindowFile &trainB)
{
    SVMProblem problem(trainA, trainB);
    SVMParam param(cParam, gParam, true);
    svm_model *model = svm_train(&problem, &param);
    svm_save_model(modelfile, model);
    svm_free_and_destroy_model(&model);
}
svm_classifier_t *svm_finish_classifier(svm_classifier_t *svm)
{
    int i, j, nr_fold=5, total_correct, best_correct=0, best_c=svm->param.C, best_g=svm->param.gamma;
    const char *error_msg;
    double *result = (double *) rs_malloc(sizeof(double)*svm->problem.l,"cross validation result");

    rs_msg("Building SVM classifier...");

    if (svm->finished)
        rs_warning("SVM classifier is already trained!");

    error_msg = svm_check_parameter(&(svm->problem),&(svm->param));
    if(error_msg)
        rs_error("%s",error_msg);

    /* Scaling */
    _create_scaling(svm->problem,svm->feature_dim,&(svm->max),&(svm->min));
    for (i=0;i<svm->problem.l;i++)
        _scale_instance(&(svm->problem.x[i]),svm->feature_dim,svm->max,svm->min);

    /* Cross-validation to determine C and gamma for the RBF kernel */
    if (svm->param.kernel_type == RBF) {
        svm->param.probability = 0;
        for (i=0;i<C_G_ITER;i++) {
            total_correct=0;
            svm->param.C=pow(2,C[i]);
            svm->param.gamma=pow(2,G[i]);
            svm_cross_validation(&(svm->problem),&(svm->param),nr_fold,result);
            for(j=0;j<svm->problem.l;j++) {
                if(result[j] == svm->problem.y[j])
                    ++total_correct;
            }
            if (total_correct > best_correct) {
                best_correct=total_correct;
                best_c=C[i];
                best_g=G[i];
            }
            rs_msg("C-G selection iteration # %d: tried c=%g and g=%g => CV rate is %g; current best c=%g and g=%g with CV rate %g",i+1,pow(2,C[i]),pow(2,G[i]),total_correct*100.0/svm->problem.l,pow(2,best_c),pow(2,best_g),best_correct*100.0/svm->problem.l);
        }

        /* Training */
        svm->param.C=pow(2,best_c);
        svm->param.gamma=pow(2,best_g);
        svm->param.probability = 1;
    }

    svm->model=svm_train(&(svm->problem),&(svm->param));
    svm->finished=1;

    // @begin_add_johannes
    rs_free (result);
    // @end_add_johannes

    return svm;
}
// Calls LibSVM function svm_train for simplicity.
int SupportVectorMachine::Train() {
    // Calls svm_train to train an SVM model.
    const char *svm_check_param = svm_check_parameter(&svm_problem_, &svm_parameter_);
    struct svm_model *curr_svm_model = svm_train(&svm_problem_, &svm_parameter_);
    memcpy(&svm_model_, curr_svm_model, sizeof(struct svm_model));
    return 0;
}
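Two caveats with the snippet above: the string returned by svm_check_parameter is never inspected, and memcpy only shallow-copies the svm_model struct, so the arrays inside it still belong to the block allocated by svm_train while that pointer is lost. A sketch of a safer variant is shown below; svm_model_ptr_ is a hypothetical pointer member used in place of the by-value svm_model_, not part of the original class.

// Sketch only: keep the pointer returned by svm_train instead of memcpy-ing the
// struct. svm_model_ptr_ is a hypothetical member, not part of the class above.
int SupportVectorMachine::Train() {
    const char *error_msg = svm_check_parameter(&svm_problem_, &svm_parameter_);
    if (error_msg != NULL) {
        fprintf(stderr, "svm_check_parameter: %s\n", error_msg);
        return -1;
    }
    svm_model_ptr_ = svm_train(&svm_problem_, &svm_parameter_);
    return 0;
}

SupportVectorMachine::~SupportVectorMachine() {
    if (svm_model_ptr_ != NULL)
        svm_free_and_destroy_model(&svm_model_ptr_);   // frees the model and nulls the pointer
}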
void SVMClassifier::train(const std::vector<Example> &examples){
    // destroy any existing models
    if(model){
        model = 0; // hopefully already done in previous function
    }
    // destroy any existing problems
    if(prblm)
        svm_destroy_problem(&prblm);
    // train!
    prblm = compileProblem(examples);
    model = svm_train(prblm, &parameters);
}
svm_model * SupportVectorMachine::get_svm_model(std::vector<Abalone> &learning_Abalones)
{
    svm_problem SVM_Problem;
    SVM_Problem.l = learning_Abalones.size();
    SVM_Problem.y = Abalone::get_target_values(learning_Abalones);

    svm_node **x = new svm_node*[learning_Abalones.size()];
    for (int i = 0; i < learning_Abalones.size(); i++)
    {
        x[i] = new svm_node[9];

        svm_node node1;
        node1.index = 1;
        node1.value = learning_Abalones[i].get_Sex();
        x[i][0] = node1;

        svm_node node2;
        node2.index = 2;
        node2.value = learning_Abalones[i].get_Diameter();
        x[i][1] = node2;

        svm_node node3;
        node3.index = 3;
        node3.value = learning_Abalones[i].get_Height();
        x[i][2] = node3;

        svm_node node4;
        node4.index = 4;
        node4.value = learning_Abalones[i].get_Length();
        x[i][3] = node4;

        svm_node node5;
        node5.index = 5;
        node5.value = learning_Abalones[i].get_Shell_weight();
        x[i][4] = node5;

        svm_node node6;
        node6.index = 6;
        node6.value = learning_Abalones[i].get_Shucked_weight();
        x[i][5] = node6;

        svm_node node7;
        node7.index = 7;
        node7.value = learning_Abalones[i].get_Viscera_weight();
        x[i][6] = node7;

        svm_node node8;
        node8.index = 8;
        node8.value = learning_Abalones[i].get_Whole_weight();
        x[i][7] = node8;

        svm_node node9;
        node9.index = -1;
        node9.value = '?';
        x[i][8] = node9;
    }
    SVM_Problem.x = x;

    svm_parameter SVM_Parameter;
    SVM_Parameter.C = 1;
    SVM_Parameter.svm_type = C_SVC;
    SVM_Parameter.kernel_type = LINEAR;
    SVM_Parameter.degree = 3;          /* for poly */
    SVM_Parameter.gamma = 2;           /* for poly/rbf/sigmoid */
    SVM_Parameter.coef0 = 1;           /* for poly/sigmoid */
    SVM_Parameter.cache_size = 64;
    SVM_Parameter.eps = 0.001;
    SVM_Parameter.nr_weight = 0;
    SVM_Parameter.weight_label = NULL; /* no per-class weights */
    SVM_Parameter.weight = NULL;
    SVM_Parameter.nu = 0.5;            /* unused by C_SVC, but must be valid */
    SVM_Parameter.p = 0.1;             /* unused by C_SVC, but must be valid */
    SVM_Parameter.shrinking = 1;
    SVM_Parameter.probability = 0;

    svm_check_parameter(&SVM_Problem, &SVM_Parameter);
    svm_model *model = svm_train(&SVM_Problem, &SVM_Parameter);

    /*for (int i = 0; i < learning_Abalones.size(); i++)
    {
        delete[] x[i];
    }
    delete[] x;
    delete SVM_Problem.y;*/

    return model;
}
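One reason the cleanup at the end is commented out: for a model produced by svm_train (unlike one loaded from disk), the support vectors inside the returned svm_model point into SVM_Problem.x, so freeing the x[i] arrays while the model is still in use would leave dangling pointers. A caller-side teardown sketch that keeps the problem storage alive until the model is released; the TrainedSvm name and the assumption that y was allocated with new[] are illustrative, not from the project:

#include "svm.h"

// Sketch: bundle the problem storage with the model so both can be released together.
struct TrainedSvm {
    svm_model *model;   // owns the LibSVM model
    svm_node **x;       // per-sample node arrays the model's support vectors point into
    double *y;          // label array
    int l;              // number of samples
};

static void free_trained_svm(TrainedSvm &t) {
    svm_free_and_destroy_model(&t.model);   // release the model first
    for (int i = 0; i < t.l; ++i)
        delete[] t.x[i];                    // then the node arrays it referenced
    delete[] t.x;
    delete[] t.y;                           // assumes y was allocated with new[]
}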
double svm_train_and_test(const std::vector<feature_t> &train_feats, const std::vector<int> &train_labels,
                          const std::vector<feature_t> &test_feats, const std::vector<int> &test_labels,
                          const char * model_file)
{
    svm_model * model;
    auto param = svm_fill_parameter();
    auto prob = svm_fill_problem(train_feats, train_labels);
    model = svm_train(prob, param);
    svm_save_model(model_file, model);
    auto acc = svm_test_acc(test_feats, test_labels, model);
    svm_destroy_param(param);
    svm_free_problem(prob);
    //free
    svm_free_and_destroy_model(&model);
    return acc;
}
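svm_fill_parameter, svm_fill_problem, svm_test_acc and svm_free_problem are wrappers from the surrounding project rather than LibSVM functions. An accuracy helper in the spirit of svm_test_acc might simply run svm_predict over the held-out samples; this sketch assumes each sample is already stored as an svm_node array terminated by an index = -1 node.

#include <cstddef>
#include <vector>
#include "svm.h"

// Rough sketch of what an svm_test_acc-style helper could compute.
static double test_accuracy(const std::vector<std::vector<svm_node>> &samples,
                            const std::vector<int> &labels, const svm_model *model) {
    std::size_t correct = 0;
    for (std::size_t i = 0; i < samples.size(); ++i) {
        const double pred = svm_predict(model, samples[i].data());  // sample ends with index = -1
        if (static_cast<int>(pred) == labels[i])
            ++correct;
    }
    return samples.empty() ? 0.0 : static_cast<double>(correct) / samples.size();
}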
int main(int argc, char **argv)
{
    char input_file_name[1024];
    char model_file_name[1024];
    const char *error_msg;

    parse_command_line(argc, argv, input_file_name, model_file_name);
    read_problem(input_file_name);
#ifdef CUSTOM_SOLVER
    if (param.svm_type == ONE_CLASS)
    {
        param.strong_footlier_indexes = filter_strong_footliers(input_file_name);
    }
#endif
    error_msg = svm_check_parameter(&prob, &param);
    if(error_msg)
    {
        fprintf(stderr,"Error: %s\n",error_msg);
        exit(1);
    }

    if(cross_validation)
    {
        if(param.svm_type == R2 || param.svm_type == R2q)
            fprintf(stderr, "\"R^2\" cannot do cross validation.\n");
        else
            do_cross_validation();
    }
    else
    {
        model = svm_train(&prob, &param);
        if(param.svm_type == R2 || param.svm_type == R2q)
            fprintf(stderr, "\"R^2\" does not generate model.\n");
        else if(svm_save_model(model_file_name,model))
        {
            fprintf(stderr, "can't save model to file %s\n", model_file_name);
            exit(1);
        }
        svm_free_and_destroy_model(&model);
    }
    svm_destroy_param(&param);
    free(prob.y);
    free(prob.x);
    free(x_space);
    free(line);

    return 0;
}
void* jpcnn_create_predictor_from_trainer(void* trainerHandle) {
    SLibSvmTrainingInfo* trainer = (SLibSvmTrainingInfo*)(trainerHandle);
    SLibSvmProblem* problem = create_svm_problem_from_training_info(trainer);
    const char* parameterCheckError = svm_check_parameter(problem->svmProblem, problem->svmParameters);
    if (parameterCheckError != NULL) {
        fprintf(stderr, "libsvm parameter check error: %s\n", parameterCheckError);
        destroy_svm_problem(problem);
        return NULL;
    }
    struct svm_model* model = svm_train(problem->svmProblem, problem->svmParameters);
    SPredictorInfo* result = (SPredictorInfo*)(malloc(sizeof(SPredictorInfo)));
    result->model = model;
    result->problem = problem;
    return result;
}
Datum pgm_svm_train(PG_FUNCTION_ARGS){
    PGM_Matriz_Double *matrix = (PGM_Matriz_Double*)PG_GETARG_POINTER(0);
    PGM_Vetor_Double *vector = (PGM_Vetor_Double*)PG_GETARG_POINTER(1);
    struct svm_parameter *param = (struct svm_parameter*) pgm_malloc (sizeof(struct svm_parameter));
    struct svm_problem* prob;

    // Cross-validation
    int cross_validation = 0, n_fold = PG_GETARG_INT32(14);

    if (n_fold < 2 && n_fold != 0)
        exit_with_help();
    else if( n_fold >= 2){
        cross_validation = 1;
        elog(ERROR,"CROSS VALIDATION NOT IMPLEMENTED");
    }

    // Fill the parameter struct
    param->svm_type = PG_GETARG_INT32(2);
    param->kernel_type= PG_GETARG_INT32(3);
    param->degree= PG_GETARG_INT32(4);
    param->gamma= PG_GETARG_FLOAT8(5);
    param->coef0= PG_GETARG_FLOAT8(6);
    param->cache_size= PG_GETARG_FLOAT8(7);
    param->eps= PG_GETARG_FLOAT8(8);
    param->C= PG_GETARG_FLOAT8(9);
    param->nr_weight = 0;
    param->weight_label = NULL;
    param->weight = NULL;
    param->nu= PG_GETARG_FLOAT8(10);
    param->p= PG_GETARG_FLOAT8(11);
    param->shrinking= PG_GETARG_INT32(12);
    param->probability= PG_GETARG_INT32(13);

    prob = PGM_Matriz_Double2svm_problem(matrix,vector,param);

    if (cross_validation){
        do_cross_validation(prob,param,n_fold);
        elog(ERROR,"CROSS VALIDATION NOT IMPLEMENTED"); // Ask Filipe about this!
        PG_RETURN_VOID();
    }else{
        MemoryContext contextoAnterior = MemoryContextSwitchTo( CurTransactionContext );
        struct svm_model *model = svm_train(prob,param);
        MemoryContextSwitchTo( contextoAnterior );
        PG_RETURN_POINTER(model);
    }
}
int main(int argc, char **argv)
{
    char input_file_name[1024];
    char model_file_name[1024];
    const char *error_msg;

    parse_command_line(argc, argv, input_file_name, model_file_name);
    read_problem(input_file_name);
    error_msg = svm_check_parameter(&prob, &param);

    if(error_msg)
    {
        fprintf(stderr,"ERROR: %s\n",error_msg);
        exit(1);
    }

    if(cross_validation)
    {
        if (nr_fold <= 10)
        {
            do_cross_validation();
        }
        else
        {
            double cv;
            nr_fold = nr_fold - 10;
            cv = binary_class_cross_validation(&prob, &param, nr_fold);
            printf("Cross Validation = %g%%\n",100.0*cv);
        }
    }
    else
    {
        model = svm_train(&prob, &param);
        if(svm_save_model(model_file_name,model))
        {
            fprintf(stderr, "can't save model to file %s\n", model_file_name);
            exit(1);
        }
        svm_free_and_destroy_model(&model);
    }
    svm_destroy_param(&param);
    free(prob.y);
    free(prob.x);
    free(x_space);
    free(line);

    return 0;
}
virtual void Learn(IDataSet* data) {
    vector< vector<double> > features = PreprocessFeatures(data);

    vector<int> targets;
    for (int i = 0; i < data->GetObjectCount(); i++)
        targets.push_back(data->GetTarget(i));

    train_prob = sh_ptr<svm_problem_xx>(new svm_problem_xx(features, targets));

    double c_values[] = { 2048, 32768 };
    double gamma_values[] = { 3.0517578125e-05, 0.0001220703125 };

    svm_parameter best_param;
    double best_accuracy = -1;

    // select hyperparameters (regularization coefficient C and kernel parameter gamma) by 10-fold cross-validation
    for (int c_index = 0; c_index < ARRAY_SIZE(c_values); c_index++) {
        for (int gamma_index = 0; gamma_index < ARRAY_SIZE(gamma_values); gamma_index++) {
            svm_parameter param;
            memset(&param, 0, sizeof(param));
            param.svm_type = C_SVC;
            param.kernel_type = RBF;
            param.C = c_values[c_index];
            param.gamma = gamma_values[gamma_index];
            param.cache_size = 100;
            param.eps = 1e-3;
            param.shrinking = 1;

            vector<double> res(targets.size());
            svm_cross_validation(train_prob.get(), &param, 10, &res[0]);

            double accuracy = 0;
            for (int i = 0; i < res.size(); i++)
                if (res[i] == targets[i])
                    accuracy += 1;
            accuracy /= res.size();

            if (accuracy > best_accuracy) {
                best_accuracy = accuracy;
                best_param = param;
            }
        }
    }

    model = sh_ptr<svm_model_xx>(new svm_model_xx(svm_train(train_prob.get(), &best_param)));
}
static VALUE cModel_class_train(VALUE obj,VALUE problem,VALUE parameter) {
    const struct svm_problem *prob;
    const struct svm_parameter *param;
    struct svm_model *model;
    const char *check_error;

    Data_Get_Struct(problem, struct svm_problem, prob);
    Data_Get_Struct(parameter, struct svm_parameter, param);

    check_error = svm_check_parameter(prob, param);
    if(check_error != NULL) {
        rb_raise(rb_eArgError, "Parameters not valid for Problem: '%s'", check_error);
    }
    model = svm_train(prob,param);

    return Data_Wrap_Struct(cModel, 0, model_free, model);
}
int SVM::train()
{
    if (problem.y == NULL || problem.x == NULL)
        return -1;

    const char* error_msg = svm_check_parameter(&problem, &param);
    if (error_msg) {
        std::cout << "ERROR: " << error_msg << std::endl;
        exit(-1);
    }

    model = svm_train(&problem, &param);
    /*for (int i = 0; i < 100000; i++)
        if (sin(i) + cos(i) > 1.414)
            std::cout << ".";
    */
    main_equation = new Equation();
    svm_model_visualization(model, main_equation);
    svm_free_and_destroy_model(&model);
    return 0;
}
bool CmySvmArth::Train( char* path)
{
    const char *error_msg;
    error_msg = svm_check_parameter(&prob, &param);
    if(error_msg)
    {
        fprintf(stderr,"ERROR: %s\n",error_msg);
        free(prob.y);
        free(prob.x);
        free(x_space);
        free(line);
        line = NULL;
        x_space = NULL;
        return false;
    }
    if(cross_validation)
    {
        do_cross_validation();
    }
    else
    {
        model = svm_train(&prob, &param);
        if(path!=NULL&&svm_save_model(path,model))
        {
            fprintf(stderr, "can't save model to file %s\n", path);
            return false;
        }
        if(path!=NULL)
        {
            svm_free_and_destroy_model(&model);
        }
    }
    svm_destroy_param(&param);
    free(prob.y);
    free(prob.x);
    if(path!=NULL)
    {
        free(x_space);
        free(line);
        line = NULL;
        x_space = NULL;
    }
    return true;
}
int Linear_Kernel_Kmeans::OneClassSVM()
{
    if(verbose) std::cout<<" Training SVM for cluster : ";
    // #pragma omp parallel for num_threads(2)
    // NOTE: set num_threads = 2 or comment out to avoid memory overflow for large datasets
    for(int i=0;i<k;i++)
    {
        if(verbose)
        {
            std::cout<<i+1<<".";
            std::cout.flush();
        }
        if(counts[i]==0)
        {
            rho[i] = -1e200;
            std::fill_n(eval.p[i],m,0.0);
        }
        else
        {
            svm_parameter param;
            svm_problem prob;
            svm_model *model = NULL;
            svm_node *x_space = NULL;
            std::set<int> choose;

            UseSVM_Init(param,prob,x_space);
            param.svm_type = ONE_CLASS;
            param.kernel_type = 0; // '0' -- linear kernel
            param.nu = 0.2;
            choose.clear();
            choose.insert(i); // only use points from cluster 'i', so a one-class problem
            UseSVM_BuildProblem<double>(data,labels,choose,prob,x_space,true,5000);
            model = svm_train(&prob, &param);
            rho[i] = UseSVM_Linear_FastEvaluationStructure(*model,m,eval,i);
            UseSVM_CleanUp(model,param,prob,x_space);
        }
    }
    if(verbose) std::cout<<std::endl;

    if(verbose)
    {
#pragma omp parallel for
        for(int i=0;i<n;i++)
            labels[i] = Linear_Find_Nearest(data.p[i],eval,m,k,rho,useMedian);
        std::fill_n(counts,k,0);
        for(int i=0;i<n;i++) counts[labels[i]]++;
        std::cout<<" Error after 1-class SVM = "<<ComputeConstants(false)<<std::endl;
    }

    return validcenters;
}
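UseSVM_Linear_FastEvaluationStructure is project-specific. For a linear kernel the one-class decision function f(x) = sum_i alpha_i <sv_i, x> - rho collapses into a single dense weight vector, which is presumably what that helper builds into eval. A generic sketch of that flattening step, assuming the standard sparse svm_node representation (not the _DENSE_REP variant used elsewhere in this collection):

#include <vector>
#include "svm.h"

// Generic sketch: collapse a trained linear-kernel one-class model into a dense
// weight vector w of length dim, so the decision value is dot(w, x) - rho.
static double flatten_linear_model(const svm_model &model, int dim, std::vector<double> &w) {
    w.assign(dim, 0.0);
    for (int i = 0; i < model.l; ++i) {
        const double alpha = model.sv_coef[0][i];          // one-class: single coefficient row
        for (const svm_node *p = model.SV[i]; p->index != -1; ++p)
            if (p->index <= dim)
                w[p->index - 1] += alpha * p->value;       // svm_node indices are 1-based
    }
    return model.rho[0];                                   // subtract this from dot(w, x)
}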
int main(int argc, char **argv)
{
    char input_file_name[1024];
    char model_file_name[1024];
    const char *error_msg;

    parse_command_line(argc, argv, input_file_name, model_file_name);
    read_problem(input_file_name);
    error_msg = svm_check_parameter(&prob, &param);

    if(error_msg)
    {
        fprintf(stderr,"ERROR: %s\n",error_msg);
        exit(1);
    }

    if(cross_validation)
    {
        do_cross_validation();
    }
    else
    {
        model = svm_train(&prob, &param);
        if(svm_save_model(model_file_name,model))
        {
            fprintf(stderr, "can't save model to file %s\n", model_file_name);
            exit(1);
        }
        svm_free_and_destroy_model(&model);
    }
    svm_destroy_param(&param);
    free(prob.y);
#ifdef _DENSE_REP
    for (int i = 0; i < prob.l; ++i)
        free((prob.x+i)->values);
#else
    free(x_space);
#endif
    free(prob.x);
    free(line);

    return 0;
}
int SVMTrainModel::train(double &RecRate, std::vector<int> &ConfusionTable)
{
    if((!have_input_file_name) || (!have_model_file_name))
    {
        fprintf(stderr,"ERROR: Set Input and Model files first!\n");
        exit(1);
    }

    const char *error_msg;

    readProblem(input_file_name);
    error_msg = svm_check_parameter(&prob, &param);
    if(error_msg)
    {
        fprintf(stderr,"ERROR: %s\n",error_msg);
        exit(1);
    }

    if(cross_validation)
    {
        do_cross_validation(RecRate,ConfusionTable);
    }
    else
    {
        model = svm_train(&prob, &param);
        if(svm_save_model(model_file_name,model))
        {
            fprintf(stderr, "can't save model to file %s\n", model_file_name);
            exit(1);
        }
        svm_free_and_destroy_model(&model);
    }
    svm_destroy_param(&param);
    free(prob.y);
    free(prob.x);
    free(x_space);
    free(line);

    return 0;
}
void LibsvmSvm::train(const std::vector<PidMat>& features,const std::vector<int>& labels)
{
    assert(features.size() > 0 && labels.size() == features.size());

    int featureCount = features.size();
    int featureLength = features[0].rows;

    svm_problem prob;
    prob.l = featureCount;
    prob.y = new double[featureCount];
    prob.x = new struct svm_node*[featureCount];

    for(int i = 0; i < featureCount; i++)
    {
        prob.x[i] = new struct svm_node[featureLength + 1];
        for(int j = 0; j < featureLength; j++)
        {
            svm_node node;
            node.index = j + 1;
            node.value = features[i](j,0);
            prob.x[i][j] = node;
        }
        prob.x[i][featureLength].index = -1;
        prob.y[i] = labels[i];
    }

    _model = svm_train(&prob, &_param);
}
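The _param member used above is configured elsewhere in LibsvmSvm. For reference, a typical C_SVC/RBF setup that such a member might carry looks like the sketch below; the concrete values are illustrative, not the project's actual settings, and every field LibSVM reads must be initialized before svm_train is called.

#include <cstddef>
#include "svm.h"

// Illustrative defaults for an svm_parameter used with svm_train.
static svm_parameter make_default_param(int featureLength) {
    svm_parameter p;
    p.svm_type     = C_SVC;
    p.kernel_type  = RBF;
    p.degree       = 3;
    p.gamma        = 1.0 / featureLength;  // common default: 1 / number of features
    p.coef0        = 0.0;
    p.cache_size   = 100;                  // kernel cache size in MB
    p.eps          = 1e-3;                 // stopping tolerance
    p.C            = 1.0;
    p.nu           = 0.5;
    p.p            = 0.1;
    p.shrinking    = 1;
    p.probability  = 0;
    p.nr_weight    = 0;                    // no per-class weighting
    p.weight_label = NULL;
    p.weight       = NULL;
    return p;
}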
void SVM::train(char* pInputSampleFileName, char* OutputModelFilename, double &dRetTrainError, double &dRetCrossValError)
{
    struct svm_parameter strSvmParameters;
    struct svm_problem strSvmProblem;
    struct svm_model *pstrSvmModel;
    const char *error_msg;
    double dCrossValError = -1;
    double dTrainError = -1;

    //set parameters
    this->setParameters(strSvmParameters);

    //read sample file
    this->read_problem(pInputSampleFileName, strSvmProblem, strSvmParameters);

    //check parameters
    error_msg = svm_check_parameter(&strSvmProblem, &strSvmParameters);
    if (error_msg)
    {
        cout << "ERROR: " << error_msg << std::endl;
        return;
    }

    //train model
    pstrSvmModel = svm_train(&strSvmProblem, &strSvmParameters);

    //do cross validation check
    dCrossValError = this->crossValidationSamples(strSvmProblem, strSvmParameters, 5);

    //save trained model
    svm_save_model(OutputModelFilename, pstrSvmModel);

    //test trained model with training set -> train error
    cout << "test model " << OutputModelFilename << " with the training set " << pInputSampleFileName << std::endl;
    this->test(pInputSampleFileName, OutputModelFilename, dTrainError);

    //clean up
    svm_destroy_model(pstrSvmModel);
    svm_destroy_param(&strSvmParameters);
    free(strSvmProblem.y);
    free(strSvmProblem.x);

    dRetTrainError = dTrainError;
    dRetCrossValError = dCrossValError;
}
bool Problem::CProblem::trainAndSaveModel(const FilePath& path, const Paramter& param) const
{
    if (!m_hasData)
    {
        return false;
    }

    svm_model* model = svm_train(&m_problem, &param);

    const FilePath parentFilePath = FileSystem::ParentPath(path);
    if (!FileSystem::Exists(parentFilePath) && !FileSystem::CreateDirectories(parentFilePath))
    {
        return false;
    }

    const int32 result = svm_save_model(path.narrow().c_str(), model);
    svm_free_and_destroy_model(&model);

    return (result == 0);
}
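A model saved this way can later be reloaded and used for prediction without retraining. A minimal, self-contained usage sketch; the file name and the two-feature sample are placeholders.

#include <cstdio>
#include "svm.h"

int main() {
    svm_model *model = svm_load_model("model.txt");     // placeholder path
    if (model == NULL) {
        std::fprintf(stderr, "could not load model\n");
        return 1;
    }
    svm_node sample[3];
    sample[0].index = 1; sample[0].value = 0.5;          // placeholder feature values
    sample[1].index = 2; sample[1].value = -1.2;
    sample[2].index = -1;                                // end-of-vector terminator
    std::printf("predicted label: %f\n", svm_predict(model, sample));
    svm_free_and_destroy_model(&model);
    return 0;
}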