One_Class_SVM::~One_Class_SVM() {
    // A trained instance owns a libSVM model: release its contents first,
    // then the model struct itself (it was malloc'd, so free(), not delete).
    // An untrained instance instead still holds raw support vectors in the
    // base module.
    if (!trained) {
        SVM_Module::free_support_vectors();
    } else {
        svm_free_model_content(_model);
        free(_model);
    }
    // NOTE(review): plain delete here assumes these members are single
    // objects, not arrays — matches the original; confirm at the allocation
    // sites.
    delete _class_labels;
    delete _support_vectors;
    delete _parameters;
}
void fillAge(list_t *passengerList,list_t *testPassengerList){ list_t inAge,notAge; struct svm_parameter param; // set by parse_command_line struct svm_problem prob,prob2; // set by read_problem struct svm_model *model; struct svm_node *x_space; int max_index=5; int i; cell_t *cur; passenger *human; // default values param.svm_type = NU_SVR; param.kernel_type = LINEAR; param.degree = 3; param.gamma = 0; // 1/num_features param.coef0 = 0; param.nu = 0.5; param.cache_size = 0; param.C = 1; param.eps = 1e-2; param.p = 0.1; param.shrinking = 1; param.probability = 0; param.nr_weight = 0; param.weight_label = NULL; param.weight = NULL; memset(&inAge,0,sizeof(list_t)); memset(¬Age,0,sizeof(list_t)); for(cur=passengerList->first;cur;cur=cur->next){ human=(passenger *)cur->data; if(human->age==-1) addList(¬Age,human); else addList(&inAge,human); } for(cur=testPassengerList->first;cur;cur=cur->next){ human=(passenger *)cur->data; if(human->age==-1) addList(¬Age,human); else addList(&inAge,human); } prob.l=inAge.size; prob.y = Malloc(double,prob.l); prob.x = Malloc(struct svm_node *,prob.l); for(i=0,cur=inAge.first;cur;cur=cur->next,i++){ x_space = Malloc(struct svm_node,max_index+1); passenger *human=(passenger *)cur->data; x_space[0].index=1; x_space[0].value=(double)human->sex; x_space[1].index=2; x_space[1].value=(double)human->rank; x_space[2].index=3; x_space[2].value=(double)human->fare; x_space[3].index=4; x_space[3].value=(double)human->prop1; x_space[4].index=5; x_space[4].value=getNameNo(human->name->honorific); x_space[max_index].index = -1; prob.x[i]=x_space; prob.y[i]=(double)human->age; } prob2.l=notAge.size; prob2.x = Malloc(struct svm_node *,prob2.l); passenger **tmp= Malloc(passenger *,prob2.l); for(i=0,cur=notAge.first;cur;cur=cur->next,i++){ x_space = Malloc(struct svm_node,max_index+1); passenger *human=(passenger *)cur->data; x_space[0].index=1; x_space[0].value=(double)human->sex; x_space[1].index=2; x_space[1].value=(double)human->rank; 
x_space[2].index=3; x_space[2].value=(double)human->fare; x_space[3].index=4; x_space[3].value=(double)human->prop1; x_space[4].index=5; x_space[4].value=getNameNo(human->name->honorific); x_space[max_index].index = -1; prob2.x[i]=x_space; tmp[i]=human; } param.gamma=1.0/(double)max_index; scale(prob.x,prob.l,prob2.x,prob2.l,max_index); //printf("%lf\n",cv(¶m,&prob));exit(0); model = svm_train(&prob,¶m); for(i=0;i<prob2.l;i++){ tmp[i]->age=(int)svm_predict(model,prob2.x[i]); } svm_free_model_content(model); svm_free_and_destroy_model(&model); svm_destroy_param(¶m); for(i=0,cur=inAge.first;cur;cur=cur->next,i++){ free(prob.x[i]); } for(i=0,cur=notAge.first;cur;cur=cur->next,i++){ free(prob2.x[i]); } free(prob.x); free(prob.y); free(prob2.x); }
/// <summary> /// use a kernel matrix and solve the svm problem /// </summary> /// <param name="Kernel">precomputed kernel</param> DualSolution libSVMWrapper::Solve(const Eigen::MatrixXd &Kernel) { // unfortunately libsvm needs "svm_nodes", so we have to copy everything // workaround pointer to matrix entries (eigen3 does not allow it?) // copy entries (code from libSVM) int j = 0, sc = NumberOfData + 1; // TODO // libSVM_x_space[j+k].value = *(Kernel.data() + (k-1)*NumberOfData + i); // by reference or pointer #pragma omp parallel for for (int i = 0; i < libSVM_Problem.l; i++) { j = (sc+1)*i ; for (int k = 0; k < sc; k++) { libSVM_x_space[j].index = k + 1; if (k == 0) { libSVM_x_space[j].value = i + 1; } else { libSVM_x_space[j+k].value = Kernel(i, k - 1); } } j = ((sc+1)*i+sc) ; libSVM_x_space[j+1].index = -1; } #ifdef DEBUG for (int i = 0; i < libSVM_Problem.l; i++) { if ((int) libSVM_Problem.x[i][0].value <= 0 || (int) libSVM_Problem.x[i][0].value > sc) { printf("Wrong input format: sample_serial_number out of range\n"); exit(0); } } const char *error_msg; error_msg = svm_check_parameter(&libSVM_Problem, &libSVM_Parameter); if (error_msg) { fprintf(stderr, "ERROR: %s\n", error_msg); exit(1); } #endif // train the model if (libSVM_Model != NULL) { svm_free_model_content(libSVM_Model); libSVM_Model = NULL; } libSVM_Model = svm_train(&libSVM_Problem, &libSVM_Parameter); // extract results // bias is easy double Bias = -1 * libSVM_Model->rho[0]; // alpha should be dense now Eigen::VectorXd Alpha = Eigen::MatrixXd::Zero(NumberOfData, 1); for (int i = 0; i < libSVM_Model->l; i++) { Alpha(libSVM_Model->sv_indices[i] - 1) = (libSVM_Model->sv_coef[0][i] < 0) ? -1 * libSVM_Model->sv_coef[0][i] : libSVM_Model->sv_coef[0][i]; } DualSolution DS; DS.Bias = Bias; DS.Alpha = Alpha; Eigen::VectorXd tt = Alpha.cwiseProduct(Y); // objective value of dual solution DS.Value = Alpha.sum() - 0.5* (double)(tt.transpose() * (Kernel*tt)); return DS; }