/*
 * predict_esvm: predict an ensemble SVM model on a test point x.
 *
 * The weighted vote share of each class is stored in *margin, a freshly
 * allocated 2-element array owned by the caller on success:
 *   (*margin)[0] = total weight voting for class -1
 *   (*margin)[1] = total weight voting for class +1
 * (Binary classification only; dvector is assumed to zero-initialize,
 * since the slots are only ever accumulated with += — TODO confirm.)
 *
 * Returns: -1 or 1 (the predicted class), 0 on a tie (non-unique
 * classification), -2 on error (in which case *margin is freed and NULL).
 */
int predict_esvm(ESupportVectorMachine *esvm, double x[], double **margin)
{
  int b;
  int pred;
  double *tmpmargin;

  if (!((*margin) = dvector(2))) {
    fprintf(stderr, "predict_esvm: out of memory\n");
    return -2;
  }

  for (b = 0; b < esvm->nmodels; b++) {
    pred = predict_svm(&(esvm->svm[b]), x, &tmpmargin);
    if (pred < -1) {
      fprintf(stderr, "predict_esvm: predict_svm error\n");
      /* fix: do not leak the output buffer on the error path */
      free_dvector(*margin);
      *margin = NULL;
      return -2;
    }
    if (pred == -1)
      (*margin)[0] += esvm->weights[b];
    else if (pred == 1)
      (*margin)[1] += esvm->weights[b];
    /* pred == 0 (tie in the base model) contributes to neither class */
    free_dvector(tmpmargin);
  }

  /* weighted majority vote */
  if ((*margin)[0] > (*margin)[1])
    return -1;
  else if ((*margin)[0] < (*margin)[1])
    return 1;
  return 0; /* fix: removed the unreachable trailing `return -2;` */
}
/*
 * compute_svm_adaboost: train an ensemble of SVMs by AdaBoost resampling.
 *
 * esvm     - output ensemble; base models and voting weights are allocated
 *            here.  On error the partially filled esvm members are left in
 *            place for the caller to release (presumably via the ensemble's
 *            own free routine — TODO confirm ownership convention).
 * n, d     - number of training points and their dimension
 * x, y     - training data; y must contain exactly the two labels -1 and 1
 * nmodels  - maximum number of boosting rounds / base models
 * kernel, kp, C, tol, eps, maxloops, verbose
 *          - forwarded unchanged to compute_svm for each base model
 *
 * Returns 0 on success, 1 on error.
 *
 * Fix vs. original: every error return used to leak the local temporaries
 * (trx, try, prob, prob_copy, pred, classes, and mid-loop samples/margin);
 * all paths now funnel through a single goto-based cleanup block.
 */
static int compute_svm_adaboost(ESupportVectorMachine *esvm, int n, int d,
                                double *x[], int y[], int nmodels, int kernel,
                                double kp, double C, double tol, double eps,
                                int maxloops, int verbose)
{
  int i, b;
  int *samples = NULL;      /* bootstrap sample indices (per round) */
  double **trx = NULL;      /* resampled inputs: row pointers into x, not owned */
  int *try = NULL;          /* resampled labels */
  double *prob = NULL;      /* AdaBoost weight distribution over points */
  double *prob_copy = NULL; /* scratch copy: sample() consumes its input */
  double sumalpha;          /* running sum of model voting weights */
  double epsilon;           /* weighted training error of current model */
  int *pred = NULL;         /* per-point predictions of current model */
  double *margin = NULL;
  double sumprob;
  int nclasses;
  int *classes = NULL;      /* distinct labels found in y */
  int ret = 1;              /* pessimistic default: error */

  /* ---- parameter validation (nothing allocated yet) ---- */
  if (nmodels < 1) {
    fprintf(stderr, "compute_svm_adaboost: nmodels must be greater than 0\n");
    return 1;
  }
  if (C <= 0) {
    fprintf(stderr, "compute_svm_adaboost: regularization parameter C must be > 0\n");
    return 1;
  }
  if (eps <= 0) {
    fprintf(stderr, "compute_svm_adaboost: parameter eps must be > 0\n");
    return 1;
  }
  if (tol <= 0) {
    fprintf(stderr, "compute_svm_adaboost: parameter tol must be > 0\n");
    return 1;
  }
  if (maxloops <= 0) {
    fprintf(stderr, "compute_svm_adaboost: parameter maxloops must be > 0\n");
    return 1;
  }
  switch (kernel) {
  case SVM_KERNEL_LINEAR:
    break;
  case SVM_KERNEL_GAUSSIAN: /* fallthrough: both kernels need kp > 0 */
  case SVM_KERNEL_POLINOMIAL:
    if (kp <= 0) {
      fprintf(stderr, "compute_svm_adaboost: parameter kp must be > 0\n");
      return 1;
    }
    break;
  default:
    fprintf(stderr, "compute_svm_adaboost: kernel not recognized\n");
    return 1;
  }

  /* ---- label sanity: exactly the binary labels -1 and 1 ---- */
  nclasses = iunique(y, n, &classes);
  if (nclasses <= 0) {
    fprintf(stderr, "compute_svm_adaboost: iunique error\n");
    goto cleanup;
  }
  if (nclasses == 1) {
    fprintf(stderr, "compute_svm_adaboost: only 1 class recognized\n");
    goto cleanup;
  }
  if (nclasses > 2) {
    fprintf(stderr, "compute_svm_adaboost: multiclass classification not allowed\n");
    goto cleanup;
  }
  /* nclasses == 2; iunique is assumed to return labels sorted — TODO confirm */
  if (classes[0] != -1 || classes[1] != 1) {
    fprintf(stderr, "compute_svm_adaboost: for binary classification classes must be -1,1\n");
    goto cleanup;
  }

  /* ---- allocations ---- */
  if (!(esvm->svm = (SupportVectorMachine *)
            calloc(nmodels, sizeof(SupportVectorMachine)))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(esvm->weights = dvector(nmodels))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(trx = (double **)calloc(n, sizeof(double *)))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(try = ivector(n))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(prob_copy = dvector(n))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(prob = dvector(n))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }
  if (!(pred = ivector(n))) {
    fprintf(stderr, "compute_svm_adaboost: out of memory\n");
    goto cleanup;
  }

  /* start from the uniform distribution over training points */
  for (i = 0; i < n; i++)
    prob[i] = 1.0 / (double)n;

  esvm->nmodels = nmodels;
  sumalpha = 0.0;
  for (b = 0; b < nmodels; b++) {
    /* bootstrap-resample the training set according to prob */
    for (i = 0; i < n; i++)
      prob_copy[i] = prob[i];
    if (sample(n, prob_copy, n, &samples, TRUE, b) != 0) {
      fprintf(stderr, "compute_svm_adaboost: sample error\n");
      goto cleanup;
    }
    for (i = 0; i < n; i++) {
      trx[i] = x[samples[i]];
      try[i] = y[samples[i]];
    }

    if (compute_svm(&(esvm->svm[b]), n, d, trx, try, kernel, kp, C,
                    tol, eps, maxloops, verbose, NULL) != 0) {
      fprintf(stderr, "compute_svm_adaboost: compute_svm error\n");
      goto cleanup;
    }
    free_ivector(samples);
    samples = NULL;

    /* weighted training error of this round's model on the FULL set */
    epsilon = 0.0;
    for (i = 0; i < n; i++) {
      margin = NULL; /* so cleanup never sees a stale pointer on error */
      pred[i] = predict_svm(&(esvm->svm[b]), x[i], &margin);
      if (pred[i] < -1) {
        fprintf(stderr, "compute_svm_adaboost: predict_svm error\n");
        goto cleanup;
      }
      if (pred[i] == 0 || pred[i] != y[i])
        epsilon += prob[i];
      free_dvector(margin);
      margin = NULL;
    }

    if (epsilon > 0 && epsilon < 0.5) {
      /* standard AdaBoost voting weight: 0.5 * ln((1 - eps) / eps) */
      esvm->weights[b] = 0.5 * log((1.0 - epsilon) / epsilon);
      sumalpha += esvm->weights[b];
    } else {
      /* degenerate round (perfect or no-better-than-chance): stop boosting */
      esvm->nmodels = b;
      break;
    }

    /* reweight: boost the probability of points this model got wrong */
    sumprob = 0.0;
    for (i = 0; i < n; i++) {
      prob[i] = prob[i] * exp(-esvm->weights[b] * y[i] * pred[i]);
      sumprob += prob[i];
    }
    if (sumprob <= 0) {
      fprintf(stderr, "compute_svm_adaboost: sumprob = 0\n");
      goto cleanup;
    }
    for (i = 0; i < n; i++)
      prob[i] /= sumprob;
  }

  if (esvm->nmodels <= 0) {
    fprintf(stderr, "compute_svm_adaboost: no models produced\n");
    goto cleanup;
  }
  if (sumalpha <= 0) {
    fprintf(stderr, "compute_svm_adaboost: sumalpha = 0\n");
    goto cleanup;
  }
  /* normalize the voting weights to sum to 1 */
  for (b = 0; b < esvm->nmodels; b++)
    esvm->weights[b] /= sumalpha;

  ret = 0;

cleanup:
  /* Release local temporaries on ALL paths (the original leaked them on
   * every error return).  esvm->svm / esvm->weights are deliberately left
   * alone — they belong to the caller's ensemble.  The guards are kept
   * because free_ivector/free_dvector are project routines whose behavior
   * on NULL is not visible here. */
  if (samples)
    free_ivector(samples);
  if (margin)
    free_dvector(margin);
  if (trx)
    free(trx); /* row pointers only; the rows belong to x */
  if (classes)
    free_ivector(classes);
  if (try)
    free_ivector(try);
  if (pred)
    free_ivector(pred);
  if (prob)
    free_dvector(prob);
  if (prob_copy)
    free_dvector(prob_copy);
  return ret;
}

/*
 * svm_smo: Sequential Minimal Optimization main loop (Platt-style SMO).
 *
 * Binds the kernel/decision function pointers for the requested kernel
 * type, then alternates full sweeps over all training points with sweeps
 * over only the non-bound multipliers (0 < alpha < C) until no multiplier
 * changes, or maxloops sweeps have run (in which case svm->convergence is
 * cleared to signal non-convergence).
 */
static void svm_smo(SupportVectorMachine *svm)
{
  int i, k;
  int numChanged;
  int examineAll;
  int nloops = 0;

  svm->end_support_i = svm->n;

  /* select kernel and decision functions for this model */
  if (svm->kernel_type == SVM_KERNEL_LINEAR) {
    svm->kernel_func = dot_product_func;
    svm->learned_func = learned_func_linear;
  }
  if (svm->kernel_type == SVM_KERNEL_POLINOMIAL) {
    svm->kernel_func = polinomial_kernel;
    svm->learned_func = learned_func_nonlinear;
  }
  if (svm->kernel_type == SVM_KERNEL_GAUSSIAN) {
    /* cache x_i . x_i for the RBF kernel; the buffer is assumed to be
     * allocated by the caller (an allocation here was commented out
     * upstream) — TODO confirm */
    for (i = 0; i < svm->n; i++)
      svm->precomputed_self_dot_product[i] = dot_product_func(i, i, svm);
    svm->kernel_func = rbf_kernel;
    svm->learned_func = learned_func_nonlinear;
  }

  numChanged = 0;
  examineAll = 1;
  svm->convergence = 1;
  while (svm->convergence == 1 && (numChanged > 0 || examineAll)) {
    numChanged = 0;
    if (examineAll) {
      /* full sweep over every training point */
      for (k = 0; k < svm->n; k++)
        numChanged += examineExample(k, svm);
    } else {
      /* sweep only the non-bound multipliers */
      for (k = 0; k < svm->n; k++)
        if (svm->alph[k] > 0 && svm->alph[k] < svm->Cw[k])
          numChanged += examineExample(k, svm);
    }
    if (examineAll == 1)
      examineAll = 0;
    else if (numChanged == 0)
      examineAll = 1;

    nloops += 1;
    if (nloops == svm->maxloops)
      svm->convergence = 0; /* iteration cap reached: flag non-convergence */
    if (svm->verbose == 1) {
      fprintf(stdout, "%6d\b\b\b\b\b\b\b", nloops);
      fflush(stdout); /* fix: without a flush the in-place counter never shows */
    }
  }
}
// Overloaded method: gather the test files found under `path`, then run
// prediction on the collected set.
void SVM__Class::testingSet(QString path)
{
    // 't' tags the iterated files as testing data; the descriptor and the
    // running file count are filled in place.
    fileIterator(path, &testing_Descriptor, &total_testing_count, 't');
    predict_svm();
}