void read_input_parameters(int argc, char **argv, char *testfile, char *modelfile, char *scorefile, STRUCT_LEARN_PARM *sparm) {

  long i;
  
  /* set default */
  sparm->custom_argc = 0;
  sparm->feature_size = 2405;

  for (i=1;(i<argc)&&((argv[i])[0]=='-');i++) {
    switch ((argv[i])[1]) {
      case '-': strcpy(sparm->custom_argv[sparm->custom_argc++],argv[i]);i++; strcpy(sparm->custom_argv[sparm->custom_argc++],argv[i]);break;  
      default: printf("\nUnrecognized option %s!\n\n",argv[i]); exit(0);    
    }
  }

  if (i>=argc) {
    printf("\nNot enough input parameters!\n\n");
    exit(0);
  }

  strcpy(testfile, argv[i]);
  if(i+1<argc)
    strcpy(modelfile, argv[i+1]);
  if(i+2<argc)
    strcpy(scorefile, argv[i+2]);

  parse_struct_parameters(sparm);

}
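/*
 * Hedged sketch (not part of the original sources): the "case '-':" branch
 * above stores a "--key value" pair in sparm->custom_argv without checking
 * that a value actually follows the flag or that custom_argc stays within
 * bounds.  The stand-in below shows the same idiom with explicit checks.
 * MAX_CUSTOM and CUSTOM_LEN are assumed limits, not the real
 * STRUCT_LEARN_PARM fields.
 */
#include <stdio.h>
#include <string.h>

#define MAX_CUSTOM 50   /* assumed capacity of the custom-argument table */
#define CUSTOM_LEN 300  /* assumed maximum length of one argument */

typedef struct {
  int  custom_argc;
  char custom_argv[MAX_CUSTOM][CUSTOM_LEN];
} custom_args;

/* Collect the "--key value" pair starting at argv[*i]; returns 0 on error. */
static int collect_custom_pair(custom_args *ca, int argc, char **argv, long *i) {
  if (*i + 1 >= argc || ca->custom_argc + 2 > MAX_CUSTOM)
    return 0;                                  /* missing value or table full */
  /* strncpy copies at most CUSTOM_LEN-1 bytes; the zero-initialized
     struct guarantees the last byte stays '\0' even on truncation. */
  strncpy(ca->custom_argv[ca->custom_argc++], argv[*i], CUSTOM_LEN - 1);
  (*i)++;
  strncpy(ca->custom_argv[ca->custom_argc++], argv[*i], CUSTOM_LEN - 1);
  return 1;
}

int main(int argc, char **argv) {
  custom_args ca = {0};
  long i;
  for (i = 1; (i < argc) && (argv[i][0] == '-'); i++) {
    switch (argv[i][1]) {
      case '-':
        if (!collect_custom_pair(&ca, argc, argv, &i)) {
          printf("\nOption %s needs a value!\n\n", argv[i]);
          return 1;
        }
        break;
      default:
        printf("\nUnrecognized option %s!\n\n", argv[i]);
        return 1;
    }
  }
  for (int j = 0; j + 1 < ca.custom_argc; j += 2)
    printf("custom option %s = %s\n", ca.custom_argv[j], ca.custom_argv[j + 1]);
  return 0;
}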
void MedSTC::set_init_param(STRUCT_LEARN_PARM *struct_parm, LEARN_PARM *learn_parm, 
							KERNEL_PARM *kernel_parm, int *alg_type)
{
	/* set default */
	(*alg_type) = DEFAULT_ALG_TYPE;
	struct_parm->C = -0.01;
	struct_parm->slack_norm = 1;
	struct_parm->epsilon = DEFAULT_EPS;
	struct_parm->custom_argc = 0;
	struct_parm->loss_function = DEFAULT_LOSS_FCT;
	struct_parm->loss_type = DEFAULT_RESCALING;
	struct_parm->newconstretrain = 100;
	struct_parm->ccache_size = 5;
	struct_parm->batch_size = 100;
	struct_parm->delta_ell = m_dDeltaEll;

	strcpy(learn_parm->predfile, "trans_predictions");
	strcpy(learn_parm->alphafile, "");
	verbosity = 0;/*verbosity for svm_light*/
	struct_verbosity = 0; /*verbosity for struct learning portion*/
	learn_parm->biased_hyperplane = 1;
	learn_parm->remove_inconsistent = 0;
	learn_parm->skip_final_opt_check = 0;
	learn_parm->svm_maxqpsize = 10;
	learn_parm->svm_newvarsinqp = 0;
	learn_parm->svm_iter_to_shrink = -9999;
	learn_parm->maxiter = 100000;
	learn_parm->kernel_cache_size = 40;
	learn_parm->svm_c = 99999999;  /* overridden by struct_parm->C */
	learn_parm->eps = 0.001;       /* overridden by struct_parm->epsilon */
	learn_parm->transduction_posratio = -1.0;
	learn_parm->svm_costratio = 1.0;
	learn_parm->svm_costratio_unlab = 1.0;
	learn_parm->svm_unlabbound = 1E-5;
	learn_parm->epsilon_crit = 0.001;
	learn_parm->epsilon_a = 1E-10;  /* changed from 1e-15 */
	learn_parm->compute_loo = 0;
	learn_parm->rho = 1.0;
	learn_parm->xa_depth = 0;
	kernel_parm->kernel_type = 0;
	kernel_parm->poly_degree = 3;
	kernel_parm->rbf_gamma = 1.0;
	kernel_parm->coef_lin = 1;
	kernel_parm->coef_const = 1;
	strcpy(kernel_parm->custom,"empty");

	if(learn_parm->svm_iter_to_shrink == -9999) {
		learn_parm->svm_iter_to_shrink=100;
	}

	if((learn_parm->skip_final_opt_check) 
		&& (kernel_parm->kernel_type == LINEAR)) {
			learn_parm->skip_final_opt_check=0;
	}    
	parse_struct_parameters(struct_parm);
}
void read_input_parameters(int argc, char **argv, char *testfile, char *modelfile, char *labelfile, char *latentfile,char *scorefile, char* kernel_info_file, char* filestub, STRUCT_LEARN_PARM *sparm) {
    long i;

    /* set default */
    strcpy(modelfile, "lssvm_model");
    strcpy(labelfile, "lssvm_label");
    strcpy(latentfile, "lssvm_latent");
    strcpy(scorefile, "lssvm_score");
    strcpy(kernel_info_file, "lssvm_kernelconfig");
    strcpy(filestub, "lssvm_filestub");
    sparm->custom_argc = 0;

    for (i=1;(i<argc)&&((argv[i])[0]=='-');i++) {
        switch ((argv[i])[1]) {
          case '-': strcpy(sparm->custom_argv[sparm->custom_argc++],argv[i]);i++; strcpy(sparm->custom_argv[sparm->custom_argc++],argv[i]);break;  
          default: printf("\nUnrecognized option %s!\n\n",argv[i]); exit(0);    
        }
    }

    if (i>=argc) {
        printf("\nNot enough input parameters!\n\n");
        exit(0);
    }

    strcpy(testfile, argv[i]);
    if(i+1<argc)
        strcpy(modelfile, argv[i+1]);
    if(i+2<argc)
        strcpy(labelfile,argv[i+2]);
    if(i+3<argc)
        strcpy(latentfile,argv[i+3]);
    if(i+4<argc)
        strcpy(scorefile,argv[i+4]);
    if(i+5<argc)
        strcpy(filestub,argv[i+5]);
    if(i+6<argc)
        strcpy(kernel_info_file,argv[i+6]);

    printf("1 is %s\n", modelfile);
    printf("2 is %s\n", labelfile);
    printf("3 is %s\n", latentfile);
    printf("4 is %s\n", scorefile);
    printf("5 is %s\n", filestub);
    printf("6 is %s\n", kernel_info_file);
    fflush(stdout);
    parse_struct_parameters(sparm);
}
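/*
 * Hedged sketch (assumption, not the project's code): the positional copies
 * above use strcpy into fixed-size buffers whose sizes are not visible in
 * this file.  Assuming 200-byte file-name buffers, a bounded variant could
 * look like this; copy_filename and FILENAME_LEN are hypothetical names.
 */
#include <stdio.h>

#define FILENAME_LEN 200   /* assumed size of testfile/modelfile/... buffers */

static void copy_filename(char *dst, const char *src) {
  /* snprintf always null-terminates; a return value >= the buffer size
     signals that the source was truncated. */
  if (snprintf(dst, FILENAME_LEN, "%s", src) >= FILENAME_LEN)
    fprintf(stderr, "Warning: file name '%s' truncated to %d characters\n",
            src, FILENAME_LEN - 1);
}

int main(int argc, char **argv) {
  char testfile[FILENAME_LEN]  = "";
  char modelfile[FILENAME_LEN] = "lssvm_model";   /* same default as above */
  /* Option handling is omitted; only the positional-argument copies are shown. */
  if (argc > 1) copy_filename(testfile, argv[1]);
  if (argc > 2) copy_filename(modelfile, argv[2]);
  printf("testfile is %s\nmodelfile is %s\n", testfile, modelfile);
  return 0;
}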
void read_input_parameters(int argc,char *argv[],char *trainfile,
			   char *modelfile,
			   long *verbosity,long *struct_verbosity, 
			   STRUCT_LEARN_PARM *struct_parm,
			   LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm,
			   int *alg_type)
{
  long i;
  char type[100];
  
  /* set default */
  (*alg_type)=DEFAULT_ALG_TYPE;
  struct_parm->C=-0.01;
  struct_parm->slack_norm=1;
  struct_parm->epsilon=DEFAULT_EPS;
  struct_parm->custom_argc=0;
  struct_parm->loss_function=DEFAULT_LOSS_FCT;
  struct_parm->loss_type=DEFAULT_RESCALING;
  struct_parm->newconstretrain=100;
  struct_parm->ccache_size=5;
  struct_parm->batch_size=100;

  strcpy (modelfile, "svm_struct_model");
  strcpy (learn_parm->predfile, "trans_predictions");
  strcpy (learn_parm->alphafile, "");
  (*verbosity)=0;/*verbosity for svm_light*/
  (*struct_verbosity)=1; /*verbosity for struct learning portion*/
  learn_parm->biased_hyperplane=1;
  learn_parm->remove_inconsistent=0;
  learn_parm->skip_final_opt_check=0;
  learn_parm->svm_maxqpsize=10;
  learn_parm->svm_newvarsinqp=0;
  learn_parm->svm_iter_to_shrink=-9999;
  learn_parm->maxiter=100000;
  learn_parm->kernel_cache_size=40;
  learn_parm->svm_c=99999999;  /* overridden by struct_parm->C */
  learn_parm->eps=0.001;       /* overridden by struct_parm->epsilon */
  learn_parm->transduction_posratio=-1.0;
  learn_parm->svm_costratio=1.0;
  learn_parm->svm_costratio_unlab=1.0;
  learn_parm->svm_unlabbound=1E-5;
  learn_parm->epsilon_crit=0.001;
  learn_parm->epsilon_a=1E-10;  /* changed from 1e-15 */
  learn_parm->compute_loo=0;
  learn_parm->rho=1.0;
  learn_parm->xa_depth=0;
  kernel_parm->kernel_type=0;
  kernel_parm->poly_degree=3;
  kernel_parm->rbf_gamma=1.0;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  strcpy(kernel_parm->custom,"empty");
  strcpy(type,"c");

  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1]) 
      { 
      case '?': print_help(); exit(0);
      case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
      case 'c': i++; struct_parm->C=atof(argv[i]); break;
      case 'p': i++; struct_parm->slack_norm=atol(argv[i]); break;
      case 'e': i++; struct_parm->epsilon=atof(argv[i]); break;
      case 'k': i++; struct_parm->newconstretrain=atol(argv[i]); break;
      case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
      case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
      case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
      case 'w': i++; (*alg_type)=atol(argv[i]); break;
      case 'o': i++; struct_parm->loss_type=atol(argv[i]); break;
      case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
      case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
      case 'l': i++; struct_parm->loss_function=atol(argv[i]); break;
      case 'f': i++; struct_parm->ccache_size=atol(argv[i]); break;
      case 'b': i++; struct_parm->batch_size=atof(argv[i]); break;
      case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
      case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
      case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
      case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
      case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
      case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
      case '-': strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);i++; strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);break; 
      case 'v': i++; (*struct_verbosity)=atol(argv[i]); break;
      case 'y': i++; (*verbosity)=atol(argv[i]); break;
      default: printf("\nUnrecognized option %s!\n\n",argv[i]);
	       print_help();
	       exit(0);
      }
  }
  if(i>=argc) {
    printf("\nNot enough input parameters!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  strcpy (trainfile, argv[i]);
  if((i+1)<argc) {
    strcpy (modelfile, argv[i+1]);
  }
  if(learn_parm->svm_iter_to_shrink == -9999) {
    learn_parm->svm_iter_to_shrink=100;
  }

  if((learn_parm->skip_final_opt_check) 
     && (kernel_parm->kernel_type == LINEAR)) {
    printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
    learn_parm->skip_final_opt_check=0;
  }    
  if((learn_parm->skip_final_opt_check) 
     && (learn_parm->remove_inconsistent)) {
    printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
    wait_any_key();
    print_help();
    exit(0);
  }    
  if((learn_parm->svm_maxqpsize<2)) {
    printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize); 
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
    printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize); 
    printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp); 
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_iter_to_shrink<1) {
    printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
    wait_any_key();
    print_help();
    exit(0);
  }
  if(struct_parm->C<0) {
    printf("\nYou have to specify a value for the parameter '-c' (C>0)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(((*alg_type) < 0) || (((*alg_type) > 5) && ((*alg_type) != 9))) {
    printf("\nAlgorithm type must be either '0', '1', '2', '3', '4', or '9'!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->transduction_posratio>1) {
    printf("\nThe fraction of unlabeled examples to classify as positives must\n");
    printf("be less than 1.0 !!!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_costratio<=0) {
    printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(struct_parm->epsilon<=0) {
    printf("\nThe epsilon parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->ccache_size<=0) && ((*alg_type) == 4)) {
    printf("\nThe cache size must be at least 1!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(((struct_parm->batch_size<=0) || (struct_parm->batch_size>100))  
     && ((*alg_type) == 4)) {
    printf("\nThe batch size must be in the interval ]0,100]!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->slack_norm<1) || (struct_parm->slack_norm>2)) {
    printf("\nThe norm of the slacks must be either 1 (L1-norm) or 2 (L2-norm)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->loss_type != SLACK_RESCALING) 
     && (struct_parm->loss_type != MARGIN_RESCALING)) {
    printf("\nThe loss type must be either 1 (slack rescaling) or 2 (margin rescaling)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->rho<0) {
    printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
    printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
    printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
    printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
    printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
    printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
    wait_any_key();
    print_help();
    exit(0);
  }

  parse_struct_parameters(struct_parm);
}
void my_read_input_parameters(int argc, char *argv[], char *trainfile, char* modelfile,
			      LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm, STRUCT_LEARN_PARM *struct_parm) {
  
  long i;

  /* set default */
  learn_parm->maxiter=20000;
  learn_parm->svm_maxqpsize=100;
  learn_parm->svm_c=100.0;
  learn_parm->eps=0.1;  /* AJAY: changed from 0.001 for faster convergence */
  learn_parm->biased_hyperplane=12345; /* store random seed */
  learn_parm->remove_inconsistent=10; 
  kernel_parm->kernel_type=0;
  kernel_parm->rbf_gamma=0.05;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  kernel_parm->poly_degree=3;

  struct_parm->custom_argc=0;

  // Ajay
  learn_parm->totalEpochs = 1;
  learn_parm->numChunks = 5;

  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1]) {
    case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
    case 'e': i++; learn_parm->eps=atof(argv[i]); break;
    case 's': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break; 
    case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
    case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
    case 'r': i++; learn_parm->biased_hyperplane=atol(argv[i]); break; 
    case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
    case 'n': i++; learn_parm->maxiter=atol(argv[i]); break;
    case 'p': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
    case '-': strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);i++; strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);break;
    // Added by Ajay
    case 'f': i++; strcpy(learn_parm->tmpdir,argv[i]); printf("\nTmp file is %s\n",learn_parm->tmpdir); break;
    case 'y': i++; learn_parm->frac_sim=atof(argv[i]); printf("Frac Sim is %g\n", learn_parm->frac_sim); break;
    case 'z': i++; strcpy(learn_parm->dataset_stats_file,argv[i]);printf("Dataset Stats file is %s\n",learn_parm->dataset_stats_file);break;
    case 'w': i++; learn_parm->Fweight=atof(argv[i]); printf("Weighting param of F is %g\n",learn_parm->Fweight);break;
    case 'o': i++; learn_parm->rho_admm=atof(argv[i]); printf("Rho is %g\n", learn_parm->rho_admm); break;
    case 'a': i++; learn_parm->isExhaustive=atol(argv[i]);printf("isExhaustive is %ld",learn_parm->isExhaustive); break;
    case 'b': i++; learn_parm->isLPrelaxation=atol(argv[i]);printf("isLPrelaxation is %ld",learn_parm->isLPrelaxation); break;
    case 'K': i++; learn_parm->numChunks=atoi(argv[i]); break;
    case 'E': i++; learn_parm->totalEpochs=atoi(argv[i]); break;

    case 'C': i++; learn_parm->Cdash=atof(argv[i]); break;
    /* end of options added by Ajay */
    default: printf("\nUnrecognized option %s!\n\n",argv[i]);
      exit(0);
    }

  }

  if(i>=argc) {
    printf("\nNot enough input parameters!\n\n");
    my_wait_any_key();
    exit(0);
  }
  strcpy (trainfile, argv[i]);

  if((i+1)<argc) {
    strcpy (modelfile, argv[i+1]);
  }
  
  parse_struct_parameters(struct_parm);

}
void my_read_input_parameters(int argc, char *argv[], char *trainfile, char* modelfile, 
			      LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm, STRUCT_LEARN_PARM *struct_parm,
						double *init_spl_weight, double *spl_factor) {
  
  long i;

  /* set default */
  learn_parm->maxiter=20000;
  learn_parm->svm_maxqpsize=100;
  learn_parm->svm_c=100.0;
  learn_parm->eps=0.001;
  learn_parm->biased_hyperplane=12345; /* store random seed */
  learn_parm->remove_inconsistent=10; 
  kernel_parm->kernel_type=0;
  kernel_parm->rbf_gamma=0.05;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  kernel_parm->poly_degree=3;
  /* default: no self-paced learning */
  *init_spl_weight = 0.0;
  *spl_factor = 1.3;

  struct_parm->gram_regularization = 1E-7;
  struct_parm->solve_dual = 1;

  struct_parm->custom_argc=0;

  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1]) {
    case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
    case 'e': i++; learn_parm->eps=atof(argv[i]); break;
    case 's': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break; 
    case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
    case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
    case 'r': i++; learn_parm->biased_hyperplane=atol(argv[i]); break; 
    case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
    case 'n': i++; learn_parm->maxiter=atol(argv[i]); break;
    case 'p': i++; learn_parm->remove_inconsistent=atol(argv[i]); break; 
    case 'k': i++; *init_spl_weight = atof(argv[i]); break;
    case 'm': i++; *spl_factor = atof(argv[i]); break;
    case 'q': i++; struct_parm->solve_dual = atoi(argv[i]); break;
    case '-': strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);i++; strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);break; 
    default: printf("\nUnrecognized option %s!\n\n",argv[i]);
      exit(0);
    }

  }
  *init_spl_weight = (*init_spl_weight)/learn_parm->svm_c;

  if(i>=argc) {
    printf("\nNot enough input parameters!\n\n");
    my_wait_any_key();
    exit(0);
  }
  strcpy (trainfile, argv[i]);

  if((i+1)<argc) {
    strcpy (modelfile, argv[i+1]);
  }
  else {
    strcpy (modelfile, "lssvm.model");
  }

  /* self-paced learning weight should be non-negative */
  if(*init_spl_weight < 0.0)
    *init_spl_weight = 0.0;
  /* self-paced learning factor should be greater than 1.0 */
  if(*spl_factor < 1.0)
    *spl_factor = 1.1;

  
  parse_struct_parameters(struct_parm);
}
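/*
 * Hedged sketch: the self-paced learning parameter handling above, isolated
 * as small pure functions, plus one common way such a factor is applied
 * (multiplying the weight each outer pass).  The growth loop is an
 * assumption for illustration; how the learner actually uses spl_factor is
 * not visible in this file.
 */
#include <stdio.h>

static double normalize_spl_weight(double raw_weight, double svm_c) {
  double w = raw_weight / svm_c;          /* same scaling as above ("-k" over "-c") */
  return (w < 0.0) ? 0.0 : w;             /* weight should be non-negative */
}

static double clamp_spl_factor(double factor) {
  return (factor < 1.0) ? 1.1 : factor;   /* factor should be greater than 1.0 */
}

int main(void) {
  double w = normalize_spl_weight(5.0, 100.0);  /* e.g. "-k 5 -c 100" */
  double f = clamp_spl_factor(1.3);
  for (int pass = 0; pass < 4; pass++) {
    printf("pass %d: self-paced weight %g\n", pass, w);
    w *= f;                               /* assumed growth schedule */
  }
  return 0;
}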
void my_read_input_parameters(int argc, char *argv[], char *trainfile, char* modelfile,
                              LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm, STRUCT_LEARN_PARM *struct_parm) {

    long i;

    /* set default */
    learn_parm->maxiter=20000;
    learn_parm->svm_maxqpsize=100;
    learn_parm->svm_c=100.0;
    learn_parm->eps=0.001;
    learn_parm->biased_hyperplane=12345; /* store random seed */
    learn_parm->remove_inconsistent=10;
    kernel_parm->kernel_type=0;
    kernel_parm->rbf_gamma=0.05;
    kernel_parm->coef_lin=1;
    kernel_parm->coef_const=1;
    kernel_parm->poly_degree=3;

    struct_parm->custom_argc=0;

    for(i=1; (i<argc) && ((argv[i])[0] == '-'); i++) {
        switch ((argv[i])[1]) {
        case 'c':
            i++;
            learn_parm->svm_c=atof(argv[i]);
            break;
        case 'e':
            i++;
            learn_parm->eps=atof(argv[i]);
            break;
        case 's':
            i++;
            learn_parm->svm_maxqpsize=atol(argv[i]);
            break;
        case 'g':
            i++;
            kernel_parm->rbf_gamma=atof(argv[i]);
            break;
        case 'd':
            i++;
            kernel_parm->poly_degree=atol(argv[i]);
            break;
        case 'r':
            i++;
            learn_parm->biased_hyperplane=atol(argv[i]);
            break;
        case 't':
            i++;
            kernel_parm->kernel_type=atol(argv[i]);
            break;
        case 'n':
            i++;
            learn_parm->maxiter=atol(argv[i]);
            break;
        case 'p':
            i++;
            learn_parm->remove_inconsistent=atol(argv[i]);
            break;
        case '-':
            strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);
            i++;
            strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);
            break;
        default:
            printf("\nUnrecognized option %s!\n\n",argv[i]);
            exit(0);
        }

    }

    if(i>=argc) {
        printf("\nNot enough input parameters!\n\n");
        my_wait_any_key();
        exit(0);
    }
    strcpy (trainfile, argv[i]);

    if((i+1)<argc) {
        strcpy (modelfile, argv[i+1]);
    }

    parse_struct_parameters(struct_parm);

}
void
read_input_parameters (int argc,char *argv[],
                       long *verbosity,long *struct_verbosity,
                       STRUCT_LEARN_PARM *struct_parm,
                       LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm,
                       int *alg_type)
{
  long i ;

  (*alg_type)=DEFAULT_ALG_TYPE;

  /* SVM struct options */
  (*struct_verbosity)=1;

  struct_parm->C=-0.01;
  struct_parm->slack_norm=1;
  struct_parm->epsilon=DEFAULT_EPS;
  struct_parm->custom_argc=0;
  struct_parm->loss_function=DEFAULT_LOSS_FCT;
  struct_parm->loss_type=DEFAULT_RESCALING;
  struct_parm->newconstretrain=100;
  struct_parm->ccache_size=5;
  struct_parm->batch_size=100;

  /* SVM light options */
  (*verbosity)=0;

  strcpy (learn_parm->predfile, "trans_predictions");
  strcpy (learn_parm->alphafile, "");
  learn_parm->biased_hyperplane=1;
  learn_parm->remove_inconsistent=0;
  learn_parm->skip_final_opt_check=0;
  learn_parm->svm_maxqpsize=10;
  learn_parm->svm_newvarsinqp=0;
  learn_parm->svm_iter_to_shrink=-9999;
  learn_parm->maxiter=100000;
  learn_parm->kernel_cache_size=40;
  learn_parm->svm_c=99999999;  /* overridden by struct_parm->C */
  learn_parm->eps=0.001;       /* overridden by struct_parm->epsilon */
  learn_parm->transduction_posratio=-1.0;
  learn_parm->svm_costratio=1.0;
  learn_parm->svm_costratio_unlab=1.0;
  learn_parm->svm_unlabbound=1E-5;
  learn_parm->epsilon_crit=0.001;
  learn_parm->epsilon_a=1E-10;  /* changed from 1e-15 */
  learn_parm->compute_loo=0;
  learn_parm->rho=1.0;
  learn_parm->xa_depth=0;

  kernel_parm->kernel_type=0;
  kernel_parm->poly_degree=3;
  kernel_parm->rbf_gamma=1.0;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  strcpy (kernel_parm->custom,"empty");

  /* Parse -x options, delegate --x ones */
  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1])
      {
      case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
      case 'c': i++; struct_parm->C=atof(argv[i]); break;
      case 'p': i++; struct_parm->slack_norm=atol(argv[i]); break;
      case 'e': i++; struct_parm->epsilon=atof(argv[i]); break;
      case 'k': i++; struct_parm->newconstretrain=atol(argv[i]); break;
      case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
      case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
      case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
      case 'w': i++; (*alg_type)=atol(argv[i]); break;
      case 'o': i++; struct_parm->loss_type=atol(argv[i]); break;
      case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
      case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
      case 'l': i++; struct_parm->loss_function=atol(argv[i]); break;
      case 'f': i++; struct_parm->ccache_size=atol(argv[i]); break;
      case 'b': i++; struct_parm->batch_size=atof(argv[i]); break;
      case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
      case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
      case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
      case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
      case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
      case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
      case 'v': i++; (*struct_verbosity)=atol(argv[i]); break;
      case 'y': i++; (*verbosity)=atol(argv[i]); break;
      case '-':
        strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);
        i++;
        strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);
        break;
      default:
        {
          char msg [1024+1] ;
          #ifndef WIN
            snprintf(msg, sizeof(msg)/sizeof(char),
                     "Unrecognized option '%s'",argv[i]) ;
          #else
            _snprintf(msg, sizeof(msg)/sizeof(char),
                      "Unrecognized option '%s'",argv[i]) ;
          #endif
          mexErrMsgTxt(msg) ;
        }
      }
  }

  /* whatever is left is an error */
  if (i < argc) {
    char msg [1024+1] ;
    #ifndef WIN
        snprintf(msg, sizeof(msg)/sizeof(char),
             "Unrecognized argument '%s'", argv[i]) ;
    #else
        _snprintf(msg, sizeof(msg)/sizeof(char),
             "Unrecognized argument '%s'", argv[i]) ;
    #endif
    mexErrMsgTxt(msg) ;
  }

  /* Check parameter validity */
  if(learn_parm->svm_iter_to_shrink == -9999) {
    learn_parm->svm_iter_to_shrink=100;
  }

  if((learn_parm->skip_final_opt_check)
     && (kernel_parm->kernel_type == LINEAR)) {
    mexWarnMsgTxt("It does not make sense to skip the final optimality check for linear kernels.");
    learn_parm->skip_final_opt_check=0;
  }
  if((learn_parm->skip_final_opt_check)
     && (learn_parm->remove_inconsistent)) {
    mexErrMsgTxt("It is necessary to do the final optimality check when removing inconsistent examples.");
  }
  if((learn_parm->svm_maxqpsize<2)) {
    char msg [1025] ;
    #ifndef WIN
    snprintf(msg, sizeof(msg)/sizeof(char),
             "Maximum size of QP-subproblems not in valid range: %ld [2..]",learn_parm->svm_maxqpsize) ;
    #else
    _snprintf(msg, sizeof(msg)/sizeof(char),
              "Maximum size of QP-subproblems not in valid range: %ld [2..]",learn_parm->svm_maxqpsize) ;
    #endif
    mexErrMsgTxt(msg) ;
  }
  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
    char msg [1025] ;
    #ifndef WIN
    snprintf(msg, sizeof(msg)/sizeof(char),
             "Maximum size of QP-subproblems [%ld] must be larger than the number of"
             " new variables [%ld] entering the working set in each iteration.",
             learn_parm->svm_maxqpsize, learn_parm->svm_newvarsinqp) ;
    #else
    _snprintf(msg, sizeof(msg)/sizeof(char),
              "Maximum size of QP-subproblems [%ld] must be larger than the number of"
              " new variables [%ld] entering the working set in each iteration.",
              learn_parm->svm_maxqpsize, learn_parm->svm_newvarsinqp) ;
    #endif
    mexErrMsgTxt(msg) ;
  }
  if(learn_parm->svm_iter_to_shrink<1) {
    char msg [1025] ;
    #ifndef WIN
    snprintf(msg, sizeof(msg)/sizeof(char),
             "Maximum number of iterations for shrinking not in valid range: %ld [1,..]",
             learn_parm->svm_iter_to_shrink);
    #else
    _snprintf(msg, sizeof(msg)/sizeof(char),
              "Maximum number of iterations for shrinking not in valid range: %ld [1,..]",
              learn_parm->svm_iter_to_shrink);
    #endif
    mexErrMsgTxt(msg) ;
  }
  if(struct_parm->C<0) {
    mexErrMsgTxt("You have to specify a value for the parameter '-c' (C>0)!");
  }
  if(((*alg_type) < 0) || (((*alg_type) > 5) && ((*alg_type) != 9))) {
    mexErrMsgTxt("Algorithm type must be either '0', '1', '2', '3', '4', or '9'!");
  }
  if(learn_parm->transduction_posratio>1) {
    mexErrMsgTxt("The fraction of unlabeled examples to classify as positives must "
                 "be less than 1.0 !!!");
  }
  if(learn_parm->svm_costratio<=0) {
    mexErrMsgTxt("The COSTRATIO parameter must be greater than zero!");
  }
  if(struct_parm->epsilon<=0) {
    mexErrMsgTxt("The epsilon parameter must be greater than zero!");
  }
  if((struct_parm->ccache_size<=0) && ((*alg_type) == 4)) {
    mexErrMsgTxt("The cache size must be at least 1!");
  }
  if(((struct_parm->batch_size<=0) || (struct_parm->batch_size>100))
     && ((*alg_type) == 4)) {
    mexErrMsgTxt("The batch size must be in the interval ]0,100]!");
  }
  if((struct_parm->slack_norm<1) || (struct_parm->slack_norm>2)) {
    mexErrMsgTxt("The norm of the slacks must be either 1 (L1-norm) or 2 (L2-norm)!");
  }
  if((struct_parm->loss_type != SLACK_RESCALING)
     && (struct_parm->loss_type != MARGIN_RESCALING)) {
    mexErrMsgTxt("The loss type must be either 1 (slack rescaling) or 2 (margin rescaling)!");
  }
  if(learn_parm->rho<0) {
    mexErrMsgTxt("The parameter rho for xi/alpha-estimates and leave-one-out pruning must"
                 " be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the"
                 " Generalization Performance of an SVM Efficiently, ICML, 2000.)!");
  }
  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
    mexErrMsgTxt("The parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero"
                  "for switching to the conventional xa/estimates described in T. Joachims,"
                  "Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)") ;
  }

  parse_struct_parameters (struct_parm) ;
}
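/*
 * Hedged sketch: the repeated "#ifndef WIN snprintf / #else _snprintf"
 * blocks above could be folded into one bounded-formatting helper.  WIN is
 * the macro the surrounding code already tests; wiring format_msg into the
 * MEX file is an assumption, not part of the original sources.  Callers are
 * assumed to pass a buffer of at least MSG_LEN+1 bytes, matching the
 * "char msg[1024+1]" declarations above.
 */
#include <stdarg.h>
#include <stdio.h>

#define MSG_LEN 1024

static void format_msg(char *msg, const char *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
#ifndef WIN
  vsnprintf(msg, MSG_LEN + 1, fmt, ap);
#else
  _vsnprintf(msg, MSG_LEN, fmt, ap);
  msg[MSG_LEN] = '\0';   /* _vsnprintf may not null-terminate on truncation */
#endif
  va_end(ap);
}

int main(void) {
  char msg[MSG_LEN + 1];
  format_msg(msg, "Unrecognized option '%s'", "--foo");
  printf("%s\n", msg);   /* in the MEX wrapper this would feed mexErrMsgTxt(msg) */
  return 0;
}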