void print_help(void) { printf("\nSVM-light %s: Support Vector Machine, convert model to binary file %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_classify [options] model_file output_file\n\n"); printf("options: -h -> this help\n"); printf(" -v [0..3] -> verbosity level (default 1)\n"); printf(" -B [0,1] -> binary input files (default 1)\n"); }
void print_help(void) { printf("\nSVM-light %s: Support Vector Machine, classification module %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_classify [options] example_file model_file output_file\n\n"); printf("options: -h -> this help\n"); printf(" -v [0..3] -> verbosity level (default 2)\n"); printf(" -f [0,1] -> 0: old output format of V1.0\n"); printf(" -> 1: output the value of decision function (default)\n\n"); }
void print_help(void) { printf("\nSVM-struct classification module: %s, %s, %s\n",INST_NAME,INST_VERSION,INST_VERSION_DATE); printf(" includes SVM-struct %s for learning complex outputs, %s\n",STRUCT_VERSION,STRUCT_VERSION_DATE); printf(" includes SVM-light %s quadratic optimizer, %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_struct_classify [options] example_file model_file output_file\n\n"); printf("options: -h -> this help\n"); printf(" -v [0..3] -> verbosity level (default 2)\n\n"); print_struct_help_classify(); }
T bdNew(void)
{
    struct T *p;

    p = calloc(1, sizeof(*p));   /* calloc zero-initializes the struct, including sign */
    if (!p) {
        mpFail("bdNew: Failed to calloc memory.");
    }
    /* set up with single zero digit */
    p->digits = mpAlloc(1);
    p->digits[0] = 0;
    p->ndigits = 0;
    p->maxdigits = 1;
    return p;
}
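/*
 * Usage sketch for bdNew above; purely illustrative. bdFree is an assumed
 * matching destructor (not shown in this snippet), so treat this as a sketch
 * of the intended calling convention rather than the library's actual API.
 */
static void bdNew_usage_example(void)
{
    T b = bdNew();   /* fresh big number holding the single digit 0 */
    /* ... operate on b via the library's arithmetic routines ... */
    bdFree(&b);      /* assumption: the library provides a symmetric free */
}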
void print_help() { printf("\nSVM-struct learning module: %s, %s, %s\n",INST_NAME,INST_VERSION,INST_VERSION_DATE); printf(" includes SVM-struct %s for learning complex outputs, %s\n",STRUCT_VERSION,STRUCT_VERSION_DATE); printf(" includes SVM-light %s quadratic optimizer, %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_struct_learn [options] example_file model_file\n\n"); printf("Arguments:\n"); printf(" example_file-> file with training data\n"); printf(" model_file -> file to store learned decision rule in\n"); printf("General Options:\n"); printf(" -? -> this help\n"); printf(" -v [0..3] -> verbosity level (default 1)\n"); printf(" -y [0..3] -> verbosity level for svm_light (default 0)\n"); printf("Learning Options:\n"); printf(" -c float -> C: trade-off between training error\n"); printf(" and margin (default 0.01)\n"); printf(" -p [1,2] -> L-norm to use for slack variables. Use 1 for L1-norm,\n"); printf(" use 2 for squared slacks. (default 1)\n"); printf(" -o [1,2] -> Rescaling method to use for loss.\n"); printf(" 1: slack rescaling\n"); printf(" 2: margin rescaling\n"); printf(" (default %d)\n",DEFAULT_RESCALING); printf(" -l [0..] -> Loss function to use.\n"); printf(" 0: zero/one loss\n"); printf(" ?: see below in application specific options\n"); printf(" (default %d)\n",DEFAULT_LOSS_FCT); printf("Optimization Options (see [2][5]):\n"); printf(" -w [0,..,9] -> choice of structural learning algorithm (default %d):\n",(int)DEFAULT_ALG_TYPE); printf(" 0: n-slack algorithm described in [2]\n"); printf(" 1: n-slack algorithm with shrinking heuristic\n"); printf(" 2: 1-slack algorithm (primal) described in [5]\n"); printf(" 3: 1-slack algorithm (dual) described in [5]\n"); printf(" 4: 1-slack algorithm (dual) with constraint cache [5]\n"); printf(" 9: custom algorithm in svm_struct_learn_custom.c\n"); printf(" -e float -> epsilon: allow that tolerance for termination\n"); printf(" criterion (default %f)\n",DEFAULT_EPS); printf(" -k [1..] -> number of new constraints to accumulate before\n"); printf(" recomputing the QP solution (default 100) (-w 0 and 1 only)\n"); printf(" -f [5..] -> number of constraints to cache for each example\n"); printf(" (default 5) (used with -w 4)\n"); printf(" -b [1..100] -> percentage of training set for which to refresh cache\n"); printf(" when no epsilon violated constraint can be constructed\n"); printf(" from current cache (default 100%%) (used with -w 4)\n"); printf("SVM-light Options for Solving QP Subproblems (see [3]):\n"); printf(" -n [2..q] -> number of new variables entering the working set\n"); printf(" in each svm-light iteration (default n = q). \n"); printf(" Set n < q to prevent zig-zagging.\n"); printf(" -m [5..] -> size of svm-light cache for kernel evaluations in MB\n"); printf(" (default 40) (used only for -w 1 with kernels)\n"); printf(" -h [5..] 
-> number of svm-light iterations a variable needs to be\n"); printf(" optimal before considered for shrinking (default 100)\n"); printf(" -# int -> terminate svm-light QP subproblem optimization, if no\n"); printf(" progress after this number of iterations.\n"); printf(" (default 100000)\n"); printf("Kernel Options:\n"); printf(" -t int -> type of kernel function:\n"); printf(" 0: linear (default)\n"); printf(" 1: polynomial (s a*b+c)^d\n"); printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n"); printf(" 3: sigmoid tanh(s a*b + c)\n"); printf(" 4: user defined kernel from kernel.h\n"); printf(" -d int -> parameter d in polynomial kernel\n"); printf(" -g float -> parameter gamma in rbf kernel\n"); printf(" -s float -> parameter s in sigmoid/poly kernel\n"); printf(" -r float -> parameter c in sigmoid/poly kernel\n"); printf(" -u string -> parameter of user defined kernel\n"); printf("Output Options:\n"); printf(" -a string -> write all alphas to this file after learning\n"); printf(" (in the same order as in the training set)\n"); printf("Application-Specific Options:\n"); print_struct_help(); wait_any_key(); printf("\nMore details in:\n"); printf("[1] T. Joachims, Learning to Align Sequences: A Maximum Margin Aproach.\n"); printf(" Technical Report, September, 2003.\n"); printf("[2] I. Tsochantaridis, T. Joachims, T. Hofmann, and Y. Altun, Large Margin\n"); printf(" Methods for Structured and Interdependent Output Variables, Journal\n"); printf(" of Machine Learning Research (JMLR), Vol. 6(Sep):1453-1484, 2005.\n"); printf("[3] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n"); printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n"); printf(" A. Smola (ed.), MIT Press, 1999.\n"); printf("[4] T. Joachims, Learning to Classify Text Using Support Vector\n"); printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n"); printf(" 2002.\n"); printf("[5] T. Joachims, T. Finley, Chun-Nam Yu, Cutting-Plane Training of Structural\n"); printf(" SVMs, Machine Learning Journal, to appear.\n"); }
void print_help() { printf("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_learn [options] example_file model_file\n\n"); printf("Arguments:\n"); printf(" example_file-> file with training data\n"); printf(" model_file -> file to store learned decision rule in\n"); printf("General options:\n"); printf(" -? -> this help\n"); printf(" -v [0..3] -> verbosity level (default 1)\n"); printf("Learning options:\n"); printf(" -z {c,r,p} -> select between classification (c), regression (r),\n"); printf(" and preference ranking (p) (default classification)\n"); printf(" -c float -> C: trade-off between training error\n"); printf(" and margin (default [avg. x*x]^-1)\n"); printf(" -w [0..] -> epsilon width of tube for regression\n"); printf(" (default 0.1)\n"); printf(" -j float -> Cost: cost-factor, by which training errors on\n"); printf(" positive examples outweight errors on negative\n"); printf(" examples (default 1) (see [4])\n"); printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n"); printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n"); printf(" -i [0,1] -> remove inconsistent training examples\n"); printf(" and retrain (default 0)\n"); printf("Performance estimation options:\n"); printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n"); printf(" (see [5])\n"); printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n"); printf(" leave-one-out computation (default 1.0) (see [2])\n"); printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n"); printf(" (default 0)\n"); printf("Transduction options (see [3]):\n"); printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n"); printf(" into the positive class (default is the ratio of\n"); printf(" positive and negative examples in the training data)\n"); printf("Kernel options:\n"); printf(" -t int -> type of kernel function:\n"); printf(" 0: linear (default)\n"); printf(" 1: polynomial (s a*b+c)^d\n"); printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n"); printf(" 3: sigmoid tanh(s a*b + c)\n"); printf(" 4: user defined kernel from kernel.h\n"); printf(" -d int -> parameter d in polynomial kernel\n"); printf(" -g float -> parameter gamma in rbf kernel\n"); printf(" -s float -> parameter s in sigmoid/poly kernel\n"); printf(" -r float -> parameter c in sigmoid/poly kernel\n"); printf(" -u string -> parameter of user defined kernel\n"); printf("Optimization options (see [1]):\n"); printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n"); printf(" -n [2..q] -> number of new variables entering the working set\n"); printf(" in each iteration (default n = q). Set n<q to prevent\n"); printf(" zig-zagging.\n"); printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n"); printf(" The larger the faster...\n"); printf(" -e float -> eps: Allow that error for termination criterion\n"); printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n"); printf(" -y [0,1] -> restart the optimization from alpha values in file\n"); printf(" specified by -a option. (default 0)\n"); printf(" -h [5..] -> number of iterations a variable needs to be\n"); printf(" optimal before considered for shrinking (default 100)\n"); printf(" -f [0,1] -> do final optimality check for variables removed\n"); printf(" by shrinking. Although this test is usually \n"); printf(" positive, there is no guarantee that the optimum\n"); printf(" was found if the test is omitted. 
(default 1)\n"); printf(" -y string -> if option is given, reads alphas from file with given\n"); printf(" and uses them as starting point. (default 'disabled')\n"); printf(" -# int -> terminate optimization, if no progress after this\n"); printf(" number of iterations. (default 100000)\n"); printf("Output options:\n"); printf(" -l string -> file to write predicted labels of unlabeled\n"); printf(" examples into after transductive learning\n"); printf(" -a string -> write all alphas to this file after learning\n"); printf(" (in the same order as in the training set)\n"); wait_any_key(); printf("\nMore details in:\n"); printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n"); printf(" Kernel Methods - Support Vector Learning, B. Scholkopf and C. Burges and\n"); printf(" A. Smola (ed.), MIT Press, 1999.\n"); printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n"); printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n"); printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n"); printf(" Vector Machines. International Conference on Machine Learning (ICML),\n"); printf(" 1999.\n"); printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n"); printf(" with a knowledge-based approach - A case study in intensive care \n"); printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n"); printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n"); printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n"); printf(" 2002.\n\n"); }
void print_help() { printf("\nTree Kernels in SVM-light %s : SVM Learning module %s\n",VERSION,VERSION_DATE); printf("by Alessandro Moschitti, [email protected]\n"); printf("University of Rome \"Tor Vergata\"\n\n"); copyright_notice(); printf(" usage: svm_learn [options] example_file model_file\n\n"); printf("Arguments:\n"); printf(" example_file-> file with training data\n"); printf(" model_file -> file to store learned decision rule in\n"); printf("General options:\n"); printf(" -? -> this help\n"); printf(" -v [0..3] -> verbosity level (default 1)\n"); printf("Learning options:\n"); printf(" -z {c,r,p} -> select between classification (c), regression (r),\n"); printf(" and preference ranking (p) (default classification)\n"); printf(" -c float -> C: trade-off between training error\n"); printf(" and margin (default [avg. x*x]^-1)\n"); printf(" -w [0..] -> epsilon width of tube for regression\n"); printf(" (default 0.1)\n"); printf(" -j float -> Cost: cost-factor, by which training errors on\n"); printf(" positive examples outweight errors on negative\n"); printf(" examples (default 1) (see [4])\n"); printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n"); printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n"); printf(" -i [0,1] -> remove inconsistent training examples\n"); printf(" and retrain (default 0)\n"); printf("Performance estimation options:\n"); printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n"); printf(" (see [5])\n"); printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n"); printf(" leave-one-out computation (default 1.0) (see [2])\n"); printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n"); printf(" (default 0)\n"); printf("Transduction options (see [3]):\n"); printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n"); printf(" into the positive class (default is the ratio of\n"); printf(" positive and negative examples in the training data)\n"); printf("Kernel options:\n"); printf(" -t int -> type of kernel function:\n"); printf(" 0: linear (default)\n"); printf(" 1: polynomial (s a*b+c)^d\n"); printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n"); printf(" 3: sigmoid tanh(s a*b + c)\n"); printf(" 4: user defined kernel from kernel.h\n"); printf(" 5: combination of forest and vector sets according to W, V, S, C options\n"); printf(" 11: re-ranking based on trees (each instance must have two trees),\n"); printf(" 12: re-ranking based on vectors (each instance must have two vectors)\n"); printf(" 13: re-ranking based on both tree and vectors (each instance must have\n"); printf(" two trees and two vectors) \n"); printf(" -W [S,A] -> with an 'S', a tree kernel is applied to the sequence of trees of two input\n"); printf(" forests and the results are summed; \n"); printf(" -> with an 'A', a tree kernel is applied to all tree pairs from the two forests\n"); printf(" (default 'S')\n"); printf(" -V [S,A] -> same as before but regarding sequences of vectors are used (default 'S' and\n"); printf(" the type of vector-based kernel is specified by the option -S)\n"); printf(" -S [0,4] -> kernel to be used with vectors (default polynomial of degree 3,\n"); printf(" i.e. 
-S = 1 and -d = 3)\n"); printf(" -C [*,+,T,V]-> combination operator between forests and vectors (default 'T')\n"); printf(" -> 'T' only the contribution from trees is used (specified by option -W)\n"); printf(" -> 'V' only the contribution from vectors is used (specified by option -V)\n"); printf(" -> '+' or '*' sum or multiplication of the contributions from vectors and \n"); printf(" trees (default T) \n"); printf(" -F [0,1,2,3]-> 0 = ST kernel, 1 = SST kernel, 2 = SST-bow, 3 = PT kernel (default 1)\n"); printf(" -M float -> Mu decay factor for PT kernel (default 0.4)\n"); printf(" -L float -> decay factor in tree kernel (default 0.4)\n"); printf(" -S [0,4] -> kernel to be used with vectors (default polynomial of degree 3, \n"); printf(" i.e. -S = 1 and -d = 3)\n"); printf(" -T float -> multiplicative constant for the contribution of tree kernels when -C = '+'\n"); printf(" -N float -> 0 = no normalization, 1 = tree normalization, 2 = vector normalization and \n"); printf(" 3 = tree normalization of both trees and vectors. The normalization is applied \n"); printf(" to each individual tree or vector (default 3).\n"); printf(" -u string -> parameter of user defined kernel\n"); printf(" -d int -> parameter d in polynomial kernel\n"); printf(" -g float -> parameter gamma in rbf kernel\n"); printf(" -s float -> parameter s in sigmoid/poly kernel\n"); printf(" -r float -> parameter c in sigmoid/poly kernel\n"); printf(" -u string -> parameter of user defined kernel\n"); printf("Optimization options (see [1]):\n"); printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n"); printf(" -n [2..q] -> number of new variables entering the working set\n"); printf(" in each iteration (default n = q). Set n<q to prevent\n"); printf(" zig-zagging.\n"); printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n"); printf(" The larger the faster...\n"); printf(" -e float -> eps: Allow that error for termination criterion\n"); printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n"); printf(" -h [5..] -> number of iterations a variable needs to be\n"); printf(" optimal before considered for shrinking (default 100)\n"); printf(" -f [0,1] -> do final optimality check for variables removed\n"); printf(" by shrinking. Although this test is usually \n"); printf(" positive, there is no guarantee that the optimum\n"); printf(" was found if the test is omitted. (default 1)\n"); printf("Output options:\n"); printf(" -l string -> file to write predicted labels of unlabeled\n"); printf(" examples into after transductive learning\n"); printf(" -a string -> write all alphas to this file after learning\n"); printf(" (in the same order as in the training set)\n"); wait_any_key(); printf("\nMore details in:\n"); printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n"); printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n"); printf(" A. Smola (ed.), MIT Press, 1999.\n"); printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n"); printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n"); printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n"); printf(" Vector Machines. International Conference on Machine Learning (ICML),\n"); printf(" 1999.\n"); printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n"); printf(" with a knowledge-based approach - A case study in intensive care \n"); printf(" monitoring. 
International Conference on Machine Learning (ICML), 1999.\n"); printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n"); printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n"); printf(" 2002.\n\n"); printf("\nFor Tree-Kernel details:\n"); printf("[6] A. Moschitti, A study on Convolution Kernels for Shallow Semantic Parsing.\n"); printf(" In proceedings of the 42-th Conference on Association for Computational\n"); printf(" Linguistic, (ACL-2004), Barcelona, Spain, 2004.\n\n"); printf("[7] A. Moschitti, Making tree kernels practical for natural language learning.\n"); printf(" In Proceedings of the Eleventh International Conference for Computational\n"); printf(" Linguistics, (EACL-2006), Trento, Italy, 2006.\n\n"); }
void print_help() { printf("\nSVM-struct learning module: %s, %s, %s\n",INST_NAME,INST_VERSION,INST_VERSION_DATE); printf(" includes SVM-struct %s for learning complex outputs, %s\n",STRUCT_VERSION,STRUCT_VERSION_DATE); printf(" includes SVM-light %s quadratic optimizer, %s\n",VERSION,VERSION_DATE); copyright_notice(); printf(" usage: svm_struct_learn [options] example_file model_file\n\n"); printf("Arguments:\n"); printf(" example_file-> file with training data\n"); printf(" model_file -> file to store learned decision rule in\n"); printf("General options:\n"); printf(" -? -> this help\n"); printf(" -v [0..3] -> verbosity level (default 1)\n"); printf(" -y [0..3] -> verbosity level for svm_light (default 0)\n"); printf("Learning options:\n"); printf(" -c float -> C: trade-off between training error\n"); printf(" and margin (default 0.01)\n"); printf(" -p [1,2] -> L-norm to use for slack variables. Use 1 for L1-norm,\n"); printf(" use 2 for squared slacks. (default 1)\n"); printf(" -o [1,2] -> Slack rescaling method to use for loss.\n"); printf(" 1: slack rescaling\n"); printf(" 2: margin rescaling\n"); printf(" (default 1)\n"); printf(" -l [0..] -> Loss function to use.\n"); printf(" 0: zero/one loss\n"); printf(" (default 0)\n"); printf("Kernel options:\n"); printf(" -t int -> type of kernel function:\n"); printf(" 0: linear (default)\n"); printf(" 1: polynomial (s a*b+c)^d\n"); printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n"); printf(" 3: sigmoid tanh(s a*b + c)\n"); printf(" 4: user defined kernel from kernel.h\n"); printf(" -d int -> parameter d in polynomial kernel\n"); printf(" -g float -> parameter gamma in rbf kernel\n"); printf(" -s float -> parameter s in sigmoid/poly kernel\n"); printf(" -r float -> parameter c in sigmoid/poly kernel\n"); printf(" -u string -> parameter of user defined kernel\n"); printf("Optimization options (see [2][3]):\n"); printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n"); printf(" -n [2..q] -> number of new variables entering the working set\n"); printf(" in each iteration (default n = q). Set n<q to prevent\n"); printf(" zig-zagging.\n"); printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n"); printf(" The larger the faster...\n"); printf(" -e float -> eps: Allow that error for termination criterion\n"); printf(" (default 0.01)\n"); printf(" -h [5..] -> number of iterations a variable needs to be\n"); printf(" optimal before considered for shrinking (default 100)\n"); printf(" -k [1..] -> number of new constraints to accumulate before\n"); printf(" recomputing the QP solution (default 100)\n"); printf(" -# int -> terminate optimization, if no progress after this\n"); printf(" number of iterations. (default 100000)\n"); printf("Output options:\n"); printf(" -a string -> write all alphas to this file after learning\n"); printf(" (in the same order as in the training set)\n"); printf("Structure learning options:\n"); print_struct_help(); wait_any_key(); printf("\nMore details in:\n"); printf("[1] T. Joachims, Learning to Align Sequences: A Maximum Margin Aproach.\n"); printf(" Technical Report, September, 2003.\n"); printf("[2] I. Tsochantaridis, T. Hofmann, T. Joachims, and Y. Altun, Support Vector \n"); printf(" Learning for Interdependent and Structured Output Spaces, ICML, 2004.\n"); printf("[3] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n"); printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n"); printf(" A. 
Smola (ed.), MIT Press, 1999.\n"); printf("[4] T. Joachims, Learning to Classify Text Using Support Vector\n"); printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n"); printf(" 2002.\n\n"); }
void SVMLightRunner::libraryPrintHelp()
{
    C_PRINTF("\nSVM-light %s: Support Vector Machine, learning module %s\n",
             VERSION, VERSION_DATE);
    copyright_notice();
}
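/*
 * C_PRINTF is evidently an output wrapper rather than plain printf, which lets
 * an embedding host capture the library's console output. A minimal sketch of
 * how such a macro could be defined; this is an assumption for illustration,
 * not the project's actual definition:
 */
#include <stdio.h>
#ifndef C_PRINTF
#define C_PRINTF(...) fprintf(stdout, __VA_ARGS__)   /* hypothetical fallback */
#endif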
int parse_index_options(IndexOptions *o, char **argv)
{
    int i, j;
    SGREPDATA(o);

    i = 0;
    j = 1;
    while (*argv != NULL && (*argv)[0] == '-') {
        /* option -- means no more options */
        if (strcmp(*argv, "--") == 0) return i + 1;

        switch ((*argv)[j]) {
        case 'g': {
            char *arg;
            arg = get_arg(sgrep, &argv, &i, &j);
            if ((!arg) || set_scanner_option(o->sgrep, arg) == SGREP_ERROR) {
                return SGREP_ERROR;
            }
            break;
        }
        case 'h':
            print_index_help();
            o->index_mode = IM_DONE;
            break;
        case 'i':
            o->sgrep->ignore_case = 1;
            break;
        case 'l': {
            char *endptr;
            char *arg = get_arg(sgrep, &argv, &i, &j);
            if (!arg) return SGREP_ERROR;
            o->stop_word_limit = strtol(arg, &endptr, 10);
            if (o->stop_word_limit < 0 || *endptr != 0) {
                sgrep_error(sgrep, "Invalid stop word limit '%s'\n", arg);
                return SGREP_ERROR;
            }
            break;
        }
        case 'm': {
            char *endptr;
            char *arg = get_arg(sgrep, &argv, &i, &j);
            if (!arg) return SGREP_ERROR;
            o->available_memory = strtol(arg, &endptr, 10) * 1024 * 1024;
            if (o->available_memory < 0 || *endptr != 0) {
                sgrep_error(sgrep, "Invalid memory size '%s'\n", arg);
                return SGREP_ERROR;
            }
            break;
        }
        case 'L':
            o->output_stop_word_file = get_arg(sgrep, &argv, &i, &j);
            if (!o->output_stop_word_file) return SGREP_ERROR;
            break;
        case 'S':
            o->input_stop_word_file = get_arg(sgrep, &argv, &i, &j);
            if (!o->input_stop_word_file) return SGREP_ERROR;
            break;
        case 'V':
            printf("sgindex version %s compiled at %s\n", VERSION, __DATE__);
            o->index_mode = IM_DONE;
            break;
        case 'v':
            o->sgrep->progress_output = 1;
            break;
        case 'T':
            o->index_stats = 1;
            break;
#if 0
        case 'C':
            copyright_notice();
            o->index_mode = IM_DONE;
            break;
#endif
        case 'R':
            o->sgrep->recurse_dirs = 1;
            sgrep_error(sgrep, "WARNING -R not working (yet)\n");
            break;
        case 'c':
            o->file_name = get_arg(sgrep, &argv, &i, &j);
            if (o->file_name == NULL) return SGREP_ERROR;
            o->index_mode = IM_CREATE;
            break;
        case 'x':
            o->sgrep->index_file = get_arg(sgrep, &argv, &i, &j);
            if (o->sgrep->index_file == NULL) return SGREP_ERROR;
            break;
        case 'q': {
            const char *arg = get_arg(sgrep, &argv, &i, &j);
            if (!arg) return SGREP_ERROR;  /* check was missing: strcmp(NULL, ...) is undefined */
            if (strcmp(arg, "terms") == 0) {
                o->index_mode = IM_TERMS;
            } else {
                sgrep_error(sgrep, "Don't know how to query '%s'\n", arg);
                return SGREP_ERROR;
            }
            break;
        }
        case 'F': {
            char *arg;
            arg = get_arg(sgrep, &argv, &i, &j);
            if (arg == NULL) return SGREP_ERROR;
            if (o->file_list_files == NULL) {
                o->file_list_files = new_flist(sgrep);
            }
            flist_add(o->file_list_files, arg);
            break;
        }
        case 'w':
            o->sgrep->word_chars = get_arg(sgrep, &argv, &i, &j);
            if (!o->sgrep->word_chars) return SGREP_ERROR;
            break;
        default:
            sgrep_error(sgrep, "Illegal option -%c\n", (*argv)[j]);
            return SGREP_ERROR;
        }

        /* advance to the next bundled option character, or to the next argv cell */
        if ((*argv)[++j] == 0) {
            argv++;
            i++;
            j = 1;
        }
    }
    return i;
}
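/*
 * The i/j bookkeeping above implements single-dash option bundling: j walks the
 * characters inside one argv cell (so "-iv" behaves like "-i -v"), and i counts
 * how many argv cells were consumed so the caller can skip past the options.
 * A self-contained sketch of the same convention follows; the flags here are
 * invented for illustration and are not sgindex's real options.
 */
#include <stdio.h>
#include <string.h>

static int parse_flags(char **argv)
{
    int i = 0, j = 1;
    while (*argv != NULL && (*argv)[0] == '-') {
        if (strcmp(*argv, "--") == 0) return i + 1;   /* explicit end of options */
        switch ((*argv)[j]) {
        case 'a': printf("flag a\n"); break;          /* hypothetical flag */
        case 'b': printf("flag b\n"); break;          /* hypothetical flag */
        default:
            fprintf(stderr, "unknown option -%c\n", (*argv)[j]);
            return -1;
        }
        if ((*argv)[++j] == 0) {   /* end of this cell: move to the next argv entry */
            argv++;
            i++;
            j = 1;
        }
    }
    return i;   /* number of argv cells consumed */
}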