void BaseLearner::initLearningOptions(const nor_utils::Args& args)
{
    if ( args.hasArgument("verbose") )
        args.getValue("verbose", 0, _verbose);

    // Set the value of theta
    if ( args.hasArgument("edgeoffset") )
        args.getValue("edgeoffset", 0, _theta);
}
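// Usage sketch: the two options every learner inherits from BaseLearner
// (values are illustrative, not defaults from the source):
//
//   --verbose 2 --edgeoffset 0.05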
VJCascadeClassifier::VJCascadeClassifier(const nor_utils::Args &args, int verbose)
    : _verbose(verbose), _args(args), _positiveLabelIndex(-1)
{
    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);

    if ( args.hasArgument("positivelabel") )
    {
        args.getValue("positivelabel", 0, _positiveLabelName);
    }
    else
    {
        cout << "The name of the positive label must be given!" << endl;
        exit(-1);
    }
}
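// Usage sketch (the binary name and file names are hypothetical; only the
// two options parsed above are shown):
//
//   ./multiboost --outputinfo results.dta --positivelabel face ...
//
// Omitting --positivelabel aborts construction with exit(-1), as above.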
void FilterBoostLearner::getArgs(const nor_utils::Args& args)
{
    AdaBoostMHLearner::getArgs( args );

    // Set the value of the sample size
    if ( args.hasArgument("Cn") )
    {
        args.getValue("Cn", 0, _Cn);
        if (_verbose > 1)
            cout << "--> Resampling size: " << _Cn << endl;
    }

    if ( args.hasArgument("onlinetraining") )
    {
        _onlineWeakLearning = true;
    }
}
MDDAGClassifier::MDDAGClassifier(const nor_utils::Args &args, int verbose)
    : _verbose(verbose), _args(args)
{
    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);
}
//----------------------------------------------------------------
//----------------------------------------------------------------

void Exp3::initLearningOptions(const nor_utils::Args& args)
{
    if ( args.hasArgument( "gamma" ) ) {
        _gamma = args.getValue<double>("gamma", 0 );
    }
}
void StochasticLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    if (args.hasArgument("initgamma"))
        args.getValue("initgamma", 0, _initialGammat);

    if (args.hasArgument("gammdivperiod"))
        args.getValue("gammdivperiod", 0, _gammdivperiod);

    if (args.hasArgument("graditer"))
        args.getValue("graditer", 0, _maxIter);

    if (args.hasArgument("gradmethod"))
    {
        string gradMethod;
        args.getValue("gradmethod", 0, gradMethod);

        if ( gradMethod.compare( "sgd" ) == 0 )
            _gMethod = OPT_SGD;
        else if ( gradMethod.compare( "bgd" ) == 0 )
            _gMethod = OPT_BGD;
        else {
            cerr << "StochasticLearner::Unknown update gradient method" << endl;
            exit( -1 );
        }
    }

    if (args.hasArgument("tfunc"))
    {
        string targetFunction;
        args.getValue("tfunc", 0, targetFunction);

        if ( targetFunction.compare( "exploss" ) == 0 )
            _tFunction = TF_EXPLOSS;
        else if ( targetFunction.compare( "edge" ) == 0 )
            _tFunction = TF_EDGE;
        else {
            cerr << "StochasticLearner::Unknown target function" << endl;
            exit( -1 );
        }
    }
}
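// Example of the option combinations accepted by the parser above (values
// are illustrative):
//
//   --initgamma 10 --gammdivperiod 1 --graditer 100 --gradmethod sgd --tfunc exploss
//
// --gradmethod accepts sgd|bgd and --tfunc accepts exploss|edge; any other
// value aborts with exit(-1).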
void EnumLearnerSA::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    if ( args.hasArgument( "uoffset" ) )
        args.getValue("uoffset", 0, _uOffset);
}
void FeaturewiseLearner::initLearningOptions(const nor_utils::Args& args)
{
    AbstainableLearner::initLearningOptions(args);

    _maxNumOfDimensions = numeric_limits<int>::max();

    // If the sampling is required
    if ( args.hasArgument("rsample") )
        _maxNumOfDimensions = args.getValue<int>("rsample", 0);
}
void MultiMDDAGLearner::getArgs(const nor_utils::Args& args)
{
    MDDAGLearner::getArgs(args);

    // Set the update percentage
    if ( args.hasArgument("updateperc") )
        args.getValue("updateperc", 0, _randomNPercent);
}
void ParasiteLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    // -pool <baseLearnerFile> <numBaseLearners>
    // (mandatory: read without a presence check)
    args.getValue("pool", 0, _nameBaseLearnerFile);
    args.getValue("pool", 1, _numBaseLearners);

    if ( args.hasArgument("closed") )
        _closed = 1;
}
void FilterBoostLearner::doConfusionMatrix(const nor_utils::Args& args)
{
    FilterBoostClassifier classifier(args, _verbose);

    // -cmatrix <dataFile> <shypFile>
    if ( args.hasArgument("cmatrix") )
    {
        string testFileName = args.getValue<string>("cmatrix", 0);
        string shypFileName = args.getValue<string>("cmatrix", 1);

        classifier.printConfusionMatrix(testFileName, shypFileName);
    }
    // -cmatrixfile <dataFile> <shypFile> <outFile>
    else if ( args.hasArgument("cmatrixfile") )
    {
        string testFileName   = args.getValue<string>("cmatrixfile", 0);
        string shypFileName   = args.getValue<string>("cmatrixfile", 1);
        string outResFileName = args.getValue<string>("cmatrixfile", 2);

        classifier.saveConfusionMatrix(testFileName, shypFileName, outResFileName);
    }
}
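// Usage sketch (argument forms taken from the comments above; file names
// are hypothetical):
//
//   -cmatrix test.arff shyp.xml             -> print the confusion matrix
//   -cmatrixfile test.arff shyp.xml cm.txt  -> save the confusion matrix to cm.txt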
int MultiMDDAGLearner::resumeProcess(const nor_utils::Args& args, InputData* pTestData)
{
    int numPolicies = 0;

    // note: policyAlpha is read here but not used below
    AlphaReal policyAlpha = 0.0;
    if ( args.hasArgument("policyalpha") )
        args.getValue("policyalpha", 0, policyAlpha);

    _policy = new AdaBoostArrayOfPolicyArray(args, _actionNumber);

    return numPolicies;
}
void AbstainableLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    // set abstention
    if ( args.hasArgument("abstention") )
    {
        string abstType = args.getValue<string>("abstention", 0);

        if (abstType == "greedy")
            _abstention = ABST_GREEDY;
        else if (abstType == "full")
            _abstention = ABST_FULL;
        else if (abstType == "real")
            _abstention = ABST_REAL;
        else if (abstType == "classwise")
            _abstention = ABST_CLASSWISE;
        else
        {
            cerr << "ERROR: Invalid type of abstention <" << abstType << ">!!" << endl;
            exit(1);
        }
    }
}
void TreeLearnerUCT::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    string baseLearnerName;
    args.getValue("baselearnertype", 0, baseLearnerName);
    args.getValue("baselearnertype", 1, _numBaseLearners);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    for( int ib = 0; ib < _numBaseLearners; ++ib ) {
        _baseLearners.push_back(pWeakHypothesisSource->create());
        _baseLearners[ib]->initLearningOptions(args);

        vector< int > tmpVector( 2, -1 );
        _idxPairs.push_back( tmpVector );
    }

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        cerr << "Unknown update rule in TreeLearnerUCT (set to default [edge])" << endl;
        _updateRule = EDGE_SQUARE;
    }
}
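// Usage sketch (names are illustrative; the learner type must be registered
// with BaseLearner::RegisteredLearners()): build a tree of 3 base learners
// and select the update rule:
//
//   --baselearnertype SingleStumpLearner 3 --updaterule edge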
void SoftCascadeLearner::getArgs(const nor_utils::Args& args)
{
    if ( args.hasArgument("verbose") )
        args.getValue("verbose", 0, _verbose);

    ///////////////////////////////////////////////////
    // get the output strong hypothesis file name, if given
    if ( args.hasArgument("shypname") )
        args.getValue("shypname", 0, _shypFileName);
    else
        _shypFileName = string(SHYP_NAME);

    _shypFileName = nor_utils::addAndCheckExtension(_shypFileName, SHYP_EXTENSION);

    ///////////////////////////////////////////////////
    // TODO: create an abstract class for cascade-compliant base learners
    // and accept only its offspring!
    // get the name of the learner
    _baseLearnerName = defaultLearner;
    if ( args.hasArgument("learnertype") )
        args.getValue("learnertype", 0, _baseLearnerName);

    //    cout << "! Only HaarSingleStumpLearner is allowed.\n";

    // -train <dataFile> <nIterations>
    if ( args.hasArgument("train") )
    {
        args.getValue("train", 0, _trainFileName);
        args.getValue("train", 1, _numIterations);
    }
    // -traintest <trainingDataFile> <testDataFile> <nIterations>
    else if ( args.hasArgument("traintest") )
    {
        args.getValue("traintest", 0, _trainFileName);
        args.getValue("traintest", 1, _testFileName);
        args.getValue("traintest", 2, _numIterations);
    }

    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);
    else
        _outputInfoFile = OUTPUT_NAME;

    // --constant: check constant learner in each iteration
    if ( args.hasArgument("constant") )
        _withConstantLearner = true;

    if ( args.hasArgument("positivelabel") )
    {
        args.getValue("positivelabel", 0, _positiveLabelName);
    }
    else
    {
        cout << "Error: the name of the positive label must be given.\n"
             << "Type --h softcascade to see the mandatory options." << endl;
        exit(-1);
    }

    if (args.hasArgument("trainposteriors"))
        args.getValue("trainposteriors", 0, _trainPosteriorsFileName);

    if (args.hasArgument("testposteriors"))
        args.getValue("testposteriors", 0, _testPosteriorsFileName);

    if (args.hasArgument("detectionrate"))
    {
        args.getValue("detectionrate", 0, _targetDetectionRate);
    }
    else
    {
        cout << "Error: the target detection rate must be given.\n"
             << "Type --h softcascade to see the mandatory options." << endl;
        exit(-1);
    }

    if (args.hasArgument("expalpha"))
    {
        args.getValue("expalpha", 0, _alphaExponentialParameter);
    }
    else
    {
        cout << "Error: the parameter used to initialize the rejection distribution vector must be given.\n"
             << "Type --h softcascade to see the mandatory options." << endl;
        exit(-1);
    }

    if (args.hasArgument("calibrate"))
    {
        args.getValue("calibrate", 0, _unCalibratedShypFileName);
        if (args.getNumValues("calibrate") > 1)
        {
            // the optional second value is the shyp limit
            args.getValue("calibrate", 1, _inShypLimit);
        }
    }
    else
    {
        _fullRun = true;
        _unCalibratedShypFileName = "shypToBeCalibrated.xml";
        cout << "The strong hypothesis file will be saved into the file "
             << _unCalibratedShypFileName << endl;
        //cout << "Error : the shyp file of the uncalibrated trained classifier must be given ! \n";
        //exit(-1);
    }

    if (args.hasArgument("bootstrap"))
    {
        cout << "Warning: the bootstrapping set and the training set must come from the same superset.\n";
        args.getValue("bootstrap", 0, _bootstrapFileName);
        args.getValue("bootstrap", 1, _bootstrapRate);
    }
}
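// Minimal sketch of a full soft-cascade run wired through the parser above
// (file names and values are hypothetical; only options parsed above appear):
//
//   --train train.arff 1000 --positivelabel face
//   --detectionrate 0.99 --expalpha 0.2 --constant
//
// --positivelabel, --detectionrate, and --expalpha are mandatory; omitting
// any of them aborts with exit(-1).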
// -----------------------------------------------------------------------

void BanditLearner::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "logedge" ) == 0 )
        _updateRule = LOGEDGE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        //cerr << "Unknown update rule in BanditLearner (set to default [logedge])" << endl;
        _updateRule = LOGEDGE;
    }

    if ( args.hasArgument( "rsample" ) ) {
        _K = args.getValue<int>("rsample", 0);
    }

    string banditAlgoName = "";
    if ( args.hasArgument( "banditalgo" ) )
        args.getValue("banditalgo", 0, banditAlgoName );

    if ( banditAlgoName.compare( "Random" ) == 0 )
        _banditAlgoName = BA_RANDOM_LS;
    else if ( banditAlgoName.compare( "UCBK" ) == 0 )
        _banditAlgoName = BA_UCBK_LS;
    else if ( banditAlgoName.compare( "UCBKR" ) == 0 )
        _banditAlgoName = BA_UCBKR_LS;
    else if ( banditAlgoName.compare( "UCBKV" ) == 0 )
        _banditAlgoName = BA_UCBKV_LS;
    else if ( banditAlgoName.compare( "EXP3" ) == 0 )
        _banditAlgoName = BA_EXP3_LS;
    else if ( banditAlgoName.compare( "EXP3G" ) == 0 )
        _banditAlgoName = BA_EXP3G_LS;
    else if ( banditAlgoName.compare( "UCT" ) == 0 )
        _banditAlgoName = BA_UCT_LS;
    else {
        cerr << "Unknown bandit algo (BanditLearner), defaulting to EXP3" << endl;
        _banditAlgoName = BA_EXP3_LS;
    }

    if ( _banditAlgo == NULL )
    {
        switch ( _banditAlgoName )
        {
        case BA_RANDOM_LS:
            //_banditAlgo = new Random();
            break;
        case BA_UCBK_LS:
            //_banditAlgo = new UCBK();
            break;
        case BA_UCBKV_LS:
            //_banditAlgo = new UCBKV();
            break;
        case BA_UCBKR_LS:
            //_banditAlgo = new UCBKRandomized();
            break;
        case BA_EXP3_LS:
            _banditAlgo = dynamic_cast<GenericBanditAlgorithmLS<double,string>*>( new Exp3LS<double,string>() );
            break;
        case BA_EXP3G_LS:
            _banditAlgo = dynamic_cast<GenericBanditAlgorithmLS<double,string>*>( new Exp3GLS<double,string>() );
            break;
        case BA_UCT_LS:
            _banditAlgo = dynamic_cast<GenericBanditAlgorithmLS<double,string>*>( new UCT<double,string>() );
            break;
        default:
            cerr << "No valid bandit algorithm was given!" << endl;
            exit( -1 );
        }
    }
}
void FilterBoostLearner::getArgs(const nor_utils::Args& args)
{
    if ( args.hasArgument("verbose") )
        args.getValue("verbose", 0, _verbose);

    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);

    ///////////////////////////////////////////////////
    // get the output strong hypothesis file name, if given
    if ( args.hasArgument("shypname") )
        args.getValue("shypname", 0, _shypFileName);
    else
        _shypFileName = string(SHYP_NAME);

    _shypFileName = nor_utils::addAndCheckExtension(_shypFileName, SHYP_EXTENSION);

    ///////////////////////////////////////////////////
    // check whether the strong hypothesis file should be compressed
    if ( args.hasArgument("shypcomp") )
        args.getValue("shypcomp", 0, _isShypCompressed );
    else
        _isShypCompressed = false;

    ///////////////////////////////////////////////////
    // Set time limit
    if ( args.hasArgument("timelimit") )
    {
        args.getValue("timelimit", 0, _maxTime);
        if (_verbose > 1)
            cout << "--> Overall Time Limit: " << _maxTime << " minutes" << endl;
    }

    // Set the value of theta
    if ( args.hasArgument("edgeoffset") )
        args.getValue("edgeoffset", 0, _theta);

    // Set the filename of the strong hypothesis file in case resume is called
    if ( args.hasArgument("resume") )
        args.getValue("resume", 0, _resumeShypFileName);

    // get the name of the learner
    _baseLearnerName = defaultLearner;
    if ( args.hasArgument("learnertype") )
        args.getValue("learnertype", 0, _baseLearnerName);

    // -train <dataFile> <nIterations>
    if ( args.hasArgument("train") )
    {
        args.getValue("train", 0, _trainFileName);
        args.getValue("train", 1, _numIterations);
    }
    // -traintest <trainingDataFile> <testDataFile> <nIterations>
    else if ( args.hasArgument("traintest") )
    {
        args.getValue("traintest", 0, _trainFileName);
        args.getValue("traintest", 1, _testFileName);
        args.getValue("traintest", 2, _numIterations);
    }

    // --constant: check constant learner in each iteration
    if ( args.hasArgument("constant") )
        _withConstantLearner = true;

    // Set the value of the sample size
    if ( args.hasArgument("Cn") )
    {
        args.getValue("Cn", 0, _Cn);
        if (_verbose > 1)
            cout << "--> Resampling size: " << _Cn << endl;
    }
}
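// Usage sketch of a typical FilterBoost run wired through the parser above
// (file names and values are hypothetical):
//
//   --traintest train.arff test.arff 200 --learnertype SingleStumpLearner
//   --shypname shyp.xml --Cn 500 --verbose 2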
void BanditSingleStumpLearner::initLearningOptions(const nor_utils::Args& args)
{
    FeaturewiseLearner::initLearningOptions(args);

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "logedge" ) == 0 )
        _updateRule = LOGEDGE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        //cerr << "Unknown update rule in BanditSingleStumpLearner (set to default [logedge])" << endl;
        _updateRule = LOGEDGE;
    }

    if ( args.hasArgument( "percent" ) ) {
        _percentage = args.getValue<double>("percent", 0);
    } else {
        _percentage = 0.1;
    }

    if ( args.hasArgument( "rsample" ) ) {
        _K = args.getValue<int>("rsample", 0);
    } else {
        _K = 1;
    }

    string banditAlgoName = "";
    if ( args.hasArgument( "banditalgo" ) )
        args.getValue("banditalgo", 0, banditAlgoName );

    if ( banditAlgoName.compare( "Random" ) == 0 )
        _banditAlgoName = BA_RANDOM;
    else if ( banditAlgoName.compare( "UCBK" ) == 0 )
        _banditAlgoName = BA_UCBK;
    else if ( banditAlgoName.compare( "UCBKR" ) == 0 )
        _banditAlgoName = BA_UCBKR;
    else if ( banditAlgoName.compare( "UCBKV" ) == 0 )
        _banditAlgoName = BA_UCBKV;
    else if ( banditAlgoName.compare( "EXP3" ) == 0 )
        _banditAlgoName = BA_EXP3;
    else if ( banditAlgoName.compare( "EXP3G" ) == 0 )
        _banditAlgoName = BA_EXP3G;
    else if ( banditAlgoName.compare( "EXP3G2" ) == 0 )
        _banditAlgoName = BA_EXP3G2;
    else if ( banditAlgoName.compare( "EXP3P" ) == 0 )
        _banditAlgoName = BA_EXP3P;
    else {
        cerr << "Unknown bandit algo (BanditSingleStumpLearner), defaulting to UCBK" << endl;
        _banditAlgoName = BA_UCBK;
    }

    if ( _banditAlgo == NULL )
    {
        switch ( _banditAlgoName )
        {
        case BA_RANDOM: _banditAlgo = new Random();         break;
        case BA_UCBK:   _banditAlgo = new UCBK();           break;
        case BA_UCBKV:  _banditAlgo = new UCBKV();          break;
        case BA_UCBKR:  _banditAlgo = new UCBKRandomized(); break;
        case BA_EXP3:   _banditAlgo = new Exp3();           break;
        case BA_EXP3G:  _banditAlgo = new Exp3G();          break;
        case BA_EXP3G2: _banditAlgo = new Exp3G2();         break;
        case BA_EXP3P:  _banditAlgo = new Exp3P();          break;
        default:
            cerr << "No valid bandit algorithm was given!" << endl;
            exit( -1 );
        }

        // the bandit algorithm object must be initialized once only
        _banditAlgo->initLearningOptions( args );
    }
}
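// Usage sketch (values are illustrative): select the bandit algorithm, the
// number of arms pulled per iteration, and the exploration percentage:
//
//   --banditalgo EXP3 --rsample 5 --percent 0.2
//
// An unknown --banditalgo value falls back to UCBK with a warning, and the
// chosen algorithm object is created and initialized exactly once.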