Example #1
	void BanditSingleStumpLearner::declareArguments(nor_utils::Args& args)
	{		
		FeaturewiseLearner::declareArguments(args);
		
		args.declareArgument("updaterule", 
			"The update weights in the UCT can be the 1-sqrt( 1- edge^2 ) [edge]\n"
			"  or the alpha [alphas]\n"
			"  Default is the first one\n",
			1, "<type>");

		args.declareArgument("rsample", 
			"Number of features to be considered\n"
			"  Default is one\n",
			1, "<K>");

		args.declareArgument("banditalgo", 
			"The bandit algorithm (UCBK, UCBKRandomized, EXP3 )\n"
			"Default is UCBK\n",
			1, "<algoname>");

		args.declareArgument("percent", 
			"The percent of database will be used for estimating the payoffs(EXP3G)\n"
			"  Default is 10%\n",
			1, "<p>");

	}
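
A hedged counterpart for reference: the options declared above would typically be read back with the hasArgument/getValue pattern used throughout these examples. The member names below (_updateRuleName, _banditAlgoName, _percent) are assumptions for illustration, not taken from the project.

	// Minimal sketch, assuming hypothetical member names; only the
	// hasArgument/getValue calls shown elsewhere on this page are used.
	void BanditSingleStumpLearner::initLearningOptions(const nor_utils::Args& args)
	{
		FeaturewiseLearner::initLearningOptions(args);

		if ( args.hasArgument("updaterule") )
			args.getValue("updaterule", 0, _updateRuleName);   // "edge" or "alphas"

		if ( args.hasArgument("banditalgo") )
			args.getValue("banditalgo", 0, _banditAlgoName);   // UCBK, UCBKRandomized, EXP3

		if ( args.hasArgument("percent") )
			args.getValue("percent", 0, _percent);             // percentage used by EXP3G
	}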
Example #2
	void StochasticLearner::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);
		args.declareArgument("graditer",
							 "Declares the number of randomly drawn training size for SGD"
							 "whereas it declares the number of iteration for the Batch Gradiend Descend"							 
							 " size <num> of training set. "
							 "Example: --graditer 50 -> Uses only 50 randomly chosen training instance",
							 1, "<num>");
		
		args.declareArgument("gradmethod",
							 "Declares the gradient method: "
							 " (sgd) Stochastic Gradient Descent, (bgd) Batch Gradient Descent"
							 "Example: --gradmethod sgd -> Uses stochastic gradient method",
							 1, "<method>");
		
		args.declareArgument("tfunc",
							 "Target function: "
							 "exploss: Exponential Loss, edge: max. edge"
							 "Example: --tfunc exploss -> Uses exponantial loss for minimizing",
							 1, "<function>");
		
		args.declareArgument("initgamma",
							 "The initial learning rate in gradient descent"
							 "Default values is 10.0",
							 1, "<gamma>");
		
		args.declareArgument("gammdivperiod",
							 "The periodicity of decreasing the learning rate \\gamma"
							 "Default values is 1",
							 1, "<period>");
	}
Example #3
MDDAGClassifier::MDDAGClassifier(const nor_utils::Args &args, int verbose)
    : _verbose(verbose), _args(args)
{
    // The file with the step-by-step information
    if ( args.hasArgument("outputinfo") )
        args.getValue("outputinfo", 0, _outputInfoFile);
}
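
A usage sketch: elsewhere on this page (Examples #11 and #13) classifiers built this way are constructed from the parsed arguments and a verbosity level. The enclosing method below is hypothetical.

// Hypothetical caller, mirroring the construction pattern of Examples #11 and #13.
void SomeLearner::classify(const nor_utils::Args& args)
{
    MDDAGClassifier classifier(args, _verbose);
    // ... use the classifier with file names read from args ...
}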
Example #4
	void TreeLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);
		
		string baseLearnerName;
		args.getValue("baselearnertype", 0, baseLearnerName);   
		args.getValue("baselearnertype", 1, _numBaseLearners);   
		
		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		
		// check whether the weak learner is a ScalarLearner
		// (dynamic_cast on a pointer returns NULL on failure; it does not throw bad_cast)
		_pScalaWeakHypothesisSource = dynamic_cast<ScalarLearner*>(pWeakHypothesisSource);
		if ( _pScalaWeakHypothesisSource == NULL ) {
			cerr << "The weak hypothesis must be a ScalarLearner!!!" << endl;
			exit(-1);
		}
		
		_pScalaWeakHypothesisSource->initLearningOptions(args);
		
		/*
		 for( int ib = 0; ib < _numBaseLearners; ++ib ) {			
		 vector< int > tmpVector( 2, -1 );
		 _idxPairs.push_back( tmpVector );
		 }
		 */
	}
Example #5
	void EnumLearnerSA::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);

		if ( args.hasArgument( "uoffset" ) )  
			args.getValue("uoffset", 0, _uOffset);   

	}
Example #6
void MultiMDDAGLearner::getArgs(const nor_utils::Args& args)
{
    MDDAGLearner::getArgs(args);

    // Set the value of the update percentage
    if ( args.hasArgument("updateperc") )
        args.getValue("updateperc", 0, _randomNPercent);

}
Example #7
	void BaseLearner::initLearningOptions(const nor_utils::Args& args)
	{
		if ( args.hasArgument("verbose") )
			args.getValue("verbose", 0, _verbose);

		// Set the value of theta
		if ( args.hasArgument("edgeoffset") )
			args.getValue("edgeoffset", 0, _theta);   
	}
Example #8
void ParasiteLearner::initLearningOptions(const nor_utils::Args& args)
{
   BaseLearner::initLearningOptions(args);

   args.getValue("pool", 0, _nameBaseLearnerFile);   
   args.getValue("pool", 1, _numBaseLearners);   

   if ( args.hasArgument("closed") )
      _closed = 1;
}
Example #9
	void FilterBoostLearner::getArgs(const nor_utils::Args& args)
	{
		AdaBoostMHLearner::getArgs( args );
		// Set the value of the sample size
		if ( args.hasArgument("Cn") )
		{
			args.getValue("C", 0, _Cn);
			if (_verbose > 1)
				cout << "--> Resampling size: " << _Cn << endl;
		}

	}
Example #10
int MultiMDDAGLearner::resumeProcess(const nor_utils::Args& args, InputData* pTestData)
{
    int numPolicies = 0;

    AlphaReal policyAlpha = 0.0;

    if ( args.hasArgument("policyalpha") )
        args.getValue("policyalpha", 0, policyAlpha);

    _policy = new AdaBoostArrayOfPolicyArray(args, _actionNumber);

    return numPolicies;
}
Example #11
 void SoftCascadeLearner::classify(const nor_utils::Args& args)
 {
     SoftCascadeClassifier classifier(args, _verbose);
             
     string testFileName = args.getValue<string>("test", 0);
     string shypFileName = args.getValue<string>("test", 1);
     int numIterations = args.getValue<int>("test", 2);
             
     string outResFileName = "";
     if ( args.getNumValues("test") > 3 )
         args.getValue("test", 3, outResFileName);
             
     classifier.run(testFileName, shypFileName, numIterations, outResFileName);
 }
Example #12
void ParasiteLearner::declareArguments(nor_utils::Args& args)
{
   BaseLearner::declareArguments(args);

   args.declareArgument("pool", 
                        "The name of the shyp file containing the pool of\n"
                        "  weak learners, followed by the number of desired\n"
                        "  weak learners. If -1 or more than the number of \n"
                        "  weak learners, we use all of them",
                        2, "<fileName> <nBaseLearners>");
         
   args.declareArgument("closed", "Include negatives of weak learners (default = false).");

}
Example #13
	void FilterBoostLearner::classify(const nor_utils::Args& args)
	{
		FilterBoostClassifier classifier(args, _verbose);

		// -test <dataFile> <shypFile>
		string testFileName = args.getValue<string>("test", 0);
		string shypFileName = args.getValue<string>("test", 1);
		int numIterations = args.getValue<int>("test", 2);

		string outResFileName;
		if ( args.getNumValues("test") > 3 )
			args.getValue("test", 3, outResFileName);

		classifier.run(testFileName, shypFileName, numIterations, outResFileName);
	}
Example #14
	VJCascadeClassifier::VJCascadeClassifier(const nor_utils::Args &args, int verbose)
	: _verbose(verbose), _args(args), _positiveLabelIndex(-1)
	{
		// The file with the step-by-step information
		if ( args.hasArgument("outputinfo") )
			args.getValue("outputinfo", 0, _outputInfoFile);
		
		if ( args.hasArgument("positivelabel") )
		{
			args.getValue("positivelabel", 0, _positiveLabelName);
		} else {
			cout << "The name of positive label has to be given!!!" << endl;
			exit(-1);
		}				
	}
Example #15
File: Exp3.cpp Project: junjiek/cmu-exp
//----------------------------------------------------------------
//----------------------------------------------------------------
    void Exp3::initLearningOptions(const nor_utils::Args& args) 
    {
        if ( args.hasArgument( "gamma" ) ){
            _gamma = args.getValue<double>("gamma", 0 );
        } 

    }
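
The declaration matching this option is not shown on this page; below is a minimal sketch following the declareArgument pattern of the other examples. The help string is an assumption, not taken from Exp3.cpp.

    // Hypothetical declaration for the "gamma" option read above;
    // the help text is illustrative only.
    void Exp3::declareArguments(nor_utils::Args& args)
    {
        args.declareArgument("gamma",
                             "The exploration parameter gamma of EXP3\n",
                             1, "<gamma>");
    }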
Example #16
File: main.cpp Project: busarobi/MDDAG2
/**
 * Show the help. Called when -h argument is provided.
 * \date 11/11/2005
 */
void showHelp(nor_utils::Args& args, const vector<string>& learnersList)
{
	cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
	cout << "------------------------ HELP SECTION --------------------------" << endl;
	
	args.printGroup("Parameters");
	
	cout << endl;
	cout << "For specific help options type:" << endl;
	cout << "   --h general: General options" << endl;
	cout << "   --h io: I/O options" << endl;
	cout << "   --h algo: Basic algorithm options" << endl;
	cout << "   --h bandits: Bandit algorithm options" << endl;
	cout << "   --h vjcascade: Viola-Jones Cascade options" << endl;
	cout << "   --h softcascade: Soft Cascade options" << endl;

	cout << endl;
	cout << "For weak learners specific options type:" << endl;
	
	vector<string>::const_iterator it;
	for (it = learnersList.begin(); it != learnersList.end(); ++it)
		cout << "   --h " << *it << endl;
	
	exit(0);
}
Example #17
	void StochasticLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);
		
		if (args.hasArgument("initgamma"))
			args.getValue("initgamma", 0, _initialGammat);   		
		
		if (args.hasArgument("gammdivperiod"))
			args.getValue("gammdivperiod", 0, _gammdivperiod);   		
		
		
		if (args.hasArgument("graditer"))
			args.getValue("graditer", 0, _maxIter);   		
		
		if (args.hasArgument("gradmethod"))
		{
			string gradMethod;
			args.getValue("gradmethod", 0, gradMethod);   		
			
			if ( gradMethod.compare( "sgd" ) == 0 )
				_gMethod = OPT_SGD;
			else if ( gradMethod.compare( "bgd" ) == 0 )
				_gMethod = OPT_BGD;
			else {
				cerr << "SigmoidSingleStumpLearner::Unknown update gradient method" << endl;
				exit( -1 );
			}					
		}		
		
		if (args.hasArgument("tfunc"))
		{
			string targetFunction;
			args.getValue("tfunc", 0, targetFunction);
			
			if ( targetFunction.compare( "exploss" ) == 0 )
				_tFunction = TF_EXPLOSS;
			else if ( targetFunction.compare( "edge" ) == 0 )
				_tFunction = TF_EDGE;
			else {
				cerr << "SigmoidSingleStumpLearner::Unknown target function" << endl;
				exit( -1 );				
			}					
			
		}
		
	}
Example #18
	void EnumLearnerSA::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);

		args.declareArgument("uoffset", 
			"The offset of u\n",
			1, "<offset>");

	}
Example #19
	void FeaturewiseLearner::initLearningOptions(const nor_utils::Args& args)
	{
		AbstainableLearner::initLearningOptions(args);
		_maxNumOfDimensions = numeric_limits<int>::max();
		
		// If the sampling is required
		if ( args.hasArgument("rsample") )
			_maxNumOfDimensions = args.getValue<int>("rsample", 0);
	}
Example #20
void TreeLearnerUCT::declareArguments(nor_utils::Args& args)
{
    BaseLearner::declareArguments(args);

    args.declareArgument("baselearnertype",
                         "The name of the learner that serves as a basis for the product\n"
                         "  and the number of base learners to be multiplied\n"
                         "  Don't forget to add its parameters\n",
                         2, "<baseLearnerType> <numBaseLearners>");

    args.declareArgument("updaterule",
                         "The update weights in the UCT can be the 1-sqrt( 1- edge^2 ) [edge]\n"
                         "  or the alpha [alphas]\n"
                         "  or edgesquare [edgesquare]\n"
                         "  Default is the first one\n",
                         1, "<type>");

}
Example #21
	void ProductLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);

		string baseLearnerName;
		args.getValue("baselearnertype", 0, baseLearnerName);   
		args.getValue("baselearnertype", 1, _numBaseLearners);   

		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		pWeakHypothesisSource->initLearningOptions(args);

		for( int ib = 0; ib < _numBaseLearners; ++ib ) {
			_baseLearners.push_back(pWeakHypothesisSource->create());
			_baseLearners[ib]->initLearningOptions(args);
		}
	}
Example #22
	void BanditTreeLearner::declareArguments(nor_utils::Args& args)
	{
		BanditLearner::declareArguments(args);

		args.declareArgument("baselearnertype", 
			"The name of the learner that serves as a basis for the product\n"
			"  and the number of base learners to be multiplied\n"
			"  Don't forget to add its parameters\n",
			2, "<baseLearnerType> <numBaseLearners>");
	}
Example #23
	void BaseLearner::declareBaseArguments(nor_utils::Args& args)
	{
		args.declareArgument("shypname", 
			"The name of output strong hypothesis (default: "
			+ string(SHYP_NAME) + "." + string(SHYP_EXTENSION) + ").", 
			1, "<filename>");

		args.declareArgument("shypcomp", 
			"The shyp file will be compressed", 
			1, "<flag 0-1>");

		args.setGroup("Basic Algorithm Options");
		args.declareArgument("resume", 
			"Resumes a training process using the strong hypothesis file.", 
			1, "<shypFile>");   
		args.declareArgument("edgeoffset", 
			"Defines the value of the edge offset (theta) (default: no edge offset).", 
			1, "<val>");        
	}
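
For reference, a hedged sketch of reading these options back. The method name and the members (_shypFileName, _isShypCompressed) are assumptions for illustration, not taken from the project.

	// Minimal sketch, assuming hypothetical member names.
	void BaseLearner::initBaseArguments(const nor_utils::Args& args)
	{
		if ( args.hasArgument("shypname") )
			args.getValue("shypname", 0, _shypFileName);

		if ( args.hasArgument("shypcomp") )
		{
			int compressFlag = 0;                        // <flag 0-1> as declared above
			args.getValue("shypcomp", 0, compressFlag);
			_isShypCompressed = (compressFlag != 0);
		}
	}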
Example #24
	void AbstainableLearner::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);
		
		args.declareArgument("abstention", 
							 "Activate the abstention. Available types are:\n"
							 "  greedy: sorting and checking in O(k^2)\n"
							 "  full: the O(2^k) full search\n"
							 "  real: use the AdaBoost.MH with real valued predictions\n"
							 "  classwise: abstain if classwise edge <= theta",
							 1, "<type>");
	}
Example #25
	void FeaturewiseLearner::declareArguments(nor_utils::Args& args)
	{
		AbstainableLearner::declareArguments(args);
		
		args.declareArgument("rsample",
							 "Instead of searching for a featurewise in all the possible dimensions (features), select a set of "
							 " size <num> of random dimensions. "
							 "Example: -rsample 50 -> Search over only 50 dimensions"
							 "(Turned off for Haar: use -csample instead)",
							 1, "<num>");
		
	}
Example #26
	void FilterBoostLearner::doConfusionMatrix(const nor_utils::Args& args)
	{
		FilterBoostClassifier classifier(args, _verbose);

		// -cmatrix <dataFile> <shypFile>
		if ( args.hasArgument("cmatrix") )
		{
			string testFileName = args.getValue<string>("cmatrix", 0);
			string shypFileName = args.getValue<string>("cmatrix", 1);

			classifier.printConfusionMatrix(testFileName, shypFileName);
		}
		// -cmatrixfile <dataFile> <shypFile> <outFile>
		else if ( args.hasArgument("cmatrixfile") )
		{
			string testFileName = args.getValue<string>("cmatrix", 0);
			string shypFileName = args.getValue<string>("cmatrix", 1);
			string outResFileName = args.getValue<string>("cmatrix", 2);

			classifier.saveConfusionMatrix(testFileName, shypFileName, outResFileName);
		}
	}
Example #27
void TreeLearnerUCT::initLearningOptions(const nor_utils::Args& args)
{
    BaseLearner::initLearningOptions(args);

    string baseLearnerName;
    args.getValue("baselearnertype", 0, baseLearnerName);
    args.getValue("baselearnertype", 1, _numBaseLearners);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    for( int ib = 0; ib < _numBaseLearners; ++ib ) {
        _baseLearners.push_back(pWeakHypothesisSource->create());
        _baseLearners[ib]->initLearningOptions(args);

        vector< int > tmpVector( 2, -1 );
        _idxPairs.push_back( tmpVector );
    }

    string updateRule = "";
    if ( args.hasArgument( "updaterule" ) )
        args.getValue("updaterule", 0, updateRule );

    if ( updateRule.compare( "edge" ) == 0 )
        _updateRule = EDGE_SQUARE;
    else if ( updateRule.compare( "alphas" ) == 0 )
        _updateRule = ALPHAS;
    else if ( updateRule.compare( "edgesquare" ) == 0 )
        _updateRule = ESQUARE;
    else {
        cerr << "Unknown update rule in ProductLearnerUCT (set to default [edge]" << endl;
        _updateRule = EDGE_SQUARE;
    }

}
Example #28
	void AdaBoostMHLearner::doPosteriors(const nor_utils::Args& args)
	{
		AdaBoostMHClassifier classifier(args, _verbose);
		int numofargs = args.getNumValues( "posteriors" );
		// -posteriors <dataFile> <shypFile> <outFile> <numIters>
		string testFileName = args.getValue<string>("posteriors", 0);
		string shypFileName = args.getValue<string>("posteriors", 1);
		string outFileName = args.getValue<string>("posteriors", 2);
		int numIterations = args.getValue<int>("posteriors", 3);
		int period = 0;
		
		if ( numofargs == 5 )
			period = args.getValue<int>("posteriors", 4);
		
		classifier.savePosteriors(testFileName, shypFileName, outFileName, numIterations, period);
	}
Example #29
	void AbstainableLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);
		
		// set abstention
		if ( args.hasArgument("abstention") )
		{
			string abstType = args.getValue<string>("abstention", 0);
			
			if (abstType == "greedy")
				_abstention = ABST_GREEDY;
			else if (abstType == "full")
				_abstention = ABST_FULL;
			else if (abstType == "real")
				_abstention = ABST_REAL;
			else if (abstType == "classwise")
				_abstention = ABST_CLASSWISE;
			else
			{
				cerr << "ERROR: Invalid type of abstention <" << abstType << ">!!" << endl;
				exit(1);
			}
		}
	}
Example #30
File: main.cpp Project: busarobi/MDDAG2
/**
 * Show the help for the options.
 * \param args The arguments structure.
 * \date 28/11/2005
 */
void showOptionalHelp(nor_utils::Args& args)
{
	string helpType = args.getValue<string>("h", 0);
	
	cout << "MultiBoost (v" << CURRENT_VERSION << "). An obvious name for a multi-class AdaBoost learner." << endl;
	cout << "---------------------------------------------------------------------------" << endl;
	
	if (helpType == "general")
		args.printGroup("General Options");
	else if (helpType == "io")
		args.printGroup("I/O Options");
	else if (helpType == "algo")
		args.printGroup("Basic Algorithm Options");
	else if (helpType == "bandits")
		args.printGroup("Bandit Algorithm Options");
	else if (helpType == "vjcascade")
		args.printGroup("Viola-Jones Cascade Algorithm Options");
	else if (helpType == "softcascade")
		args.printGroup("SoftCascade Algorithm Options");
	else if ( BaseLearner::RegisteredLearners().hasLearner(helpType) )
		args.printGroup(helpType + " Options");
	else
		cerr << "ERROR: Unknown help section <" << helpType << ">" << endl;
}