Example #1
	void StochasticLearner::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);
		args.declareArgument("graditer",
							 "Declares the number of randomly drawn training instances (size <num> of the training set) used for SGD, "
							 "whereas it declares the number of iterations for Batch Gradient Descent. "
							 "Example: --graditer 50 -> Uses only 50 randomly chosen training instances",
							 1, "<num>");
		
		args.declareArgument("gradmethod",
							 "Declares the gradient method: "
							 "(sgd) Stochastic Gradient Descent, (bgd) Batch Gradient Descent. "
							 "Example: --gradmethod sgd -> Uses stochastic gradient descent",
							 1, "<method>");
		
		args.declareArgument("tfunc",
							 "Target function: "
							 "exploss: Exponential Loss, edge: max. edge. "
							 "Example: --tfunc exploss -> Uses exponential loss for minimization",
							 1, "<function>");
		
		args.declareArgument("initgamma",
							 "The initial learning rate in gradient descent. "
							 "Default value is 10.0",
							 1, "<gamma>");
		
		args.declareArgument("gammdivperiod",
							 "The periodicity of decreasing the learning rate \\gamma. "
							 "Default value is 1",
							 1, "<period>");
	}
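For orientation, here is a minimal sketch of how options declared this way are typically read back in a MultiBoost-style learner. It assumes the nor_utils::Args accessors hasArgument() and getValue(name, index, out) seen elsewhere in the code base; the initLearningOptions hook and the member variables are hypothetical placeholders, not the project's actual implementation.

	// Sketch only: hasArgument()/getValue(name, index, out) are assumed from the
	// surrounding MultiBoost code; _numGradientIterations, _stochastic and the
	// initLearningOptions hook are hypothetical placeholders.
	void StochasticLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);

		// --graditer <num>: sample size for SGD, iteration count for BGD
		if ( args.hasArgument("graditer") )
			args.getValue("graditer", 0, _numGradientIterations);

		// --gradmethod <method>: "sgd" or "bgd"
		if ( args.hasArgument("gradmethod") )
		{
			string method;
			args.getValue("gradmethod", 0, method);
			_stochastic = (method == "sgd");
		}
	}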
Example #2
	void BanditSingleStumpLearner::declareArguments(nor_utils::Args& args)
	{		
		FeaturewiseLearner::declareArguments(args);
		
		args.declareArgument("updaterule", 
			"The weight update rule in the UCT can be 1-sqrt( 1 - edge^2 ) [edge]\n"
			"  or the alpha [alphas]\n"
			"  Default is the first one\n",
			1, "<type>");

		args.declareArgument("rsample", 
			"Number of features to be considered\n"
			"  Default is one\n",
			1, "<K>");

		args.declareArgument("banditalgo", 
			"The bandit algorithm (UCBK, UCBKRandomized, EXP3)\n"
			"  Default is UCBK\n",
			1, "<algoname>");

		args.declareArgument("percent", 
			"The percentage of the database that will be used for estimating the payoffs (EXP3G)\n"
			"  Default is 10%\n",
			1, "<p>");

	}
Example #3
void ParasiteLearner::declareArguments(nor_utils::Args& args)
{
   BaseLearner::declareArguments(args);

   args.declareArgument("pool", 
                        "The name of the shyp file containing the pool of\n"
                        "  weak learners, followed by the number of desired\n"
                        "  weak learners. If -1 or more than the number of \n"
                        "  weak learners, we use all of them",
                        2, "<fileName> <nBaseLearners>");
         
   args.declareArgument("closed", "Include negatives of weak learners (default = false).");

}
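Because "pool" is declared with two values, reading it back means addressing each value by index. Below is a hedged sketch under the same assumptions as above (hasArgument() and getValue(name, index, out)); the member names are hypothetical, and this is not the project's actual implementation.

// Sketch only: indices 0 and 1 select <fileName> and <nBaseLearners>;
// _poolFileName, _numPoolLearners and _closed are hypothetical members.
void ParasiteLearner::initLearningOptions(const nor_utils::Args& args)
{
   BaseLearner::initLearningOptions(args);

   if ( args.hasArgument("pool") )
   {
      args.getValue("pool", 0, _poolFileName);      // <fileName>
      args.getValue("pool", 1, _numPoolLearners);   // <nBaseLearners>
   }

   // "closed" was declared without a value: its presence alone enables it
   if ( args.hasArgument("closed") )
      _closed = true;
}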
Example #4
void TreeLearnerUCT::declareArguments(nor_utils::Args& args)
{
    BaseLearner::declareArguments(args);

    args.declareArgument("baselearnertype",
                         "The name of the learner that serves as a basis for the product\n"
                         "  and the number of base learners to be multiplied\n"
                         "  Don't forget to add its parameters\n",
                         2, "<baseLearnerType> <numBaseLearners>");

    args.declareArgument("updaterule",
                         "The weight update rule in the UCT can be 1-sqrt( 1 - edge^2 ) [edge]\n"
                         "  or the alpha [alphas]\n"
                         "  or edgesquare [edgesquare]\n"
                         "  Default is the first one\n",
                         1, "<type>");

}
Example #5
	void EnumLearnerSA::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);

		args.declareArgument("uoffset", 
			"The offset of u\n",
			1, "<offset>");

	}
Example #6
	void BaseLearner::declareBaseArguments(nor_utils::Args& args)
	{
		args.declareArgument("shypname", 
			"The name of the output strong hypothesis file (default: "
			+ string(SHYP_NAME) + "." + string(SHYP_EXTENSION) + ").", 
			1, "<filename>");

		args.declareArgument("shypcomp", 
			"Whether the shyp file will be compressed (1) or not (0).", 
			1, "<flag 0-1>");

		args.setGroup("Basic Algorithm Options");
		args.declareArgument("resume", 
			"Resumes a training process using the strong hypothesis file.", 
			1, "<shypFile>");   
		args.declareArgument("edgeoffset", 
			"Defines the value of the edge offset (theta) (default: no edge offset).", 
			1, "<val>");        
	}
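The setGroup() call appears to control only how the declared options are grouped in the generated help listing: every declareArgument() that follows lands under the most recent group heading. A sketch of extending the same pattern for a hypothetical learner follows (the class name, group title and option are illustrative, not part of MultiBoost).

	// Illustrative only: "MyLearner" and "mythreshold" are hypothetical; the
	// setGroup()/declareArgument() pattern mirrors declareBaseArguments above.
	void MyLearner::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);

		// Arguments declared from here on are listed under this heading
		// in the help output.
		args.setGroup("MyLearner Options");

		args.declareArgument("mythreshold", 
			"A hypothetical threshold used by MyLearner (default: 0.5).", 
			1, "<val>");
	}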
Example #7
	void BanditTreeLearner::declareArguments(nor_utils::Args& args)
	{
		BanditLearner::declareArguments(args);

		args.declareArgument("baselearnertype", 
			"The name of the learner that serves as a basis for the product\n"
			"  and the number of base learners to be multiplied\n"
			"  Don't forget to add its parameters\n",
			2, "<baseLearnerType> <numBaseLearners>");
	}
Example #8
	void FeaturewiseLearner::declareArguments(nor_utils::Args& args)
	{
		AbstainableLearner::declareArguments(args);
		
		args.declareArgument("rsample",
							 "Instead of searching featurewise over all the possible dimensions (features), "
							 "select a random subset of size <num>. "
							 "Example: -rsample 50 -> Search over only 50 dimensions "
							 "(Turned off for Haar: use -csample instead)",
							 1, "<num>");
		
	}
Example #9
	void AbstainableLearner::declareArguments(nor_utils::Args& args)
	{
		BaseLearner::declareArguments(args);
		
		args.declareArgument("abstention", 
							 "Activate the abstention. Available types are:\n"
							 "  greedy: sorting and checking in O(k^2)\n"
							 "  full: the O(2^k) full search\n"
							 "  real: use the AdaBoost.MH with real valued predictions\n"
							 "  classwise: abstain if classwise edge <= theta",
							 1, "<type>");
	}
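Reading the <type> string back and mapping it to an internal mode might look like the following sketch; the enum values and the _abstention member are hypothetical, and hasArgument()/getValue(name, index, out) are again assumed from the surrounding code rather than quoted from it.

	// Sketch only: eAbstType and _abstention are hypothetical; the string
	// comparisons simply mirror the types listed in the help text above.
	enum eAbstType { ABST_GREEDY, ABST_FULL, ABST_REAL, ABST_CLASSWISE };

	void AbstainableLearner::initLearningOptions(const nor_utils::Args& args)
	{
		BaseLearner::initLearningOptions(args);

		if ( !args.hasArgument("abstention") )
			return; // keep the default behavior when the option is absent

		string abstType;
		args.getValue("abstention", 0, abstType);

		if      ( abstType == "greedy" )    _abstention = ABST_GREEDY;
		else if ( abstType == "full" )      _abstention = ABST_FULL;
		else if ( abstType == "real" )      _abstention = ABST_REAL;
		else if ( abstType == "classwise" ) _abstention = ABST_CLASSWISE;
	}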