Example #1
/*------------------------------------------------------------------*/
void AzTrTreeFeat::printHelp(AzHelp &h) const
{
  h.begin(Azforest_config, "AzTrTreeFeat"); 
  h.item_experimental(kw_doCountRules, help_doCountRules); 
  h.item_experimental(kw_doCheckConsistency, help_doCheckConsistency); 
  h.end(); 
}
Example #2
/*------------------------------------------------*/
void AzRgforest::printHelp(AzHelp &h) const
{
  fs->printHelp(h); /* sub-component prints its own help section first */

  h.begin(Azforest_config, "AzRgforest", "Forest-wide control");
  h.item(kw_loss, help_loss, AzLoss::lossName(loss_type_dflt));
  /* Append the per-loss description lines supplied by AzLoss. */
  AzDataPool<AzBytArr> pool_desc;
  AzLoss::help_lines(h.getLevel(), &pool_desc);
  for (int ix = 0; ix < pool_desc.size(); ++ix) {
    h.writeln_desc(pool_desc.point(ix)->c_str());
  }
  h.item(kw_max_lnum, help_max_lnum, max_lnum_dflt);
  h.item_experimental(kw_max_tree_num, help_max_tree_num, "Don't care");
  h.item(kw_lnum_inc_opt, help_lnum_inc_opt, lnum_inc_opt_dflt);
  h.item(kw_lnum_inc_test, help_lnum_inc_test, lnum_inc_test_dflt);
  h.item(kw_s_tree_num, help_s_tree_num, s_tree_num_dflt);

  h.item_experimental(kw_temp_for_trees, help_temp_for_trees);
  h.item_experimental(kw_f_ratio, help_f_ratio);
  h.item_experimental(kw_doPassiveRoot, help_doPassiveRoot);
  h.end();

  /* Regularizer, optimizer, and ensemble each print their own sections. */
  reg_depth->printHelp(h);
  opt->printHelp(h);
  ens->printHelp(h);

  h.begin(Azforest_config, "AzRgforest", "Info display");
  h.item(kw_doTime, help_doTime);
  h.item(kw_beVerbose, help_beVerbose);
  h.item(kw_mem_policy, help_mem_policy, mp_not_beTight);
  h.end();
}
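Example #2 is the composite case: AzRgforest delegates to its sub-components (fs, reg_depth, opt, ens) and wraps only its own parameters in begin/end sections. The sketch below distills that delegation pattern; MyForest and the kw_my_* / help_my_* / my_param_dflt identifiers are hypothetical, and the AzHelp calls are assumed to behave as in the examples in this listing.

/* Hypothetical sketch, not from the RGF sources. */
class MyForest {
public:
  void printHelp(AzHelp &h) const {
    tree->printHelp(h);                      /* sub-component section first */
    h.begin(Azforest_config, "MyForest", "Forest-wide control");
    h.item(kw_my_param, help_my_param, my_param_dflt);  /* regular parameter */
    h.item_experimental(kw_my_exp, help_my_exp);        /* experimental parameter */
    h.end();
    opt->printHelp(h);                       /* remaining sub-component sections */
  }
private:
  AzRgfTree *tree;   /* prints its own section, as in Example #3 */
  AzOptOnTree *opt;  /* prints its own section, as in Example #4 */
};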
Example #3
/*--------------------------------------------------------*/
void AzRgfTree::printHelp(AzHelp &h) const
{
  h.begin(Aztree_config, "AzRgfTree", "Tree-wise control"); 
  h.item(kw_min_size, help_min_size, min_size_dflt); 
  h.item_experimental(kw_max_depth, help_max_depth, "-1: Don't care"); 
  h.item_experimental(kw_max_leaf_num, help_max_leaf_num, "-1: Don't care"); 
  h.item_experimental(kw_doUseInternalNodes, help_doUseInternalNodes); 
  h.item_experimental(kw_tree_beVerbose, help_tree_beVerbose); 
  h.end(); 
}
Example #4
/*--------------------------------------------------------*/
void AzOptOnTree::printHelp(AzHelp &h) const
{
  h.begin(Azopt_config, "AzOptOnTree"); 
  h.item_required_lvl(kw_lambda, help_lambda, 1); 
  h.item_experimental(kw_sigma, help_sigma, sigma_dflt); 
  h.item(kw_doUseAvg, help_doUseAvg); 
  /* Build the default-value note shown for max_ite_num:
     "<n> <note for other losses>; <m> <note for exponential loss>". */
  AzBytArr s_dflt; 
  s_dflt.cn(max_ite_num_dflt_oth);  s_dflt.c(help_oth_loss); s_dflt.c("; "); 
  s_dflt.cn(max_ite_num_dflt_expo); s_dflt.c(help_expo_loss); 
  h.item(kw_max_ite_num, help_max_ite_num, s_dflt.c_str()); 
  h.item_experimental(kw_doIntercept, help_doIntercept); 
  h.item(kw_eta, help_eta, eta_dflt); 
  h.item_experimental(kw_exit_delta, help_exit_delta, exit_delta_dflt); 
  h.end(); 
}
Example #5
/* static */
void AzsSvrg::printHelp(AzHelp &h)
{ 
  h.item_required(kw_ite_num, "Number of iterations (i.e., how many times to go through the training data).", " 30"); 
  h.item_required(kw_svrg_interval, "SVRG interval.  E.g., if this value is 2, average gradient is computed after 2 iterations, 4 iterations, and so on.  Note: one iteration goes through the entire training data once.");   
  h.item_required(kw_sgd_ite, "Number of initial SGD iterations before starting SVRG."); 
  h.item_required(kw_eta, "Learning rate."); 
  h.item_required(kw_lam, "L2 regularization parameter."); 
  h.item_required(kw_loss, "Loss function.  Logistic | Square", " Logistic"); 
  h.item_noquotes("", "\"Logistic\" with >2 classes: multi-class logistic; one vs. all otherwise.  Use \"Square\" if the task is regression.");   
  h.nl(); 
  h.item_experimental(kw_momentum, "Momentum"); 
  h.item(kw_pred_fn, "File to write predictions at the end of training.  Optional");          
  h.item(kw_rseed, "Seed for randomizing the order of training data points.", " 1"); 
  h.item_experimental(kw_with_replacement, "Randomize the order of training data points with replacement."); 
  h.item(kw_test_interval, "How often to test.  E.g., if this value is 2, test is done after 2 iterations, 4 iterations, and so on.  It must be a multiple of svrg_interval.", 
         " once at the end of training"); 
  h.item(kw_do_compact, "When specified, derivatives with previous weights are not saved but recomputed, which uses a little less memory and slows down training a little."); 
  h.item(kw_do_show_loss, "Show training loss (training objective including the regularization term) and test loss when test is done.  If \"Regression\" is on, test loss is always shown irrespective of this switch."); 
  h.item(kw_do_show_timing, "Display time stamps to show progress."); 
}
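Taken together, the five examples cover most of the AzHelp item variants. The recap below is a hypothetical printHelp that uses each variant once; the kw_* / help_* / *_dflt identifiers are placeholders, and only call signatures that actually appear above are used.

/* Hypothetical recap, not from the RGF sources. */
void MyComponent::printHelp(AzHelp &h) const
{
  h.begin(Azopt_config, "MyComponent", "Demo section"); /* titled section (Examples #2-#4) */
  h.item_required(kw_alpha, help_alpha);                /* required, no suggested value */
  h.item_required(kw_beta, help_beta, " 10");           /* required, with suggested value (Example #5) */
  h.item_required_lvl(kw_gamma, help_gamma, 1);         /* required at help level 1 (Example #4) */
  h.item(kw_delta, help_delta);                         /* optional, no default shown */
  h.item(kw_eps, help_eps, eps_dflt);                   /* optional with default */
  h.item_experimental(kw_zeta, help_zeta);              /* experimental switch */
  h.item_noquotes("", "Free-form description line (Example #5).");
  h.nl();                                               /* blank line (Example #5) */
  h.end();
}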