Example #1
0
/* static */
/*
 * Prints the command-line help for the SVRG trainer by registering each
 * keyword with the AzHelp object `h`.
 *
 * The AzHelp calls used here (semantics inferred from names — confirm in
 * AzHelp's declaration):
 *   - item_required(kw, desc[, default])  : a required parameter;
 *   - item(kw, desc[, default])           : an optional parameter;
 *   - item_experimental(kw, desc)         : an experimental option;
 *   - item_noquotes(kw, desc)             : presumably a continuation/note
 *                                           line printed without quoting;
 *   - nl()                                : presumably emits a blank line,
 *                                           separating required from
 *                                           optional items.
 * The optional third argument (e.g. " 30", " 1", " Logistic") appears to be
 * the default value shown in the help text — TODO confirm against AzHelp.
 *
 * NOTE(review): the order of calls here is the order items appear in the
 * printed help; do not reorder.
 */
void AzsSvrg::printHelp(AzHelp &h)
{ 
  /* ---- Required parameters ---- */
  h.item_required(kw_ite_num, "Number of iterations (i.e., how many times to go through the training data).", " 30"); 
  h.item_required(kw_svrg_interval, "SVRG interval.  E.g., if this value is 2, average gradient is computed after 2 iterations, 4 iterations, and so on.  Note: one iteration goes through the entire training data once.");   
  h.item_required(kw_sgd_ite, "number of initial SGD iterations before starting SVRG."); 
  h.item_required(kw_eta, "Learning rate."); 
  h.item_required(kw_lam, "L2 regularization parameter."); 
  h.item_required(kw_loss, "Loss function.  Logistic | Square", " Logistic"); 
  /* Extra explanatory line attached to the loss item above (empty keyword). */
  h.item_noquotes("", "\"Logistic\" with >2 classes: multi-class logistic; one vs. all otherwise.  Use \"Square\" if the task is regression.");   
  h.nl(); 
  /* ---- Optional / experimental parameters ---- */
  h.item_experimental(kw_momentum, "Momentum"); 
  h.item(kw_pred_fn, "File to write predictions at the end of training.  Optional");          
  h.item(kw_rseed, "Seed for randomizing the order of training data points.", " 1"); 
  h.item_experimental(kw_with_replacement, "Randomize the order of training data points with replacement."); 
  h.item(kw_test_interval, "How often to test.  E.g., if this value is 2, test is done after 2 iterations, 4 iterations, and so on.  It must be a multiple of svrg_interval.", 
         " once at the end of training"); 
  h.item(kw_do_compact, "When specified, derivatives with previous weights are not saved and recomputed, which consumes a little less memory and slows down the training a little."); 
  /* NOTE(review): "Regression" below presumably refers to a switch declared
     elsewhere in this file — not visible in this chunk; verify. */
  h.item(kw_do_show_loss, "Show training loss (training objective including the regularization term) and test loss when test is done.  If \"Regression\" is on, test loss is always shown irrespective of this switch."); 
  h.item(kw_do_show_timing, "Display time stamps to show progress."); 
}