/* Reset the edit-operation weights to their defaults, then run the
   generic dynamic-programming pass inherited from DynProg. */
void Levenshtein::compute() {
  reset_weights();
  DynProg::compute();
}
/*--------------------------------------------------------*/ void AzsSvrg::_train_test() { if (rseed > 0) { srand(rseed); /* initialize the random seed */ } /*--- initialization ---*/ int dim = m_trn_x->rowNum(); reset_weights(dim); /*--- iterate ... ---*/ AzTimeLog::print("--- Training begins ... ", log_out); AzsSvrgData_fast prev_fast; AzsSvrgData_compact prev_compact; int ite; for (ite = 0; ite < ite_num; ++ite) { if (do_show_timing) AzTimeLog::print("--- iteration#", ite+1, log_out); if (doing_svrg(ite) && (ite-sgd_ite) % svrg_interval == 0) { if (do_show_timing) AzTimeLog::print("Computing gradient average ... ", log_out); if (do_compact) get_avg_gradient_compact(&prev_compact); else get_avg_gradient_fast(&prev_fast); } if (do_show_timing) AzTimeLog::print("Updating weights ... ", log_out); AzIntArr ia_dxs; const int *dxs = gen_seq(dataSize(), ia_dxs); int ix; for (ix = 0; ix < dataSize(); ++ix) { int dx = dxs[ix]; /* data point index */ AzDvect v_deriv(class_num); get_deriv(dx, &v_deriv); /* compute the derivatives */ if (doing_svrg(ite)) { if (do_compact) updateDelta_svrg_compact(dx, &v_deriv, prev_compact); else updateDelta_svrg_fast(dx, &v_deriv, prev_fast); } else { updateDelta_sgd(dx, &v_deriv); } flushDelta(); } show_perf(ite); } if (do_show_timing) AzTimeLog::print("--- End of training ... ", log_out); /*--- write predictions to a file if requested ---*/ if (s_pred_fn.length() > 0) { AzTimeLog::print("Writing predictions to ", s_pred_fn.c_str(), log_out); write_pred(m_tst_x, s_pred_fn.c_str()); } }