virtual void solve() {
  auto lines = paracel_load(input);
  local_parser(item_vects, lines);
  std::cout << "local parser done" << std::endl;
  // handler is invoked on each chunk of lines streamed in by
  // paracel_sequential_loadall: parse the chunk, then learn against it
  auto handler = [&](const std::vector<std::string> & linelst) {
    std::unordered_map<std::string, std::vector<double> > other_item_vects;
    local_parser(other_item_vects, linelst);
    local_learning(other_item_vects);
  };
  paracel_sequential_loadall(input, handler);
  select_top();
  std::cout << "learning done" << std::endl;
}
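local_learning and select_top are not shown in this excerpt. A minimal sketch of what the handler might accumulate, written as a self-contained free function: it scores every locally held item against one loaded chunk by dot product (equivalent to cosine similarity once the vectors are normalized) and stores the scores for a later top-k selection. The type aliases and the sim output map are illustrative only, not Paracel API.

#include <algorithm>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

using item_map = std::unordered_map<std::string, std::vector<double> >;
using sim_map  = std::unordered_map<std::string,
                                    std::vector<std::pair<std::string, double> > >;

// Hypothetical free-function form of local_learning: accumulate dot-product
// similarity between locally held items and one chunk of other items.
void local_learning(const item_map & item_vects,
                    const item_map & other_item_vects,
                    sim_map & sim) {
  for(const auto & mine : item_vects) {
    for(const auto & other : other_item_vects) {
      if(mine.first == other.first) continue;  // skip self-similarity
      double score = 0.;
      size_t dim = std::min(mine.second.size(), other.second.size());
      for(size_t i = 0; i < dim; ++i) {
        score += mine.second[i] * other.second[i];
      }
      sim[mine.first].emplace_back(other.first, score);
    }
  }
}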
void predict(const std::string & pred_fn) {
  auto lines = pt->paracel_load(pred_fn);
  local_parser(lines);
  for(size_t i = 0; i < samples.size(); ++i) {
    predv.push_back(lr_hypothesis(samples[i]));
  }
}
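lr_hypothesis is not defined in this excerpt. A minimal sketch, assuming the standard logistic-regression hypothesis (sigmoid of the dot product between the learned weights and a sample); in the snippet above theta would be a member of the solver, so the real lr_hypothesis takes only the sample, but the free-function form below is self-contained.

#include <cmath>
#include <vector>

// Hypothetical helper: standard logistic-regression hypothesis.
// Returns the predicted probability of the positive class.
double lr_hypothesis(const std::vector<double> & sample,
                     const std::vector<double> & theta) {
  double dp = 0.;
  for(size_t i = 0; i < sample.size() && i < theta.size(); ++i) {
    dp += theta[i] * sample[i];  // dot product of weights and features
  }
  return 1. / (1. + std::exp(-dp));  // sigmoid
}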
virtual void solve() {
  auto lines = paracel_load(input);
  local_parser(item_vects, lines);
  std::cout << "parser done" << std::endl;
  if(learning_method == "default") {
    auto all_lines = paracel_loadall(input);
    local_parser(all_item_vects, all_lines);
    std::cout << "loadall done" << std::endl;
    normalize(item_vects);
    normalize(all_item_vects);
    std::cout << "normalize done" << std::endl;
    sync();
    learning();
  } else if(learning_method == "limit_storage") {
    normalize(item_vects); // normalize here to reduce calculation
    init_paras();
    sync();
    mls_learning();
  } else {}
}
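normalize is also left to the solver. A minimal sketch, assuming it L2-normalizes each item vector in place so that later similarity computations reduce to plain dot products; this is an illustration, not the framework's implementation.

#include <cmath>
#include <string>
#include <unordered_map>
#include <vector>

// Hypothetical helper: L2-normalize every item vector in place.
void normalize(std::unordered_map<std::string, std::vector<double> > & item_vects) {
  for(auto & kv : item_vects) {
    double norm = 0.;
    for(double v : kv.second) norm += v * v;
    norm = std::sqrt(norm);
    if(norm == 0.) continue;  // leave all-zero vectors unchanged
    for(double & v : kv.second) v /= norm;
  }
}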
void logistic_regression::solve() {
  auto lines = paracel_load(input);
  local_parser(lines); // init data
  sync();
  if(learning_method == "dgd") {
    dgd_learning();
  } else if(learning_method == "ipm") {
    ipm_learning();
  } else if(learning_method == "agd") {
    agd_learning();
  } else {
    std::cout << "learning method not supported." << std::endl;
    return;
  }
  sync();
  //print(theta);
}
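All three learning routines update the weight vector theta; they differ mainly in how gradients or weights are exchanged through the parameter server. As a point of reference only, a single plain (non-distributed) gradient-descent pass for logistic loss looks like the sketch below. The names theta, samples and labels follow the snippets above; alpha (the learning rate) is assumed.

#include <cmath>
#include <vector>

// Reference only: one local gradient-descent pass for logistic loss.
void gradient_step(std::vector<double> & theta,
                   const std::vector<std::vector<double> > & samples,
                   const std::vector<double> & labels,
                   double alpha) {
  for(size_t i = 0; i < samples.size(); ++i) {
    double dp = 0.;
    for(size_t j = 0; j < theta.size(); ++j) dp += theta[j] * samples[i][j];
    double h = 1. / (1. + std::exp(-dp));  // sigmoid hypothesis
    for(size_t j = 0; j < theta.size(); ++j) {
      theta[j] -= alpha * (h - labels[i]) * samples[i][j];  // log-loss gradient
    }
  }
}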
void test(const std::string & test_fn) {
  auto lines = pt->paracel_load(test_fn);
  local_parser(lines);
  std::cout << "loss in test dataset is: " << calc_loss() << std::endl;
}
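calc_loss is likewise a solver member with no arguments in the snippets above. A minimal self-contained sketch, assuming the averaged logistic (cross-entropy) loss over 0/1 labels and predicted probabilities such as those filled into predv by lr_hypothesis; a regression solver would use a squared-error variant instead.

#include <cmath>
#include <vector>

// Hypothetical helper: mean logistic loss over predictions and 0/1 labels.
double calc_loss(const std::vector<double> & predv,
                 const std::vector<double> & labels) {
  if(predv.empty()) return 0.;
  double loss = 0.;
  for(size_t i = 0; i < predv.size(); ++i) {
    loss += -labels[i] * std::log(predv[i])
            - (1. - labels[i]) * std::log(1. - predv[i]);
  }
  return loss / predv.size();  // average over the local dataset
}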
void solve() {
  auto lines = pt->paracel_load(input);
  local_parser(lines);
  learning();
}
void logistic_regression::predict(const std::string & pred_fn) {
  auto lines = paracel_load(pred_fn);
  local_parser(lines); // re-init samples, labels
  std::cout << "mean loss: " << calc_loss() << std::endl;
}
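local_parser re-initializes samples and labels from the loaded lines. Its exact behaviour depends on the dataset format configured for the job; the sketch below is a purely hypothetical variant that assumes one sample per line, comma-separated features with the 0/1 label in the last field.

#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical parser: fills samples and labels from raw text lines.
void local_parser(const std::vector<std::string> & lines,
                  std::vector<std::vector<double> > & samples,
                  std::vector<double> & labels) {
  samples.clear();
  labels.clear();
  for(const auto & line : lines) {
    std::vector<double> fields;
    std::stringstream ss(line);
    std::string tok;
    while(std::getline(ss, tok, ',')) {
      fields.push_back(std::stod(tok));
    }
    if(fields.empty()) continue;       // skip blank lines
    labels.push_back(fields.back());   // label assumed to be the last field
    fields.pop_back();
    samples.push_back(std::move(fields));
  }
}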