// Removes the row identified by |id| from the recommender model.
// Always returns 0 (legacy int status convention).
int recommender_serv::clear_row(std::string id) {
  check_set_config();
  ++clear_row_cnt_;  // per-server operation counter
  rcmdr_.get_model()->clear_row(id);
  return 0;
}
// Finds rows similar to the stored row |id|, returning at most
// |ret_num| results.
similar_result recommender_serv::similar_row_from_id(std::string id, size_t ret_num) {
  check_set_config();
  similar_result result;
  rcmdr_.get_model()->similar_row(id, result, ret_num);
  return result;
}
// Driver-based variant: delegate the similarity lookup directly to the
// recommender driver.
similar_result recommender_serv::similar_row_from_id(std::string id, size_t ret_num) {
  check_set_config();
  return recommender_->similar_row_from_id(id, ret_num);
}
// Classifies every datum in |data| and returns, per input, one
// estimate_result per label with its raw score from the core classifier.
// Non-finite scores are logged as warnings but still returned unchanged.
std::vector<std::vector<estimate_result> > classifier_serv::classify(std::vector<jubatus::datum> data) const {
  std::vector<std::vector<estimate_result> > ret;
  check_set_config();
  // NOTE(review): v and d are reused across loop iterations; this assumes
  // convert() and converter_->convert() fully overwrite their outputs
  // rather than appending — confirm against the converter implementation.
  sfv_t v;
  fv_converter::datum d;
  for (size_t i = 0; i < data.size(); ++i) {
    convert<datum, fv_converter::datum>(data[i], d);
    converter_->convert(d, v);
    wm_.wm_->get_weight(v);  // rescale features with the shared weights
    classify_result scores;
    clsfer_.classifier_->classify_with_scores(v, scores);
    vector<estimate_result> r;
    for (vector<classify_result_elem>::const_iterator p = scores.begin(); p != scores.end(); ++p){
      estimate_result e;
      e.label = p->label;
      e.prob = p->score;
      r.push_back(e);
      if( !isfinite(p->score) ){
        // Surface NaN/inf scores in the log so broken models are visible.
        LOG(WARNING) << p->label << ":" << p->score;
      }
    }
    ret.push_back(r);
  }
  return ret;
  //std::vector<estimate_results> >::ok(ret);
}
// Updates the point |id| with |data| and returns its recomputed
// anomaly score.
float anomaly_serv::update(const string& id, const datum& data) {
  check_set_config();
  const float score = anomaly_->update(id, data);
  DLOG(INFO) << "point updated: " << id;
  return score;
}
// Overwrites the point |id| with |data| (replacing rather than merging)
// and returns the resulting anomaly score.
float anomaly_serv::overwrite(const string& id, const datum& data) {
  check_set_config();
  const float score = anomaly_->overwrite(id, data);
  DLOG(INFO) << "point overwritten: " << id;
  return score;
}
// Returns the ids of every row currently held by the model.
std::vector<std::string> recommender_serv::get_all_rows() {
  check_set_config();
  std::vector<std::string> ids;
  rcmdr_.get_model()->get_all_row_ids(ids);
  return ids;
}
// Removes row |id| from the model; bool-returning RPC variant.
bool recommender_serv::clear_row(std::string id) {
  check_set_config();
  ++clear_row_cnt_;  // operation counter
  rcmdr_.get_model()->clear_row(id);
  DLOG(INFO) << "row cleared: " << id;
  return true;
}
// Updates (or creates) row |id| with |dat|. The driver performs the
// datum-to-feature conversion internally in this code path.
bool recommender_serv::update_row(std::string id, datum dat) {
  check_set_config();
  ++update_row_cnt_;
  recommender_->update_row(id, dat);
  DLOG(INFO) << "row updated: " << id;
  return true;
}
// Resets the model together with every operation counter.
// Always returns 0 (legacy int status convention).
int recommender_serv::clear() {
  check_set_config();
  clear_row_cnt_ = 0;
  update_row_cnt_ = 0;
  build_cnt_ = 0;
  mix_cnt_ = 0;
  rcmdr_.get_model()->clear();
  return 0;
}
float recommender_serv::calc_l2norm(const datum& q) { check_set_config(); fv_converter::datum d0; convert<datum, fv_converter::datum>(q, d0); sfv_t v0; converter_->convert(d0, v0); return recommender::recommender_base::calc_l2norm(v0); }
// Runs the regression model on each datum and returns one estimate per
// input, in order.
vector<float> regression_serv::estimate(const vector<datum>& data) const {
  check_set_config();
  vector<float> estimates;
  estimates.reserve(data.size());
  for (size_t idx = 0; idx < data.size(); ++idx) {
    estimates.push_back(regression_->estimate(data[idx]));
  }
  return estimates;
}
// Resets the model and row counters; bool-returning RPC variant.
bool recommender_serv::clear() {
  check_set_config();
  clear_row_cnt_ = 0;
  update_row_cnt_ = 0;
  recommender_->clear();
  LOG(INFO) << "model cleared: " << argv().name;
  return true;
}
int recommender_serv::update_row(std::string id,datum dat) { check_set_config(); ++update_row_cnt_; fv_converter::datum d; convert<jubatus::datum, fv_converter::datum>(dat, d); sfv_diff_t v; converter_->convert_and_update_weight(d, v); rcmdr_.get_model()->update_row(id, v); return 0; }
float recommender_serv::calc_similarity(const datum& l, const datum& r) { check_set_config(); fv_converter::datum d0, d1; convert<datum, fv_converter::datum>(l, d0); convert<datum, fv_converter::datum>(r, d1); sfv_t v0, v1; converter_->convert(d0, v0); converter_->convert(d1, v1); return recommender::recommender_base::calc_similality(v0, v1); }
// Finds up to |s| rows similar to the given datum, which is first
// converted into a sparse feature vector.
similar_result recommender_serv::similar_row_from_datum(datum data, size_t s) {
  check_set_config();
  fv_converter::datum converted;
  convert<datum, fv_converter::datum>(data, converted);
  sfv_t features;
  converter_->convert(converted, features);
  similar_result result;
  rcmdr_.get_model()->similar_row(features, result, s);
  return result;
}
// Full reset: model, feature-weight manager, and every counter.
bool recommender_serv::clear() {
  check_set_config();
  clear_row_cnt_ = 0;
  update_row_cnt_ = 0;
  build_cnt_ = 0;
  mix_cnt_ = 0;
  rcmdr_.get_model()->clear();
  wm_.clear();  // also drop the learned feature weights
  LOG(INFO) << "model cleared: " << argv().name;
  return true;
}
// Estimates a value for each datum: convert to a sparse feature vector,
// then run the regression model. Returns one float per input, in order.
vector<float> regression_serv::estimate(const vector<jubatus::datum>& data) const {
  check_set_config();
  vector<float> ret;
  // NOTE(review): v and d are reused across iterations; this assumes the
  // converters fully overwrite their outputs — confirm.
  sfv_t v;
  fv_converter::datum d;
  for (size_t i = 0; i < data.size(); ++i) {
    convert<datum, fv_converter::datum>(data[i], d);
    converter_->convert(d, v);
    ret.push_back(regression_->estimate(v));
  }
  return ret;
  //vector<estimate_results> >::ok(ret);
}
// Reconstructs the datum stored under row |id| by decoding its feature
// vector and reverting the feature conversion.
datum recommender_serv::decode_row(std::string id) {
  check_set_config();
  sfv_t features;
  rcmdr_.get_model()->decode_row(id, features);
  fv_converter::datum reverted;
  fv_converter::revert_feature(features, reverted);
  datum result;
  convert<fv_converter::datum, datum>(reverted, result);
  return result;
}
int regression_serv::train(const vector<scored_datum>& data) { check_set_config(); int count = 0; core::fv_converter::datum d; for (size_t i = 0; i < data.size(); ++i) { // TODO(unno): change interface of driver? regression_->train(std::make_pair(data[i].score, data[i].data)); DLOG(INFO) << "trained: " << data[i].score; count++; } // TODO(kuenishi): send count incrementation to mixer return count; }
// Finds up to |s| rows similar to |data| and converts the driver's
// (id, score) pairs into the RPC-facing id_with_score type.
std::vector<id_with_score> recommender_serv::similar_row_from_datum(datum data, size_t s) {
  check_set_config();
  // TODO(unno): remove conversion code
  const vector<pair<string, float> > raw(
      recommender_->similar_row_from_datum(data, s));
  vector<id_with_score> converted;
  converted.reserve(raw.size());
  for (size_t i = 0; i < raw.size(); ++i) {
    converted.push_back(id_with_score(raw[i].first, raw[i].second));
  }
  return converted;
}
// Fills in missing values of |dat|: convert to features, let the model
// complete the row, then revert the completed vector back into a datum.
datum recommender_serv::complete_row_from_datum(datum dat) {
  check_set_config();
  fv_converter::datum converted;
  convert<jubatus::datum, fv_converter::datum>(dat, converted);
  sfv_t query, completed;
  converter_->convert(converted, query);
  rcmdr_.get_model()->complete_row(query, completed);
  fv_converter::datum reverted;
  fv_converter::revert_feature(completed, reverted);
  datum result;
  convert<fv_converter::datum, datum>(reverted, result);
  return result;
}
int regression_serv::train(const vector<pair<float, jubatus::datum> >& data) { check_set_config(); int count = 0; sfv_t v; fv_converter::datum d; for (size_t i = 0; i < data.size(); ++i) { convert<jubatus::datum, fv_converter::datum>(data[i].second, d); converter_->convert_and_update_weight(d, v); regression_->train(v, data[i].first); count++; } // FIXME: send count incrementation to mixer return count; }
// nolock, random id_with_score anomaly_serv::add(const datum& data) { check_set_config(); uint64_t id = idgen_->generate(); string id_str = jubatus::util::lang::lexical_cast<string>(id); #ifdef HAVE_ZOOKEEPER_H if (argv().is_standalone()) { #endif jubatus::util::concurrent::scoped_wlock lk(rw_mutex()); event_model_updated(); // TODO(unno): remove conversion code pair<string, float> res = anomaly_->add(id_str, data); return id_with_score(res.first, res.second); #ifdef HAVE_ZOOKEEPER_H } else { return add_zk(id_str, data); } #endif }
// Trains the classifier on (label, datum) pairs. Each datum is converted
// to a sparse feature vector, the weight manager is updated with it and
// then re-applied, and the weighted vector is fed to the core classifier.
// Returns the number of examples consumed.
int classifier_serv::train(std::vector<std::pair<std::string, jubatus::datum> > data) {
  check_set_config();
  int count = 0;
  // NOTE(review): v and d are reused across iterations; this assumes the
  // converters fully overwrite their outputs — confirm.
  sfv_t v;
  fv_converter::datum d;
  for (size_t i = 0; i < data.size(); ++i) {
    convert<jubatus::datum, fv_converter::datum>(data[i].second, d);
    converter_->convert(d, v);
    sort_and_merge(v);  // canonicalize the sparse vector before weighting
    wm_.wm_->update_weight(v);  // fold this example into the weight state
    wm_.wm_->get_weight(v);     // then rescale v with the updated weights
    clsfer_.classifier_->train(v, data[i].first);
    count++;
  }
  // FIXME: send count incrementation to mixer
  return count;
}
// Returns every stored row id, delegating to the anomaly driver.
vector<string> anomaly_serv::get_all_rows() const {
  check_set_config();
  return anomaly_->get_all_rows();
}
// Computes the anomaly score of |data| (const: does not store the point).
float anomaly_serv::calc_score(const datum& data) const {
  check_set_config();
  return anomaly_->calc_score(data);
}
// Drops the entire model; always reports success.
bool anomaly_serv::clear() {
  check_set_config();
  anomaly_->clear();
  LOG(INFO) << "model cleared: " << argv().name;
  return true;
}
// Removes the single point |id| from the model; always reports success.
bool anomaly_serv::clear_row(const string& id) {
  check_set_config();
  anomaly_->clear_row(id);
  DLOG(INFO) << "row cleared: " << id;
  return true;
}
// Returns the raw configuration string this server holds.
string anomaly_serv::get_config() const {
  check_set_config();
  return config_;
}