// Demonstrates selecting one of two Example values with an if statement:
// Example(1) when i == 1, Example(2) otherwise.
Example FunctionUsingIf(int i) {
    if (i == 1) {
        return Example(1);
    }
    return Example(2);
}
// Round-trips a fitted NaiveBayes model through a Boost text archive and
// verifies that the restored model produces identical class scores.
// NOTE(review): ofs.close() runs while the text_oarchive `oa` is still
// alive; Boost archives may write trailing data in their destructor, so
// confirm this ordering is intentional before reshuffling these statements.
TEST(naive_bayes, serialization) {
    NaiveBayes bayes;
    std::vector<std::string> words1{"hello", "world", "world"};
    std::vector<std::string> words2{"f**k", "world", "world"};
    std::vector<Example> examples = { Example(words1, "positive"), Example(words2, "negative") };
    bayes.fit(examples);
    // Baseline scores from the freshly fitted model.
    auto scores = bayes.scores(examples[0]);

    // Serialize the fitted model to a temporary file.
    TempFile tmp_file;
    std::ofstream ofs(tmp_file.filename);
    boost::archive::text_oarchive oa(ofs);
    oa << bayes;
    ofs.close();

    // Deserialize into a fresh model instance.
    NaiveBayes bayes_restore;
    std::ifstream ifs(tmp_file.filename);
    boost::archive::text_iarchive ia(ifs);
    ia >> bayes_restore;

    // The restored model must score the same example identically.
    auto scores_restore = bayes_restore.scores(examples[0]);
    EXPECT_EQ(scores.get("positive"), scores_restore.get("positive"));
    EXPECT_EQ(scores.get("negative"), scores_restore.get("negative"));
}
// Smoke test: fits a two-class corpus and prints the score map for each
// training example. No assertions — output is inspected manually.
TEST(naive_bayes, example) {
    std::vector<std::string> positive_words{"hello", "world", "world"};
    std::vector<std::string> negative_words{"f**k", "world", "world"};
    std::vector<Example> training = {
        Example(positive_words, "positive"),
        Example(negative_words, "negative")
    };

    NaiveBayes bayes;
    bayes.fit(training);

    // Score every training example in order (same as the original
    // back-to-back calls on examples[0] and examples[1]).
    for (const auto& example : training) {
        print_scores(bayes.scores(example));
    }
}
TEST(naive_bayes, nomatching_words_example) {
    // When nothing in dict matches, we want an empty map returned,
    // rather than smoothed class distributions.
    std::vector<std::string> positive_words{"hello", "world"};
    std::vector<std::string> negative_words{"good", "morning"};
    std::vector<Example> training = {
        Example(positive_words, "positive"),
        Example(negative_words, "negative")
    };

    NaiveBayes bayes;
    bayes.fit(training);

    // None of these words occur in the training vocabulary.
    std::vector<std::string> unseen_words{"escape", "notfound"};
    Example unseen_example(unseen_words, "positive");
    const auto scores = bayes.scores(unseen_example);
    EXPECT_EQ(0, scores.size());
    EXPECT_EQ(0, scores.sum());
}
// Reads comma-separated feature rows from fp — V_SIZE doubles ("%lf,")
// followed by one int label ("%d\n") per line — and appends one Example
// per successfully parsed row to *examles (param name kept for interface
// compatibility; note it is a typo for "examples").
//
// Fix: the original loop was gated on `while (!feof(fp))`, but feof()
// only becomes true AFTER a read has failed, so a trailing newline or a
// short final line caused one extra Example to be built from stale or
// uninitialized data. The loop is now driven by fscanf's return value
// (number of items matched), which also stops cleanly on malformed input.
void readFile(FILE *fp, vector<Example> * examles) {
    for (;;) {
        double x[V_SIZE];
        int o;
        bool row_ok = true;
        for (int i = 0; i < V_SIZE; i++) {
            if (fscanf(fp, "%lf,", &x[i]) != 1) {
                row_ok = false;  // EOF or non-numeric token
                break;
            }
        }
        if (!row_ok || fscanf(fp, "%d\n", &o) != 1) {
            break;  // stop on the first incomplete row
        }
        examles->push_back(Example(x, o));
    }
}
void RunExample() { __itt_task_begin(pD, __itt_null, __itt_null, __itt_string_handle_create(L"End of run")); for(int i = 0; i < 50; ++i) { Example(i); } __itt_task_end(pD); }
// fast_scores should report only the winning class for an example whose
// words are in the vocabulary, and an empty result (size 0, sum 0) when no
// word from the example was seen during training.
TEST(naive_bayes, fast_scores) {
    std::vector<std::string> positive_words{"hello", "world"};
    std::vector<std::string> negative_words{"good", "morning"};
    std::vector<Example> training = {
        Example(positive_words, "positive"),
        Example(negative_words, "negative")
    };

    NaiveBayes bayes;
    bayes.fit(training);

    // Each training example should map to exactly its own class.
    const auto positive_scores = bayes.fast_scores(training[0]);
    EXPECT_EQ(1, positive_scores.size());
    EXPECT_EQ(2, positive_scores.get("positive"));

    const auto negative_scores = bayes.fast_scores(training[1]);
    EXPECT_EQ(1, negative_scores.size());
    EXPECT_EQ(2, negative_scores.get("negative"));

    // Out-of-vocabulary words yield an empty score map.
    std::vector<std::string> missing_words{"escape", "notfound"};
    Example missing_example(missing_words, "positive");
    const auto missing_scores = bayes.fast_scores(missing_example);
    EXPECT_EQ(0, missing_scores.size());
    EXPECT_EQ(0, missing_scores.sum());
}
//------------------------------------------------------------------------------ int main(void) { try { std::srand(std::time(0)); Example().Run(); return 0; } catch(oglplus::ProgramBuildError& pbe) { std::cerr << "Program build error (in " << pbe.GLSymbol() << ", " << pbe.ClassName() << ": '" << pbe.ObjectDescription() << "'): " << pbe.what() << " [" << pbe.File() << ":" << pbe.Line() << "] "; std::cerr << std::endl; std::cerr << "Build log:" << std::endl; std::cerr << pbe.Log() << std::endl; pbe.Cleanup(); } catch(oglplus::Error& err) { std::cerr << "Error (in " << err.GLSymbol() << ", " << err.ClassName() << ": '" << err.ObjectDescription() << "'): " << err.what() << " [" << err.File() << ":" << err.Line() << "] "; std::cerr << std::endl; err.Cleanup(); } catch(std::exception& se) { std::cerr << "Unspecified error: '" << se.what() << "'." << std::endl; } return 0; }
/************************************************************************ * float K_MeansPredict::EvaluatePattern( const vector< float >& Pat )const * purpose: * returns predicted value of pattern * params: * Pat: pattern - first element is target value * ************************************************************************/ float K_MeansPredict::EvaluatePattern( const vector< float >& Pat )const{ // convert pattern into coord vector< float > temp( Pat.size()-1 ); for( int j=0; j< temp.size(); j++ ){ temp[j] = Pat[j+1]; } Coord< float > Example( temp ); // Find closest mean float minDist; int closestMean=0; for( int i=0; i< _means.size(); i++ ) { float dist = Example.EuclideanDist( _means[i] ); if( i==0 || dist<minDist) { minDist = dist; closestMean = i; } } return( KeyMean(closestMean) ); }
// Demonstrates selecting one of two Example values with the conditional
// operator: Example(1) when i == 1, Example(2) otherwise.
Example FunctionUsingTernaryOperator(int i) {
    return (i != 1) ? Example(2) : Example(1);
}
// read input examples vector<Example> read_input_examples(string input_filename, Sparm &sparm) { vector<Example> sample; std::string line; ifstream inputStream(input_filename.c_str()); size_t maxFeatureNum = 0; if (inputStream.fail()) { //cout << "Cannot read from input file " << input_filename << "!" << endl; cerr << "Cannot read from input file " << input_filename << "!" << endl; exit(1); } const vector<double>& qt_time = sparm.GetQuantTime(); while (!getline(inputStream, line, '\n').eof()) { // process line std::string::size_type lastPos = line.find_first_of(" \n",0); double survival_time = 0; survival_time = atof((line.substr(0, lastPos).c_str())); // censoring status std::string::size_type censoredPos = line.find_first_of(" \n", lastPos+1); int censoring_status = atoi(line.substr(lastPos+1,censoredPos-lastPos).c_str()); bool c; if (censoring_status==1) { c = true; } else { c= false; } lastPos = censoredPos; vector<pair<size_t,double> > feature_vec; std::string::size_type pos = line.find_first_of(':', lastPos); while (std::string::npos != pos || std::string::npos != lastPos) { size_t i = (size_t) atoi((line.substr(lastPos, pos - lastPos).c_str())); lastPos = line.find_first_of(" \n", pos); double v = atof((line.substr(pos+1, lastPos - pos).c_str())); pos = line.find_first_of(':', lastPos); if (i>maxFeatureNum) maxFeatureNum = i; feature_vec.push_back(make_pair(i,v)); } SparseVector fvec(feature_vec); int n = 0; while ((n<sparm.GetMaxMonth())&&(survival_time>qt_time[n])) { n++; } sample.push_back(Example(fvec, n, c, survival_time)); } sparm.SetSizePsi(maxFeatureNum+1); inputStream.close(); return(sample); }
// Thin demo wrapper that simply invokes Example(). (Example() returns void,
// so the wrapper's original `return Example();` is equivalent to a plain
// call followed by falling off the end.)
void LogManagerExample() {
    Example();
}