/// Removes the edge between node \a n1 of type 1 and node \a n2 of type 2.
/// Both adjacency lists store mirrored Neighbor entries whose \c iter field
/// is the entry's own index and whose \c dual field is the index of the
/// mirrored entry on the other side; after an erase, all subsequent entries
/// shift down one slot, so both fields must be renumbered to keep the two
/// lists consistent. If the edge does not exist, nothing is removed.
void BipartiteGraph::eraseEdge( size_t n1, size_t n2 ) {
    DAI_ASSERT( n1 < nrNodes1() );
    DAI_ASSERT( n2 < nrNodes2() );
    size_t iter;
    // Search for edge among neighbors of n1
    for( iter = 0; iter < nb1(n1).size(); iter++ )
        if( nb1(n1, iter).node == n2 ) {
            // Remove it
            nb1(n1).erase( nb1(n1).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    // (entries before 'iter' kept their positions and need no update)
    for( ; iter < nb1(n1).size(); iter++ ) {
        Neighbor &m2 = nb1( n1, iter );
        m2.iter = iter;
        nb2( m2.node, m2.dual ).dual = iter;
    }
    // Search for edge among neighbors of n2
    for( iter = 0; iter < nb2(n2).size(); iter++ )
        if( nb2(n2, iter).node == n1 ) {
            // Remove it
            nb2(n2).erase( nb2(n2).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    // (original comment said "node values", but the code updates 'dual')
    for( ; iter < nb2(n2).size(); iter++ ) {
        Neighbor &m1 = nb2( n2, iter );
        m1.iter = iter;
        nb1( m1.node, m1.dual ).dual = iter;
    }
}
double nball(double *x, double *par) { // if (x[0]>=0.0 && x[0]<1.0) return nb0( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*(1-par[2]), par[4]*ntotal*par[2], 2, par[3]); // else if (x[0]>=1.0 && x[0]<2.0) return nb1( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*(1-par[2]), par[4]*ntotal*par[2], 2, par[3]); // else if (x[0]>=2.0 && x[0]<3.0) return nb2( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*(1-par[2]), par[4]*ntotal*par[2], 2, par[3]); // else if (x[0]>=3.0 && x[0]<4.0) return nb3( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*(1-par[2]), par[4]*ntotal*par[2], 2, par[3]); if (x[0]>=0.0 && x[0]<1.0) return nb0( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*0.14, par[4]*ntotal*0.80, 0, par[4]*ntotal*0.03, 2, par[3]); else if (x[0]>=1.0 && x[0]<2.0) return nb1( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*0.14, par[4]*ntotal*0.80, 0, par[4]*ntotal*0.03, 2, par[3]); else if (x[0]>=2.0 && x[0]<3.0) return nb2( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*0.14, par[4]*ntotal*0.80, 0, par[4]*ntotal*0.03, 2, par[3]); else if (x[0]>=3.0 && x[0]<4.0) return nb3( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*0.14, par[4]*ntotal, 0, par[4]*ntotal*0.03, 2, par[3]); else if (x[0]>=4.0 && x[0]<5.0) return nb4( par[0], par[1], 0.0, (1-par[4])*ntotal*(1-par[2]), (1-par[4])*ntotal*par[2], par[4]*ntotal*0.14, par[4]*ntotal, 0, par[4]*ntotal*0.03, 2, par[3]); }
/// Removes node \a n1 of type 1 together with all its incident edges.
/// Because type-1 nodes above \a n1 shift down by one, every type-2
/// adjacency list must be scanned: entries pointing at \a n1 are deleted,
/// entries pointing above \a n1 are renumbered (both their \c node index
/// and, via \c dual, the mirrored entry's back-reference on the type-1 side).
void BipartiteGraph::eraseNode1( size_t n1 ) {
    DAI_ASSERT( n1 < nrNodes1() );
    // Erase neighbor entry of node n1
    _nb1.erase( _nb1.begin() + n1 );
    // Adjust neighbor entries of nodes of type 2
    for( size_t n2 = 0; n2 < nrNodes2(); n2++ ) {
        // 'iter' is only advanced on the non-delete paths: after an erase the
        // next entry slides into the current slot and must be re-examined.
        for( size_t iter = 0; iter < nb2(n2).size(); ) {
            Neighbor &m1 = nb2(n2, iter);
            if( m1.node == n1 ) {
                // delete this entry, because it points to the deleted node
                nb2(n2).erase( nb2(n2).begin() + iter );
            } else if( m1.node > n1 ) {
                // update this entry and the corresponding dual of the neighboring node of type 1
                m1.iter = iter;
                m1.node--;
                nb1( m1.node, m1.dual ).dual = iter;
                iter++;
            } else {
                // node index below n1 is unaffected by the shift
                // NOTE(review): m1.iter is NOT refreshed on this path, so if an
                // earlier entry in this list was erased, stale 'iter' fields may
                // remain for low-indexed neighbors — verify against the class invariant.
                iter++;
            }
        }
    }
}
/// Removes node \a n2 of type 2 together with all its incident edges.
/// Mirror image of eraseNode1: type-2 nodes above \a n2 shift down by one,
/// so every type-1 adjacency list is scanned — entries pointing at \a n2 are
/// deleted, entries pointing above it are renumbered (their \c node index
/// and the mirrored entry's \c dual back-reference on the type-2 side).
void BipartiteGraph::eraseNode2( size_t n2 ) {
    DAI_ASSERT( n2 < nrNodes2() );
    // Erase neighbor entry of node n2
    _nb2.erase( _nb2.begin() + n2 );
    // Adjust neighbor entries of nodes of type 1
    for( size_t n1 = 0; n1 < nrNodes1(); n1++ ) {
        // 'iter' is only advanced on the non-delete paths: after an erase the
        // next entry slides into the current slot and must be re-examined.
        for( size_t iter = 0; iter < nb1(n1).size(); ) {
            Neighbor &m2 = nb1(n1, iter);
            if( m2.node == n2 ) {
                // delete this entry, because it points to the deleted node
                nb1(n1).erase( nb1(n1).begin() + iter );
            } else if( m2.node > n2 ) {
                // update this entry and the corresponding dual of the neighboring node of type 2
                m2.iter = iter;
                m2.node--;
                nb2( m2.node, m2.dual ).dual = iter;
                iter++;
            } else {
                // node index below n2 is unaffected by the shift
                // NOTE(review): m2.iter is NOT refreshed on this path — same
                // possible stale-'iter' concern as in eraseNode1; verify invariant.
                iter++;
            }
        }
    }
}
int main() { std::ofstream out("prediction.txt"); Dataset dataMy; Dataset dataMisha; Dataset dataTeach; Dataset dataTest, dataTest1; std::string str = "train1.csv"; std::string str1 = "test1.csv"; LoadTeachDataset(str, &dataTeach, 2); LoadTestDataset(str1, &dataTest, 2); LoadTestDataset(str1, &dataTest1, 2); vector<double> h, h1; // дл¤ ¤дра ≈пачечникова // h.push_back(3.111); // h.push_back(2.349); // дл¤ квартического ¤дра // h.push_back(3.83); // h.push_back(3.2); h.push_back(3.9); h.push_back(3.1); NaiveBayesKernelOtherDensity nb2(2, h); nb2.Learn(dataTeach); nb2.Classify(&dataTest); // NaiveBayesKernelOtherDensity nb22(2, h1); // nb22.Learn(dataTeach); // nb22.Classify(&dataTest1); //size_t err = LOO_h_vector<NaiveBayesKernelOtherDensity>(h, dataTeach); //std::cout << err << std::endl; // double minH = searchHvector_Force<NaiveBayesKernelOtherDensity>(2.1, 4.1, 0.1, h, 1, dataTeach); // std::cout << minH << std::endl; for (size_t index = 0; index < dataTest.size(); ++index) { out << dataTest[index].class_label << std::endl; dataMy.push_back(dataTest[index]); //dataMisha.push_back(dataTest1[index]); } Dataset _dataTeach; Dataset _dataTest, _dataTest1; std::string _str = "train2.csv"; std::string _str1 = "test2.csv"; LoadTeachDataset(_str, &_dataTeach, 10); LoadTestDataset(_str1, &_dataTest, 10); LoadTestDataset(_str1, &_dataTest1, 10); vector<double> _h, _h1; // дл¤ ¤дра ≈пачечникова // _h.push_back(10); // _h.push_back(10.8); // _h.push_back(0.2); // _h.push_back(0.156); // _h.push_back(0.1325); // _h.push_back(171); // _h.push_back(89.7); // _h.push_back(52.4); // _h.push_back(16.8); // _h.push_back(81.3); // дл¤ квартического ¤дра // _h.push_back(10.3102); // _h.push_back(10.1); // _h.push_back(0.2); // _h.push_back(0.157); // _h.push_back(0.1325); // _h.push_back(171); // _h.push_back(89.6); // _h.push_back(53.1); // _h.push_back(16.8); // _h.push_back(81); _h.push_back(10.3112); _h.push_back(10.1); _h.push_back(0.2); _h.push_back(0.157); 
_h.push_back(0.1335); _h.push_back(169); _h.push_back(90.5); _h.push_back(53.1); _h.push_back(16.8); _h.push_back(81.4); // size_t _err = LOO_h_vector<NaiveBayesKernelOtherDensity>(_h, _dataTeach); //std::cout << _err << std::endl; // double _minH = searchHvector_Force<NaiveBayesKernelOtherDensity>(80.4, 82.4, 0.1, _h, 9, _dataTeach); // std::cout << _minH << std::endl; NaiveBayesKernelOtherDensity _nb(2, _h); _nb.Learn(_dataTeach); _nb.Classify(&_dataTest); // NaiveBayesKernelOtherDensity _nb2(2, _h1); // _nb2.Learn(_dataTeach); // _nb2.Classify(&_dataTest1); for (size_t index = 0; index < _dataTest.size(); ++index) { out << _dataTest[index].class_label << std::endl; dataMy.push_back(_dataTest[index]); //dataMisha.push_back(_dataTest1[index]); } out.close(); // vector<vector<size_t>> result(2, vector<size_t>(2)); // size_t res = DataTest(dataMisha, dataMy, result); // std::cout << (double)res << std::endl; //std::ifstream fin("out1.csv"); std::ifstream fin("ans.txt"); if (!fin) { std::cout << "err"; return 0; } std::cout << "begin" << std::endl; while (!fin.eof()) { size_t class_label; fin >> class_label; Object obj; obj.class_label = class_label; dataMisha.push_back(obj); } vector<vector<size_t>> result(2, vector<size_t>(2)); size_t res = DataTest(dataMisha, dataMy, result); std::cout << (double)res / 8340.0 << std::endl; return 0; }