void TestsPnlHigh::TestGetDiscreteParentValuesIndexes()
{
    printf("TestGetDiscreteParentValuesIndexes\n");

    BayesNet *net = SimpleCGM1();

    net->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    net->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");

    WCondGaussianDistribFun *pCGDF = dynamic_cast<WCondGaussianDistribFun *>(
        net->m_pNet->m_paDistribution->Distribution(1));

    TokArr ta("Tab0^State0");
    Vector<int> dpInd = pCGDF->GetDiscreteParentValuesIndexes(ta);

    if (dpInd.size() != 1)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Size of dpInd is wrong");
    }
    if (dpInd[0] != 0)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetDiscreteParentValuesIndexes works incorrectly");
    }

    TokArr ta2("Tab0^State1");
    dpInd = pCGDF->GetDiscreteParentValuesIndexes(ta2);

    if (dpInd[0] != 1)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetDiscreteParentValuesIndexes works incorrectly");
    }

    delete net;
}
void CrashTestJtreeInferenceSoftMax()
{
    BayesNet *net = SimpleSoftMaxModel();

    net->SetProperty("Inference", "jtree");

    TokArr jpd5 = net->GetJPD("node5");
    std::cout << "jpd node5:\t" << jpd5 << "\n";

    delete net;
}
void TestAddArc()
{
    BayesNet *net = SevenNodesModel();

    net->AddArc("node2", "node5");
    // net->SetPTabular("node6^True node6^False", "0.2 0.8", "node4^True");
    // net->SetPTabular("node6^True node6^False", "0.1 0.9", "node4^False");

    // all continuous nodes are observed, plus discrete node4
    net->EditEvidence("node0^0.3");
    net->EditEvidence("node1^0.2");
    net->EditEvidence("node5^0.9");
    net->EditEvidence("node4^True");

    net->SetProperty("Inference", "jtree");

    TokArr jpd3 = net->GetJPD("node3");
    std::cout << "jpd node3:\t" << jpd3 << "\n";
    TokArr jpd6 = net->GetJPD("node6");
    std::cout << "jpd node6:\t" << jpd6 << "\n";
    TokArr jpd2 = net->GetJPD("node2");
    std::cout << "jpd node2:\t" << jpd2 << "\n";

    delete net;

    std::cout << "TestAddArc is completed successfully" << std::endl;
}
void TestsPnlHigh::TestSetPGaussian()
{
    printf("TestSetPGaussian\n");

    BayesNet *net = SimpleCGM1();

    net->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    net->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");

    WCondGaussianDistribFun *pCGDF = dynamic_cast<WCondGaussianDistribFun *>(
        net->m_pNet->m_paDistribution->Distribution(1));

    int IndexTab0;
    int ValueTab0;
    Tok t("Tab0^State0");
    pCGDF->desc()->getIndexAndValue(&IndexTab0, &ValueTab0, t);

    Vector<int> aIndex(pCGDF->desc()->nNode() + 3, 0);
    aIndex[IndexTab0] = 0;
    aIndex[4] = 0;
    aIndex[5] = 0;

    if ((pCGDF->GetAValue(pnl::matMean, aIndex) != 0.0f) ||
        (pCGDF->GetAValue(pnl::matCovariance, aIndex) != 2.5f) ||
        (pCGDF->GetAValue(pnl::matWeights, aIndex) != 1.0f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "There is an error in FillData");
    }

    aIndex[5] = 1;
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 3.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "There is an error in FillData");
    }

    aIndex[IndexTab0] = 1;
    aIndex[5] = 0;
    if ((pCGDF->GetAValue(pnl::matMean, aIndex) != -1.5f) ||
        (pCGDF->GetAValue(pnl::matCovariance, aIndex) != 0.75f) ||
        (pCGDF->GetAValue(pnl::matWeights, aIndex) != 0.5f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "There is an error in FillData");
    }

    aIndex[5] = 1;
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 2.5f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "There is an error in FillData");
    }

    delete net;
}
BayesNet *BayesNet::loadFromTextFile(const string &filename)
{
    BayesNet *bn = nullptr;
    uint nNodes;                            // number of nodes in the network
    vector<uint> nodeSizes;                 // number of values each (discrete) node can take
    vector<vector<uint>> mMapParents;       // each node has a set of parent nodes
    vector<vector<double>> mapCpt;          // each node has a conditional probability table
    vector<double> listJointProbabilities;  // precomputed joint probabilities over the network
    uint value;
    double dValue;

    ifstream inf(filename);
    if (!inf.is_open())
    {
        cout << "Open file error!" << "\n";
        return nullptr;
    }

    /// nodeSizes
    inf >> nNodes;
    for (uint i = 0; i < nNodes; ++i)
    {
        inf >> value;
        nodeSizes.push_back(value);
        mMapParents.push_back(vector<uint>());
        mapCpt.push_back(vector<double>());
    }

    /// mMapParents
    uint nodeIndex, nParents;
    for (uint i = 0; i < nNodes; ++i)
    {
        inf >> nodeIndex >> nParents;
        for (uint j = 0; j < nParents; ++j)
        {
            inf >> value;
            mMapParents[nodeIndex].push_back(value);
        }
    }

    /// CPTs (the second count on each line is the number of CPT entries, not parents)
    uint nCptValues;
    for (uint i = 0; i < nNodes; ++i)
    {
        inf >> nodeIndex >> nCptValues;
        for (uint j = 0; j < nCptValues; ++j)
        {
            inf >> dValue;
            mapCpt[nodeIndex].push_back(dValue);
        }
    }

    bn = new BayesNet(nNodes, nodeSizes, mMapParents, mapCpt);

    /// jointProbabilities (optional; a zero count means none are stored)
    uint size = 0;
    inf >> size;
    if (size > 0)
    {
        for (uint i = 0; i < size; ++i)
        {
            inf >> dValue;
            listJointProbabilities.push_back(dValue);
        }
        bn->setJointDistribution(listJointProbabilities);
    }
    // with no stored joint distribution, the network keeps only its per-node CPTs

    return bn;
}
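// A minimal input file for loadFromTextFile, inferred from the reading order
// above; this layout is an assumption based on the parser, not a documented
// format:
//
//   2                      (nNodes)
//   2 2                    (nodeSizes: two binary nodes)
//   0 0                    (node 0: no parents)
//   1 1 0                  (node 1: one parent, node 0)
//   0 2 0.6 0.4            (node 0: two CPT values)
//   1 4 0.7 0.3 0.2 0.8    (node 1: four CPT values, one row per parent value)
//   0                      (no precomputed joint distribution)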
//Cont0(3) Tab0(0)
//    |      |
//    \/     \/
//     Cont1(1)
//        |
//        \/
//     Cont2(2)  (with parameters)
BayesNet *TestsPnlHigh::SimpleCGM2()
{
    BayesNet *net = SimpleCGM1();

    net->SetPGaussian("Cont0", "1.5 -0.5", "1.0 0.3 0.3 2.0", TokArr(), TokArr());
    net->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    net->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");
    net->SetPGaussian("Cont2", "0.1", "1.1", "0.0");

    return net;
}
void TestCondSoftMaxParamLearning(bool DeleteNet)
{
    // BayesNet *net = SimpleCondSoftMaxModel();
    BayesNet *netToLearn = SimpleCondSoftMaxModel();
    float eps = 1e-1f;

    int nEvid = 100;
    netToLearn->GenerateEvidences(nEvid);
    netToLearn->LearnParameters();

    String nodes[] = {"node0", "node1", "node2"};

    /*
    int i, j;
    TokArr LearnParam, Param;
    for (i = 0; i < 3; i++)
    {
        LearnParam = netToLearn->GetGaussianMean(nodes[i]);
        Param = net->GetGaussianMean(nodes[i]);
        if (LearnParam[0].fload.size() != Param[0].fload.size())
        {
            PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
        }
        for (j = 0; j < LearnParam[0].fload.size(); j++)
        {
            if (LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl > eps)
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
        }

        LearnParam = netToLearn->GetGaussianCovar(nodes[i]);
        Param = net->GetGaussianCovar(nodes[i]);
        if (LearnParam[0].fload.size() != Param[0].fload.size())
        {
            PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
        }
        for (j = 0; j < LearnParam[0].fload.size(); j++)
        {
            if (LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl > eps)
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
        }
    }
    */

    if (DeleteNet)
    {
        delete netToLearn;
    }

    std::cout << "TestCondSoftMaxParamLearning is completed successfully" << std::endl;
}
void run(unsigned int size, unsigned int select)
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;
    map<vector<int>, map<int, double>> sinkCPT;

    for (unsigned int i = 0; i < size; i++)
    {
        // The selected source node is deterministically 1, all others are
        // deterministically 0; the sink gets one CPT row per one-hot parent
        // configuration, copying the value of the parent that is set.
        if (i == select)
        {
            dist.insert(make_pair(0, 0.0));
            dist.insert(make_pair(1, 1.0));
        }
        else
        {
            dist.insert(make_pair(0, 1.0));
            dist.insert(make_pair(1, 0.0));
        }
        cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
        bn.add_node(i, CondProb<>(cpt));
        cpt.clear();

        vector<int> parents(size, 0);
        parents[i] = 1;
        sinkCPT.insert(CondProb<>::CondCase(parents, dist));

        dist.clear();
    }

    set<int> parents;
    for (unsigned int i = 0; i < size; i++)
        parents.insert(i);
    bn.add_node(size, parents, CondProb<>(sinkCPT));

    map<int, int> values;
    for (unsigned int i = 0; i < size; i++)
        values.insert(make_pair(i, 0)); // every node, not just node 0
    values[select] = 1;                 // map::insert would not overwrite an existing key

    map<std::map<int, int>, double> marginal_dist =
        bn.marginal_dist(values, 512, SampleStrategy::GIBBS);
    assertEquals(marginal_dist[values], (double)1.0);
}
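// A minimal driver for run(), a hedged sketch (the harness that actually
// invokes these tests is not shown here): every choice of `select` should
// concentrate all marginal mass on the queried configuration.
void runAllSelections(unsigned int size)
{
    for (unsigned int select = 0; select < size; ++select)
    {
        run(size, select);
    }
}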
void TestSetDistributionSoftMax()
{
    BayesNet *net = SimpleSoftMaxModel();

    if (net->GetGaussianMean("node0")[0].FltValue() != 0.1f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node1")[0].FltValue() != 0.2f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node2")[0].FltValue() != 0.3f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node2 : Setting or getting gaussian parameters is wrong");
    }

    if (net->GetGaussianCovar("node0")[0].FltValue() != 0.9f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node1")[0].FltValue() != 0.8f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node2")[0].FltValue() != 0.7f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node2 : Setting or getting gaussian parameters is wrong");
    }

    if ((net->GetSoftMaxOffset("node5")[0].FltValue(0).fl != 0.1f) ||
        (net->GetSoftMaxOffset("node5")[0].FltValue(1).fl != 0.1f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "node5 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5 = net->GetSoftMaxWeights("node5");
    if ((node5[0].FltValue(0).fl != 0.3f) ||
        (node5[0].FltValue(1).fl != 0.4f) ||
        (node5[0].FltValue(2).fl != 0.5f) ||
        (node5[0].FltValue(3).fl != 0.6f) ||
        (node5[0].FltValue(4).fl != 0.7f) ||
        (node5[0].FltValue(5).fl != 0.8f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "node5 : Setting or getting softmax parameters is wrong");
    }

    delete net;
    std::cout << "TestSetDistributionSoftMax is completed successfully" << std::endl;
}
void TestJtreeInference1SevenNodesModel()
{
    BayesNet *net = SevenNodesModel();

    // all continuous nodes are observed, plus discrete node4
    net->EditEvidence("node0^0.3");
    net->EditEvidence("node1^0.2");
    net->EditEvidence("node5^0.9");
    net->EditEvidence("node4^True");

    net->SetProperty("Inference", "jtree");

    TokArr jpd3 = net->GetJPD("node3");
    std::cout << "jpd node3:\t" << jpd3 << "\n";
    TokArr jpd6 = net->GetJPD("node6");
    std::cout << "jpd node6:\t" << jpd6 << "\n";
    TokArr jpd2 = net->GetJPD("node2");
    std::cout << "jpd node2:\t" << jpd2 << "\n";

    delete net;
    std::cout << "TestJtreeInference1SevenNodesModel is completed successfully" << std::endl;
}
void canMarginalizeNode()
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;

    dist.insert(make_pair(0, 0.7));
    dist.insert(make_pair(1, 0.3));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(0, CondProb<>(cpt));

    map<int, double> marginal_dist = bn.marginal_dist(0, 512);
    assertTrue(marginal_dist[0] > 0.65 && marginal_dist[0] < 0.75);
    assertTrue(marginal_dist[1] > 0.25 && marginal_dist[1] < 0.35);
}
void canMarginalizeNetwork()
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;

    dist.insert(make_pair(0, 0.7));
    dist.insert(make_pair(1, 0.3));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(0, CondProb<>(cpt));

    map<int, int> values;
    values.insert(make_pair(0, 0));

    map<std::map<int, int>, double> marginal_dist =
        bn.marginal_dist(values, 512, SampleStrategy::GIBBS);
    assertTrue(marginal_dist[values] > 0.65 && marginal_dist[values] < 0.75);
}
BayesNet *TestsPnlHigh::CreateCondGaussianModel1()
{
    // Cont1 Tab1
    //   |    |
    //   \/   \/
    //    Cont2
    BayesNet *net = new BayesNet();

    net->AddNode(continuous^"Cont1 Cont2", "dim1");
    net->AddNode(discrete^"Tab1", "dim1 dim2");
    net->AddArc("Cont1 Tab1", "Cont2");

    return net;
}
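// A sketch of how CreateCondGaussianModel1 might be parameterized, mirroring
// the SetPGaussian calls used for SimpleCGM1; the helper name and the concrete
// numbers are illustrative assumptions, not part of the tested API:
BayesNet *TestsPnlHigh::CreateCondGaussianModel1WithParams()
{
    BayesNet *net = CreateCondGaussianModel1();

    net->SetPGaussian("Cont1", "0.0", "1.0");
    // Cont2 has the discrete parent Tab1, so each of its states gets its own
    // mean, covariance and weight on the continuous parent Cont1
    net->SetPGaussian("Cont2", "1.0", "2.0", "0.5", "Tab1^dim1");
    net->SetPGaussian("Cont2", "-1.0", "1.5", "0.7", "Tab1^dim2");

    return net;
}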
void TestJtreeInferenceSoftMax1()
{
    BayesNet *net = SimpleSoftMaxModel();

    // all continuous nodes are observed
    net->EditEvidence("node0^0.3");
    net->EditEvidence("node1^0.2");
    net->EditEvidence("node2^0.9");

    net->SetProperty("Inference", "jtree");

    TokArr jpd5 = net->GetJPD("node5");
    std::cout << "jpd node5:\t" << jpd5 << "\n";

    delete net;
    std::cout << "TestJtreeInferenceSoftMax1 is completed successfully" << std::endl;
}
BayesNet *VerySimpleMultivariateGaussianModel()
{
    // NodeA -> NodeB -> NodeC
    BayesNet *net = new BayesNet();

    net->AddNode(continuous^"NodeB NodeA NodeC", "dim1 dim2");
    // net->AddNode(continuous^"NodeA NodeB NodeC", "dim1");

    net->AddArc("NodeA", "NodeB");
    net->AddArc("NodeB", "NodeC");

    net->SetPGaussian("NodeA", "1.0 2.0", "10.0 0.1 0.1 10.0");
    net->SetPGaussian("NodeB", "0.5 1.5", "7.0 0.0 0.0 7.0", "0.1 0.2 0.3 0.4");
    net->SetPGaussian("NodeC", "0.8 1.8", "3.5 0.5 0.5 3.5", "0.5 0.6 0.7 0.8");

    return net;
}
BayesNet *VerySimpleGaussianModel()
{
    // NodeA -> NodeB -> NodeC
    BayesNet *net = new BayesNet();

    net->AddNode(continuous^"NodeB NodeA NodeC", "dim1");
    // net->AddNode(continuous^"NodeA NodeB NodeC", "dim1");

    net->AddArc("NodeA", "NodeB");
    net->AddArc("NodeB", "NodeC");

    net->SetPGaussian("NodeA", "1.0", "10.0");
    net->SetPGaussian("NodeB", "0.5", "7.0", "0.1");
    net->SetPGaussian("NodeC", "0.8", "3.5", "0.4");

    return net;
}
BayesNet *TestsPnlHigh::SimpleCGM1()
{
    //Cont0(3) Tab0(0)
    //    |      |
    //    \/     \/
    //     Cont1(1)
    //        |
    //        \/
    //     Cont2(2)
    BayesNet *net = new BayesNet();

    net->AddNode(discrete^"Tab0", "State0 State1");
    net->AddNode(continuous^"Cont1 Cont2", "Dim0");
    net->AddNode(continuous^"Cont0", "Dim0 Dim1");

    net->AddArc("Cont0 Tab0", "Cont1");
    net->AddArc("Cont1", "Cont2");

    return net;
}
void canSample()
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;

    dist.insert(make_pair(0, 1.0));
    dist.insert(make_pair(1, 0.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(0, CondProb<>(cpt));
    assertEquals(bn.sample_node(0), 0);

    cpt.clear();
    dist.clear();

    dist.insert(make_pair(0, 0.0));
    dist.insert(make_pair(1, 1.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(1, CondProb<>(cpt));
    assertEquals(bn.sample_node(1), 1);
}
void TestNodeTypes()
{
    BayesNet *net = SimpleCondSoftMaxModel();

    TokArr n0t = net->GetNodeType("node0");
    TokArr n1t = net->GetNodeType("node1");
    TokArr n2t = net->GetNodeType("node2");
    TokArr n3t = net->GetNodeType("node3");
    TokArr n5t = net->GetNodeType("node5");
    TokArr n6t = net->GetNodeType("node6");

    printf("\nNodes types\n");
    printf("Node 0 type: %s\n", String(n0t).c_str());
    printf("Node 1 type: %s\n", String(n1t).c_str());
    printf("Node 2 type: %s\n", String(n2t).c_str());
    printf("Node 3 type: %s\n", String(n3t).c_str());
    printf("Node 5 type: %s\n", String(n5t).c_str());
    printf("Node 6 type: %s\n", String(n6t).c_str());

    delete net;
}
/* ************************************************************************* */
TEST( SymbolicBayesNet, constructor )
{
    Ordering o;
    o += X(2), L(1), X(1);

    // Create manually
    IndexConditional::shared_ptr
        x2(new IndexConditional(o[X(2)], o[L(1)], o[X(1)])),
        l1(new IndexConditional(o[L(1)], o[X(1)])),
        x1(new IndexConditional(o[X(1)]));
    BayesNet<IndexConditional> expected;
    expected.push_back(x2);
    expected.push_back(l1);
    expected.push_back(x1);

    // Create from a factor graph
    GaussianFactorGraph factorGraph = createGaussianFactorGraph(o);
    SymbolicFactorGraph fg(factorGraph);

    // eliminate it
    SymbolicBayesNet actual = *SymbolicSequentialSolver(fg).eliminate();

    CHECK(assert_equal(expected, actual));
}
BayesNet* RPSModel()
{
    BayesNet *net = new BayesNet();

    TokArr aChoice = "Rock Paper Scissors"; // possible values for nodes

    // build graph: add nodes to the net
    net->AddNode(discrete ^ "PreviousCompTurn PreviousHumanTurn CurrentHumanTurn", aChoice);

    // add arcs to create the following Bayes net:
    //
    // PreviousCompTurn   PreviousHumanTurn
    //        |                  |
    //        V                  V
    //          CurrentHumanTurn
    net->AddArc("PreviousCompTurn", "CurrentHumanTurn");
    net->AddArc("PreviousHumanTurn", "CurrentHumanTurn");

    return net;
}
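// A hedged usage sketch for RPSModel: the SetPTabular / EditEvidence / GetJPD
// calls mirror their use elsewhere in these tests, but the probabilities (and
// the single conditional row shown) are illustrative assumptions; a complete
// model needs one row per combination of parent values:
void RPSModelUsageSketch()
{
    BayesNet *net = RPSModel();

    net->SetPTabular("PreviousCompTurn^Rock PreviousCompTurn^Paper PreviousCompTurn^Scissors",
                     "0.33 0.33 0.34");
    net->SetPTabular("PreviousHumanTurn^Rock PreviousHumanTurn^Paper PreviousHumanTurn^Scissors",
                     "0.33 0.33 0.34");
    net->SetPTabular("CurrentHumanTurn^Rock CurrentHumanTurn^Paper CurrentHumanTurn^Scissors",
                     "0.4 0.4 0.2",
                     "PreviousCompTurn^Rock PreviousHumanTurn^Paper");

    net->EditEvidence("PreviousCompTurn^Rock PreviousHumanTurn^Paper");
    TokArr jpd = net->GetJPD("CurrentHumanTurn");
    std::cout << "jpd CurrentHumanTurn:\t" << jpd << "\n";

    delete net;
}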
void canAverage()
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;

    // nodes 0 and 1 are deterministically 0
    dist.insert(make_pair(0, 1.0));
    dist.insert(make_pair(1, 0.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(0, CondProb<>(cpt));
    cpt.clear();
    dist.clear();

    dist.insert(make_pair(0, 1.0));
    dist.insert(make_pair(1, 0.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(1, CondProb<>(cpt));
    cpt.clear();
    dist.clear();

    // node 2's CPT; dist must be cleared between cases, since map::insert
    // does not overwrite keys that are already present
    dist.insert(make_pair(1, 0.7));
    dist.insert(make_pair(0, 0.3));
    cpt.insert(CondProb<>::CondCase(vector<int> {0, 0}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.5));
    dist.insert(make_pair(0, 0.5));
    cpt.insert(CondProb<>::CondCase(vector<int> {0, 1}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.5));
    dist.insert(make_pair(0, 0.5));
    cpt.insert(CondProb<>::CondCase(vector<int> {1, 0}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.3));
    dist.insert(make_pair(0, 0.7));
    cpt.insert(CondProb<>::CondCase(vector<int> {1, 1}, dist));
    bn.add_node(2, {0, 1}, CondProb<int>({1, 2}, cpt));

    // both parents are always 0, so E[node2] = 0.7
    double ev = bn.average_value(2, 1024);
    assertTrue(ev > 0.6 && ev < 0.8);
}
void TestsPnlHigh::TestGetGaussianMeanCovarWeights()
{
    printf("TestGetGaussianMeanCovarWeights\n");

    BayesNet *net = SimpleCGM1();

    net->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    net->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");

    if (net->GetGaussianMean("Cont1", "Tab0^State0")[0].FltValue() != 0.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianMean works incorrectly");
    }
    if (net->GetGaussianMean("Cont1", "Tab0^State1")[0].FltValue() != -1.5f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianMean works incorrectly");
    }
    if (net->GetGaussianCovar("Cont1", "Tab0^State0")[0].FltValue() != 2.5f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianCovar works incorrectly");
    }
    if (net->GetGaussianCovar("Cont1", "Tab0^State1")[0].FltValue() != 0.75f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianCovar works incorrectly");
    }
    if (String(net->GetGaussianWeights("Cont1", "Cont0", "Tab0^State0")[0]) != "1.000000^3.000000")
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianWeights works incorrectly");
    }
    if (String(net->GetGaussianWeights("Cont1", "Cont0", "Tab0^State1")[0]) != "0.500000^2.500000")
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetGaussianWeights works incorrectly");
    }

    delete net;
}
BayesNet *SimpleSoftMaxModel()
{
    BayesNet *net = new BayesNet();

    net->AddNode(continuous^"node0 node1 node2");
    net->AddNode(discrete^"node5", "True False");

    net->AddArc("node0", "node5");
    net->AddArc("node1", "node5");
    net->AddArc("node2", "node5");

    net->SetPGaussian("node0", "0.1", "0.9");
    net->SetPGaussian("node1", "0.2", "0.8");
    net->SetPGaussian("node2", "0.3", "0.7");

    net->SetPSoftMax("node5^True node5^False", "0.3 0.4 0.5 0.6 0.7 0.8", "0.1 0.1");

    return net;
}
void TestJtreeInference2SevenNodesModel()
{
    BayesNet *net = SevenNodesModel();

    // all discrete nodes are observed, plus continuous node1
    net->EditEvidence("node2^True");
    net->EditEvidence("node3^False");
    net->EditEvidence("node4^False");
    net->EditEvidence("node6^True");
    net->EditEvidence("node1^0.55");

    net->SetProperty("Inference", "jtree");

    TokArr jpd0 = net->GetJPD("node0");
    std::cout << "jpd node0:\t" << jpd0 << "\n";
    TokArr jpd5 = net->GetJPD("node5");
    std::cout << "jpd node5:\t" << jpd5 << "\n";

    delete net;
    std::cout << "TestJtreeInference2SevenNodesModel is completed successfully" << std::endl;
}
void canMarkovBlanket()
{
    BayesNet<> bn;
    map<vector<int>, map<int, double>> cpt;
    map<int, double> dist;

    dist.insert(make_pair(0, 1.0));
    dist.insert(make_pair(1, 0.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(0, CondProb<>(cpt));
    cpt.clear();
    dist.clear();

    dist.insert(make_pair(0, 1.0));
    dist.insert(make_pair(1, 0.0));
    cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
    bn.add_node(1, CondProb<>(cpt));
    cpt.clear();
    dist.clear();

    // node 2 depends on nodes 0 and 1; dist must be cleared between cases,
    // since map::insert does not overwrite keys that are already present
    dist.insert(make_pair(1, 0.7));
    dist.insert(make_pair(0, 0.3));
    cpt.insert(CondProb<>::CondCase(vector<int> {0, 0}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.5));
    dist.insert(make_pair(0, 0.5));
    cpt.insert(CondProb<>::CondCase(vector<int> {0, 1}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.5));
    dist.insert(make_pair(0, 0.5));
    cpt.insert(CondProb<>::CondCase(vector<int> {1, 0}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.3));
    dist.insert(make_pair(0, 0.7));
    cpt.insert(CondProb<>::CondCase(vector<int> {1, 1}, dist));
    bn.add_node(2, {0, 1}, CondProb<int>({1, 2}, cpt));
    cpt.clear();
    dist.clear();

    // node 3 depends on node 2
    dist.insert(make_pair(1, 0.7));
    dist.insert(make_pair(0, 0.3));
    cpt.insert(CondProb<>::CondCase(vector<int> {0}, dist));
    dist.clear();
    dist.insert(make_pair(1, 0.5));
    dist.insert(make_pair(0, 0.5));
    cpt.insert(CondProb<>::CondCase(vector<int> {1}, dist));
    bn.add_node(3, {2}, CondProb<int>({2}, cpt));

    // blanket of node 2: its parents (0, 1), its child (3), and the node itself
    set<int> blanket = bn.markov_blanket(2);
    assertEquals(blanket, set<int> {2, 0, 1, 3});
}
void TestsPnlHigh::Test2EditEvidence()
{
    BayesNet *net = VerySimpleMultivariateGaussianModel();

    net->EditEvidence("NodeB^dim1^0.0 NodeB^dim2^1.0");
    net->EditEvidence("NodeA^dim1^0.7 NodeA^dim2^1.7");
    net->EditEvidence("NodeC^dim1^0.7 NodeC^dim2^1.7");

    net->ClearEvid();

    net->EditEvidence("NodeB^dim1^0.0 NodeB^dim2^1.0");
    net->EditEvidence("NodeA^dim1^0.7 NodeA^dim2^1.7");
    net->EditEvidence("NodeC^dim1^0.7 NodeC^dim2^1.7");

    delete net;

    net = VerySimpleGaussianModel();

    net->EditEvidence("NodeB^dim1^0.0");
    net->EditEvidence("NodeA^dim1^0.7");
    net->EditEvidence("NodeC^dim1^1.4");

    delete net;
}
void TestJtreeInferenceCondSoftMax2()
{
    BayesNet *net = SimpleCondSoftMaxModel();

    // all discrete nodes are observed, plus continuous node1
    net->EditEvidence("node3^False");
    net->EditEvidence("node6^True");
    net->EditEvidence("node5^True");
    net->EditEvidence("node1^0.2");

    net->SetProperty("Inference", "jtree");

    TokArr jpd0 = net->GetJPD("node0");
    std::cout << "jpd node0:\t" << jpd0 << "\n";
    TokArr jpd2 = net->GetJPD("node2");
    std::cout << "jpd node2:\t" << jpd2 << "\n";

    delete net;
    std::cout << "TestJtreeInferenceCondSoftMax2 is completed successfully" << std::endl;
}
int main()
{
    int hold;
    vector<double> probVec = { .7, .1 };
    vector<double> childProbVec = { .8 };

    BayesNet myNet;
    myNet.AddNode("someNode", probVec);
    myNet.AddNode("someNode", "childNode", childProbVec);
    myNet.PrintAllNodes(myNet.head);

    // TODO: change CalcProbs to return an int
    Node *someNode = myNet.GetNode(myNet.head, "someNode");
    myNet.CalcProbs(someNode);

    cin >> hold; // keep the console window open
    return 0;
}
void TestGibbsInferenceSoftMax()
{
    BayesNet *net = SimpleSoftMaxModel();

    // no observed nodes
    net->SetProperty("Inference", "gibbs");

    TokArr jpd0 = net->GetJPD("node0");
    std::cout << "jpd node0:\t" << jpd0 << "\n";
    TokArr jpd1 = net->GetJPD("node1");
    std::cout << "jpd node1:\t" << jpd1 << "\n";
    TokArr jpd2 = net->GetJPD("node2");
    std::cout << "jpd node2:\t" << jpd2 << "\n";
    TokArr jpd5 = net->GetJPD("node5");
    std::cout << "jpd node5:\t" << jpd5 << "\n";

    delete net;
    std::cout << "TestGibbsInferenceSoftMax is completed successfully" << std::endl;
}