void TestSetDistributionSoftMax()
{
    BayesNet *net = SimpleSoftMaxModel();

    // Check the Gaussian parameters set on the continuous nodes node0..node2.
    if (net->GetGaussianMean("node0")[0].FltValue() != 0.1f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node1")[0].FltValue() != 0.2f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node2")[0].FltValue() != 0.3f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node2 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node0")[0].FltValue() != 0.9f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node1")[0].FltValue() != 0.8f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node2")[0].FltValue() != 0.7f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node2 : Setting or getting gaussian parameters is wrong");
    }

    // Check the softmax offsets and weights set on node5.
    if ((net->GetSoftMaxOffset("node5")[0].FltValue(0).fl != 0.1f) ||
        (net->GetSoftMaxOffset("node5")[0].FltValue(1).fl != 0.1f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5 = net->GetSoftMaxWeights("node5");
    float val0 = node5[0].FltValue(0).fl;
    float val1 = node5[0].FltValue(1).fl;
    float val2 = node5[0].FltValue(2).fl;
    float val3 = node5[0].FltValue(3).fl;
    float val4 = node5[0].FltValue(4).fl;
    float val5 = node5[0].FltValue(5).fl;
    if ((val0 != 0.3f) || (val1 != 0.4f) || (val2 != 0.5f) ||
        (val3 != 0.6f) || (val4 != 0.7f) || (val5 != 0.8f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    delete net;
    std::cout << "TestSetDistributionSoftMax is completed successfully" << std::endl;
}
void TestSetDistributionSevenNodesModel()
{
    BayesNet *net = SevenNodesModel();

    // Check the Gaussian parameters set on node0 and node1.
    if (net->GetGaussianMean("node0")[0].FltValue() != 0.5f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node1")[0].FltValue() != 0.5f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node0")[0].FltValue() != 1.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node1")[0].FltValue() != 1.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }

    // Check the tabular distribution of node2.
    float val12 = net->GetPTabular("node2")[0].FltValue();
    float val22 = net->GetPTabular("node2")[1].FltValue();
    if ((val12 != 0.7f) || (val22 != 0.3f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node2 : Setting or getting tabular parameters is wrong");
    }

    // Check the softmax offsets and weights of node3 for both states of node2.
    TokArr off5True = net->GetSoftMaxOffset("node3", "node2^True");
    if ((off5True[0].FltValue(0).fl != 0.3f) ||
        (off5True[0].FltValue(1).fl != 0.5f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node3 : Setting or getting softmax parameters is wrong");
    }

    TokArr off5False = net->GetSoftMaxOffset("node3", "node2^False");
    float val1off = off5False[0].FltValue(0).fl;
    float val2off = off5False[0].FltValue(1).fl;
    if ((val1off != 0.3f) || (val2off != 0.5f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node3 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5True = net->GetSoftMaxWeights("node3", "node2^True");
    if ((node5True[0].FltValue(0).fl != 0.5f) ||
        (node5True[0].FltValue(1).fl != 0.1f) ||
        (node5True[0].FltValue(2).fl != 0.5f) ||
        (node5True[0].FltValue(3).fl != 0.7f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node3 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5False = net->GetSoftMaxWeights("node3", "node2^False");
    float val0 = node5False[0].FltValue(0).fl;
    float val1 = node5False[0].FltValue(1).fl;
    float val2 = node5False[0].FltValue(2).fl;
    float val3 = node5False[0].FltValue(3).fl;
    if ((val0 != 0.5f) || (val1 != 0.4f) || (val2 != 0.5f) || (val3 != 0.7f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node3 : Setting or getting softmax parameters is wrong");
    }

    // Check the conditional tabular distribution of node4 given node3.
    float val40 = net->GetPTabular("node4", "node3^False")[0].FltValue();
    float val41 = net->GetPTabular("node4", "node3^False")[1].FltValue();
    float val42 = net->GetPTabular("node4", "node3^True")[0].FltValue();
    float val43 = net->GetPTabular("node4", "node3^True")[1].FltValue();
    if ((val40 != 0.7f) || (val41 != 0.3f) || (val42 != 0.2f) || (val43 != 0.8f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node4 : Setting or getting tabular parameters is wrong");
    }

    // Check the conditional Gaussian parameters of node5 given node3.
    if ((net->GetGaussianMean("node5", "node3^True")[0].FltValue() != 0.5f) ||
        (net->GetGaussianMean("node5", "node3^False")[0].FltValue() != 1.0f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting gaussian parameters is wrong");
    }
    if ((net->GetGaussianCovar("node5", "node3^True")[0].FltValue() != 0.5f) ||
        (net->GetGaussianCovar("node5", "node3^False")[0].FltValue() != 1.0f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting gaussian parameters is wrong");
    }

    // Check the softmax offsets and weights of node6 for both states of node4.
    TokArr off6True = net->GetSoftMaxOffset("node6", "node4^True");
    if ((off6True[0].FltValue(0).fl != 0.1f) ||
        (off6True[0].FltValue(1).fl != 0.9f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node6 : Setting or getting softmax parameters is wrong");
    }

    TokArr off6False = net->GetSoftMaxOffset("node6", "node4^False");
    if ((off6False[0].FltValue(0).fl != 0.7f) ||
        (off6False[0].FltValue(1).fl != 0.3f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node6 : Setting or getting softmax parameters is wrong");
    }

    TokArr node6True = net->GetSoftMaxWeights("node6", "node4^True");
    if ((node6True[0].FltValue(0).fl != 0.8f) ||
        (node6True[0].FltValue(1).fl != 0.2f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node6 : Setting or getting softmax parameters is wrong");
    }

    TokArr node6False = net->GetSoftMaxWeights("node6", "node4^False");
    float val06 = node6False[0].FltValue(0).fl;
    float val16 = node6False[0].FltValue(1).fl;
    if ((val06 != 0.5f) || (val16 != 0.9f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node6 : Setting or getting softmax parameters is wrong");
    }

    delete net;
    std::cout << "TestSetDistributionSevenNodesModel is completed successfully" << std::endl;
}
void TestSetDistributionCondSoftMax()
{
    BayesNet *net = SimpleCondSoftMaxModel();

    // Check the Gaussian parameters set on the continuous nodes node0..node2.
    if (net->GetGaussianMean("node0")[0].FltValue() != 0.1f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node1")[0].FltValue() != 0.2f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node2")[0].FltValue() != 0.3f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node2 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node0")[0].FltValue() != 0.9f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node1")[0].FltValue() != 0.8f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node2")[0].FltValue() != 0.7f)
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node2 : Setting or getting gaussian parameters is wrong");
    }

    // Check the tabular distribution of node6.
    if ((net->GetPTabular("node6")[0].FltValue() != 0.3f) ||
        (net->GetPTabular("node6")[1].FltValue() != 0.7f) ||
        (net->GetPTabular("node6")[2].FltValue() != 0.5f) ||
        (net->GetPTabular("node6")[3].FltValue() != 0.5f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node6 : Setting or getting tabular parameters is wrong");
    }

    // Check the conditional softmax offsets and weights of node5 for both states of node3.
    TokArr off5True = net->GetSoftMaxOffset("node5", "node3^True");
    if ((off5True[0].FltValue(0).fl != 0.1f) ||
        (off5True[0].FltValue(1).fl != 0.1f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    TokArr off5False = net->GetSoftMaxOffset("node5", "node3^False");
    float val1off = off5False[0].FltValue(0).fl;
    float val2off = off5False[0].FltValue(1).fl;
    if ((val1off != 0.21f) || (val2off != 0.21f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5True = net->GetSoftMaxWeights("node5", "node3^True");
    if ((node5True[0].FltValue(0).fl != 0.3f) ||
        (node5True[0].FltValue(1).fl != 0.4f) ||
        (node5True[0].FltValue(2).fl != 0.5f) ||
        (node5True[0].FltValue(3).fl != 0.6f) ||
        (node5True[0].FltValue(4).fl != 0.7f) ||
        (node5True[0].FltValue(5).fl != 0.8f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    TokArr node5False = net->GetSoftMaxWeights("node5", "node3^False");
    float val0 = node5False[0].FltValue(0).fl;
    float val1 = node5False[0].FltValue(1).fl;
    float val2 = node5False[0].FltValue(2).fl;
    float val3 = node5False[0].FltValue(3).fl;
    float val4 = node5False[0].FltValue(4).fl;
    float val5 = node5False[0].FltValue(5).fl;
    if ((val0 != 0.23f) || (val1 != 0.24f) || (val2 != 0.25f) ||
        (val3 != 0.26f) || (val4 != 0.27f) || (val5 != 0.28f))
    {
        PNL_THROW(pnl::CAlgorithmicException,
            "node5 : Setting or getting softmax parameters is wrong");
    }

    delete net;
    std::cout << "TestSetDistributionCondSoftMax is completed successfully" << std::endl;
}
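/*
 * Note: the exact "!=" comparisons above work because each value is written and
 * then read back through the same TokArr path, so the stored float is bit-identical
 * to the literal it is compared against. If these tests were extended to parameters
 * that go through arithmetic (for example, learned values), a tolerance-based check
 * would be safer. The sketch below is illustrative only: the helper name
 * CheckFltEqual and the 1e-5f tolerance are assumptions, not part of the original
 * test suite.
 */
#include <cmath>

static bool CheckFltEqual(float actual, float expected, float eps = 1e-5f)
{
    // Treat two floats as equal when their absolute difference is below eps.
    return std::fabs(actual - expected) < eps;
}

// Possible usage inside a check:
//     if (!CheckFltEqual(net->GetGaussianMean("node0")[0].FltValue(), 0.1f))
//     {
//         PNL_THROW(pnl::CAlgorithmicException,
//             "node0 : Setting or getting gaussian parameters is wrong");
//     }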