Example #1
void TestCondSoftMaxParamLearning(bool DeleteNet)
{
//    BayesNet *net = SimpleCondSoftMaxModel();  // reference model, used only by the disabled check below
    BayesNet *netToLearn = SimpleCondSoftMaxModel();
    float eps = 1e-1f;  // tolerance for the disabled parameter comparison below

    int nEvid = 100;
    netToLearn->GenerateEvidences(nEvid);

    netToLearn->LearnParameters();

    String nodes[] = {"node0", "node1", "node2"};  // node names referenced by the disabled check below

/*
    Verification (disabled): compares learned parameters against the
    reference model; re-enable the "net" line above and include <cmath>
    for fabs before using.

    int i, j;
    TokArr LearnParam, Param;
    for (i = 0; i < 3; i++)
    {
        LearnParam = netToLearn->GetGaussianMean(nodes[i]);
        Param = net->GetGaussianMean(nodes[i]);
        if (LearnParam[0].fload.size() != Param[0].fload.size())
        {
            PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
        }
        for (j = 0; j < LearnParam[0].fload.size(); j++)
        {
            // compare with an absolute tolerance; the original check only
            // caught deviations in the positive direction
            if (fabs(LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl) > eps)
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
        }

        LearnParam = netToLearn->GetGaussianCovar(nodes[i]);
        Param = net->GetGaussianCovar(nodes[i]);
        if (LearnParam[0].fload.size() != Param[0].fload.size())
        {
            PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
        }
        for (j = 0; j < LearnParam[0].fload.size(); j++)
        {
            if (fabs(LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl) > eps)
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
        }
    }
*/
    if (DeleteNet)
    {
        delete netToLearn;
    }

    std::cout << "TestCondSoftMaxParamLearning is completed successfully" << std::endl;
}
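
The mean and covariance checks in the disabled block above repeat the same size-and-tolerance comparison. Below is a minimal sketch of that check factored into a helper, using only the PNL high-level calls already visible in the example (TokArr, Tok::fload, FltValue(j).fl, PNL_THROW); the name CompareTokArr and the by-value parameters are assumptions for illustration, not part of the original test.

#include <cmath>  // fabs

// Hypothetical helper: element-wise comparison of two parameter vectors
// with an absolute tolerance, throwing the same exception the tests use.
// Arguments are taken by value to avoid assumptions about const accessors
// on PNL's TokArr.
static void CompareTokArr(TokArr learned, TokArr expected, float eps)
{
    if (learned[0].fload.size() != expected[0].fload.size())
    {
        PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
    }
    for (int j = 0; j < learned[0].fload.size(); j++)
    {
        if (fabs(learned[0].FltValue(j).fl - expected[0].FltValue(j).fl) > eps)
        {
            PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
        }
    }
}

// Usage inside the disabled loop, e.g.:
//     CompareTokArr(netToLearn->GetGaussianMean(nodes[i]),
//                   net->GetGaussianMean(nodes[i]), eps);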
Example #2
void TestsPnlHigh::TestCondGaussianParamLearning()
{
    BayesNet *net = SimpleCGM2();
    BayesNet *netToLearn = SimpleCGM2();

    float eps = 1e-2f;  // tighter tolerance than Example #1, matching the larger evidence sample

    int nEvidences = 5000;
    netToLearn->GenerateEvidences(nEvidences);

    netToLearn->LearnParameters();
/*
    Verification (disabled): a "nodes" array of node names must be defined
    before re-enabling this block, and <cmath> is needed for fabs.

    int nNodes = netToLearn->Net().Graph().nNode();

    // Checking step
    int i, j;
    TokArr LearnParam, Param;
    for (i = 0; i < nNodes; i++)
    {
        // placeholder condition: should check that node i is Gaussian
        // with no discrete (tabular) parents; "true" processes every node
        if (true)
        {
            LearnParam = netToLearn->GetGaussianMean(nodes[i]);
            Param = net->GetGaussianMean(nodes[i]);
            if (LearnParam[0].fload.size() != Param[0].fload.size())
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
            for (j = 0; j < LearnParam[0].fload.size(); j++)
            {
                // absolute tolerance; the original check was one-sided
                if (fabs(LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl) > eps)
                {
                    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
                }
            }

            LearnParam = netToLearn->GetGaussianCovar(nodes[i]);
            Param = net->GetGaussianCovar(nodes[i]);
            if (LearnParam[0].fload.size() != Param[0].fload.size())
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
            for (j = 0; j < LearnParam[0].fload.size(); j++)
            {
                if (fabs(LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl) > eps)
                {
                    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
                }
            }

            TokArr parents = netToLearn->GetParents(nodes[i]);
            LearnParam = netToLearn->GetGaussianWeights(nodes[i], parents);
            Param = net->GetGaussianWeights(nodes[i], parents);
            if (LearnParam[0].fload.size() != Param[0].fload.size())
            {
                PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
            }
            for (j = 0; j < LearnParam[0].fload.size(); j++)
            {
                if (fabs(LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl) > eps)
                {
                    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
                }
            }
        }
    }
*/
    delete net;
    delete netToLearn;
}
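
Both examples follow the same pattern: build a model, sample evidence with GenerateEvidences, run LearnParameters, and (once the disabled blocks are re-enabled) compare learned parameters against a reference model. Note the sample-size/tolerance trade-off: Example #1 uses 100 evidences with eps = 1e-1, while Example #2 samples 5000 and tightens eps to 1e-2. A minimal driver invoking both tests might look as follows; the default construction of TestsPnlHigh and the catch-all error handling are assumptions, since the original tests rely on PNL_THROW to signal failure.

#include <iostream>

int main()
{
    try
    {
        // free-standing test from Example #1; "true" frees the learned net
        TestCondSoftMaxParamLearning(true);

        // class-based test from Example #2 (default constructor assumed)
        TestsPnlHigh tests;
        tests.TestCondGaussianParamLearning();
    }
    catch (...)
    {
        // PNL_THROW raises a pnl exception when a comparison fails
        std::cout << "parameter learning tests failed" << std::endl;
        return 1;
    }
    return 0;
}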