Example #1
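This example builds a random autoregressive HMM as a dynamic Bayesian network, runs smoothing, filtering and fixed-lag smoothing with the 1.5-slice junction tree inference engine, and then learns the model parameters with EM from randomly generated time series. The second function, CompareFixLagSmoothingArHMM, checks fixed-lag smoothing against exact inference on the unrolled network.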
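// The headers below are an assumption based on the OpenPNL distribution
// (pnl_dll.hpp is its umbrella header). The helper routines used in this
// example -- ShowResultsForInference, ShowCPD, CreateEvidencesArHMM,
// DefineQueryArHMM, CreateEvidenceForUnrolledArHMM and CompareArHMM -- as well
// as pnlExCreateRndArHMM are assumed to be declared elsewhere in the example
// sources.
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include "pnl_dll.hpp"

using namespace pnl;
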
int main()
{
    CBNet *pBNetForArHMM = pnlExCreateRndArHMM();
    CDBN *pArHMM = CDBN::Create( pBNetForArHMM );
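    // The ArHMM template has two nodes per slice: a hidden state node (0) and
    // an observation node (1); being autoregressive, the observation in slice t
    // depends on the hidden state of slice t and on the observation of slice t-1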

    //Create an inference engine
    C1_5SliceJtreeInfEngine *pInfEng = C1_5SliceJtreeInfEngine::Create( pArHMM );

    //Number of time slices for unrolling
    int nTimeSlices = 5;
    const CPotential* pQueryJPD;

    //Create evidence for every slice
    CEvidence **pEvidences = new CEvidence*[nTimeSlices];

    //Node 1 is observed in every slice
    const int obsNodesNums[] = { 1 };
    valueVector obsNodesVals(1);

    int i;
    for( i = 0; i < nTimeSlices; i++ )
    {
        // Draw a random binary observation for node 1
        obsNodesVals[0].SetInt( rand() % 2 );
        pEvidences[i] = CEvidence::Create( pArHMM, 1, obsNodesNums,
            obsNodesVals );
    }

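    // Smoothing computes, for every slice, the posterior of the query nodes
    // given the evidence from all nTimeSlices slices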
    // Create smoothing procedure
    pInfEng->DefineProcedure(ptSmoothing, nTimeSlices);
    // Enter created evidences
    pInfEng->EnterEvidence(pEvidences, nTimeSlices);
    // Start smoothing process
    pInfEng->Smoothing();

    // Choose the query sets of nodes: one for the prior (zero) slice, one for every later slice
    int queryPrior[] = { 0 };
    int queryPriorSize = 1;
    int query[] = { 0, 2 };
    int querySize = 2;

    std::cout << " Results of smoothing " << std::endl;

    int slice = 0;
    pInfEng->MarginalNodes( queryPrior, queryPriorSize, slice );
    pQueryJPD = pInfEng->GetQueryJPD();

    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();


    std::cout << std::endl;

    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->MarginalNodes( query, querySize, slice );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
        //pQueryJPD->Dump();
    }

    slice = 0;

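    // Filtering computes the posterior for the current slice given the
    // evidence observed up to and including that slice, entered slice by slice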
    //Create filtering procedure
    pInfEng->DefineProcedure( ptFiltering );
    pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    pInfEng->Filtering( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();

    std::cout << " Results of filtering " << std::endl;
    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();


    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->Filtering( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
        //pQueryJPD->Dump();
    }

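    // Fixed-lag smoothing works online: once evidence up to slice t has been
    // entered, it computes the posterior for slice t - lag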
    //Create fixed-lag smoothing (online)
    int lag = 2;
    pInfEng->DefineProcedure( ptFixLagSmoothing, lag );

    for (slice = 0; slice < lag + 1; slice++)
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    }
    std::cout << " Results of fixed-lag smoothing " << std::endl;

    pInfEng->FixLagSmoothing( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();

    std::cout << std::endl;

    for( ; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->FixLagSmoothing( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
        //pQueryJPD->Dump();
    }

    delete pInfEng;

    for( slice = 0; slice < nTimeSlices; slice++)
    {
        delete pEvidences[slice];
    }
    delete[] pEvidences;

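    // Parameter learning: generate random time series from the original model
    // and fit a new, randomly initialized model to them with the EM algorithm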
    //Create learning procedure for DBN
    pEvidencesVecVector evidencesOut;


    const int nTimeSeries = 500;
    intVector nSlices(nTimeSeries);
    // Define the number of slices in each time series (a random length from 3 to 20)
    pnlRand(nTimeSeries, &nSlices.front(), 3, 20);
    printf("Slice counts of the first 10 time series: ");
    for( i = 0; i < 10; i++ )
    {
        printf("%d ", nSlices[i]);
    }
    printf("\n");
    // Generate evidences in a random way
    pArHMM->GenerateSamples( &evidencesOut, nSlices );
    printf("Generated %d time series; the first one has %d slices\n",
        int(evidencesOut.size()), int(evidencesOut[0].size()));

    // Create DBN for learning
    CDBN *pDBN = CDBN::Create(pnlExCreateRndArHMM());

    // Create learning engine
    CEMLearningEngineDBN *pLearn = CEMLearningEngineDBN::Create( pDBN );

    // Set data for learning
    pLearn->SetData( evidencesOut );

    // Start learning
    try
    {
        pLearn->Learn();
    }
    catch( CAlgorithmicException &except )
    {
        std::cout << except.GetMessage() << std::endl;
    }

    std::cout<<"Leraning procedure"<<std::endl;

    // Compare the CPDs of the initial model with the CPDs of the learned model
    const CCPD *pCPD1, *pCPD2;
    for( i = 0; i < 4; i++ )
    {
        std::cout<<" initial model"<<std::endl;
        pCPD1 = static_cast<const CCPD*>( pArHMM->GetFactor(i) );
        ShowCPD( pCPD1 );


	std::cout<<" model after learning"<<std::endl;
        pCPD2 = static_cast<const CCPD*>( pDBN->GetFactor(i) );
	ShowCPD( pCPD2 );

    }

    // Free the generated evidences
    for( i = 0; i < int(evidencesOut.size()); i++ )
    {
        int j;
        for( j = 0; j < int(evidencesOut[i].size()); j++ )
        {
            delete evidencesOut[i][j];
        }
    }
    delete pDBN;
    delete pArHMM;
    delete pLearn;

    return 0;
}
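
// CompareFixLagSmoothingArHMM checks fixed-lag smoothing on the DBN against
// exact junction tree inference on the network unrolled up to the current
// slice; it returns 1 if all compared posteriors agree within eps, 0 otherwise.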
int CompareFixLagSmoothingArHMM(CDBN* pDBN, int nTimeSlice, float eps)
{
    CBNet * pUnrolledDBN;
    pUnrolledDBN = static_cast<CBNet *>(pDBN->UnrollDynamicModel(nTimeSlice));

    int itogResult = 1;
    int result = 0;

    // A random lag could be drawn as rand() % (nTimeSlice - 1); a fixed lag of 2 is used here
    int lag = 2;
    /////////////////////////////////////////////////////////////////////////////
    //Create the fixed-lag smoothing inference engine for the DBN
    /////////////////////////////////////////////////////////////////////////////
    C1_5SliceJtreeInfEngine *pDynamicJTree;
    pDynamicJTree = C1_5SliceJtreeInfEngine::Create(pDBN);
    pDynamicJTree->DefineProcedure(ptFixLagSmoothing, lag);

    CEvidence *myEvidenceForUnrolledDBN;
    pEvidencesVector myEvidencesForDBN;
    CreateEvidencesArHMM(pDBN, nTimeSlice, &myEvidencesForDBN);
    intVector queryForDBN, queryForDBNPrior;
    intVecVector queryForUnrollBnet;
    DefineQueryArHMM(pDBN, nTimeSlice,
        &queryForDBNPrior, &queryForDBN, &queryForUnrollBnet);

    int slice;
    for(slice = 0; slice < nTimeSlice; slice++)
    {
        // Enter the next portion of evidence into the fixed-lag smoothing engine
        pDynamicJTree->EnterEvidence(&(myEvidencesForDBN[slice]), 1);

        if( slice >= lag )
        {
            pDynamicJTree->FixLagSmoothing(slice);

            // Unroll the DBN up to the current slice and run exact junction
            // tree inference on the resulting static network
            CBNet *pUnrolledOnISlice =
                static_cast<CBNet *>(pDBN->UnrollDynamicModel(slice + 1));
            myEvidenceForUnrolledDBN =
                CreateEvidenceForUnrolledArHMM(pUnrolledOnISlice, slice + 1,
                    myEvidencesForDBN);
            CJtreeInfEngine *pUnrolJTree =
                CJtreeInfEngine::Create(pUnrolledOnISlice);

            pUnrolJTree->EnterEvidence(myEvidenceForUnrolledDBN);
            pUnrolJTree->MarginalNodes(&(queryForUnrollBnet[slice - lag]).front(),
                (queryForUnrollBnet[slice - lag]).size());

            // Fixed-lag smoothing answers the query for slice - lag;
            // slice 0 uses the prior-slice query
            if( slice - lag )
            {
                pDynamicJTree->MarginalNodes(&queryForDBN.front(),
                    queryForDBN.size());
            }
            else
            {
                pDynamicJTree->MarginalNodes(&queryForDBNPrior.front(),
                    queryForDBNPrior.size());
            }

            // Compare the posterior from the unrolled network with the
            // posterior from fixed-lag smoothing
            const CPotential* qJPD1 = pUnrolJTree->GetQueryJPD();
            const CPotential* qJPD2 = pDynamicJTree->GetQueryJPD();

            result = CompareArHMM( qJPD1, qJPD2, eps );
            if( !result )
            {
                itogResult = 0;
            }

            delete myEvidenceForUnrolledDBN;
            delete pUnrolJTree;
            delete pUnrolledOnISlice;
        }
    }

    for( slice = 0; slice < int(myEvidencesForDBN.size()); slice++ )
    {
        delete myEvidencesForDBN[slice];
    }
    delete pDynamicJTree;
    delete pUnrolledDBN;

    return itogResult;
}