int testBKInfUsingClusters()
{
    int ret = TRS_OK;

    int seed = pnlTestRandSeed();
    pnlSeed( seed );
    std::cout << "seed " << seed << std::endl;

    int nTimeSlices = -1;
    while( nTimeSlices <= 5 )
    {
        trsiRead( &nTimeSlices, "10", "Number of slices" );
    }

    float eps = -1.0f;
    while( eps <= 0 )
    {
        trssRead( &eps, "1.0e-1f", "accuracy in test" );
    }

    CDBN *pDBN = CDBN::Create( pnlExCreateBatNetwork() );

    // Randomly partition the interface nodes into clusters for BK inference
    intVecVector clusters;
    intVector interfNds;
    pDBN->GetInterfaceNodes( &interfNds );
    int numIntNds = interfNds.size();
    int numOfClusters = pnlRand( 1, numIntNds );

    clusters.resize( numOfClusters );
    int i;
    for( i = 0; i < numIntNds; i++ )
    {
        clusters[pnlRand( 0, numOfClusters - 1 )].push_back( interfNds[i] );
    }

    // Keep only the non-empty clusters
    intVecVector validClusters;
    validClusters.reserve( numOfClusters );
    for( i = 0; i < clusters.size(); i++ )
    {
        if( !clusters[i].empty() )
        {
            validClusters.push_back( clusters[i] );
        }
    }

    CBKInfEngine *pBKInf = CBKInfEngine::Create( pDBN, validClusters );
    C1_5SliceJtreeInfEngine *pJTreeInf = C1_5SliceJtreeInfEngine::Create( pDBN );

    // Generate one evidence sequence of nTimeSlices slices
    intVector nSlices( 1, nTimeSlices );
    pEvidencesVecVector pEvid;
    pDBN->GenerateSamples( &pEvid, nSlices );

    int nnodesPerSlice = pDBN->GetNumberOfNodes();
    intVector nodes( nnodesPerSlice, 0 );
    for( i = 0; i < nnodesPerSlice; i++ )
    {
        nodes[i] = i;
    }

    // Hide a random subset of the nodes in every slice
    intVector ndsToToggle;
    for( i = 0; i < nTimeSlices; i++ )
    {
        std::random_shuffle( nodes.begin(), nodes.end() );
        ndsToToggle.resize( pnlRand( 1, nnodesPerSlice ) );
        int j;
        for( j = 0; j < ndsToToggle.size(); j++ )
        {
            ndsToToggle[j] = nodes[j];
        }
        pEvid[0][i]->ToggleNodeState( ndsToToggle );
    }

    // Run smoothing with both engines
    pBKInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pBKInf->EnterEvidence( &pEvid[0].front(), nTimeSlices );
    pBKInf->Smoothing();

    pJTreeInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pJTreeInf->EnterEvidence( &pEvid[0].front(), nTimeSlices );
    pJTreeInf->Smoothing();

    // Build a random query: a random node of a random slice plus (some of) its parents
    int querySlice = pnlRand( 0, nTimeSlices - 1 );
    int queryNode = pnlRand( 0, nnodesPerSlice - 1 );
    queryNode += ( querySlice ? nnodesPerSlice : 0 );

    intVector query;
    pDBN->GetGraph()->GetParents( queryNode, &query );
    query.push_back( queryNode );
    std::random_shuffle( query.begin(), query.end() );
    query.resize( pnlRand( 1, query.size() ) );

    pBKInf->MarginalNodes( &query.front(), query.size(), querySlice );
    pJTreeInf->MarginalNodes( &query.front(), query.size(), querySlice );

    const CPotential *potBK = pBKInf->GetQueryJPD();
    const CPotential *potJTree = pJTreeInf->GetQueryJPD();

    // The BK (approximate) and 1.5-slice junction tree (exact) marginals must agree within eps
    if( !potBK->IsFactorsDistribFunEqual( potJTree, eps ) )
    {
        std::cout << "BK query JPD \n";
        potBK->Dump();
        std::cout << "JTree query JPD \n";
        potJTree->Dump();
        ret = TRS_FAIL;
    }

    for( i = 0; i < nTimeSlices; i++ )
    {
        delete pEvid[0][i];
    }
    delete pBKInf;
    delete pJTreeInf;
    delete pDBN;

    return trsResult( ret, ret == TRS_OK ? "No errors" :
                      "Bad test on BK Inference using clusters" );
}
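/* main() below calls two output helpers, ShowResultsForInference() and ShowCPD(),
 * whose definitions are not part of this excerpt.  The versions here are only
 * minimal sketches added so the example is self-contained, not the original
 * implementations: they simply dump the distributions via CPotential::Dump()
 * (the same call the test above uses).
 */
void ShowResultsForInference( const CPotential *pQueryJPD, int slice )
{
    // Print which slice the query joint distribution refers to, then dump it
    std::cout << "Query JPD for slice " << slice << std::endl;
    pQueryJPD->Dump();
}

void ShowCPD( const CCPD *pCPD )
{
    // Convert the CPD to a potential so it can be dumped the same way
    // (assumes CCPD::ConvertToPotential(), as used elsewhere in PNL)
    CPotential *pPot = pCPD->ConvertToPotential();
    pPot->Dump();
    delete pPot;
}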
int main()
{
    CBNet *pBNetForArHMM = pnlExCreateRndArHMM();
    CDBN *pArHMM = CDBN::Create( pBNetForArHMM );

    // Create an inference engine
    C1_5SliceJtreeInfEngine *pInfEng = C1_5SliceJtreeInfEngine::Create( pArHMM );

    // Number of time slices for unrolling
    int nTimeSlices = 5;
    const CPotential *pQueryJPD;

    // Create evidence for every slice
    CEvidence **pEvidences = new CEvidence*[nTimeSlices];

    // Let node 1 always be observed
    const int obsNodesNums[] = { 1 };
    valueVector obsNodesVals( 1 );
    int i;
    for( i = 0; i < nTimeSlices; i++ )
    {
        // Generate a random binary observation
        obsNodesVals[0].SetInt( rand() % 2 );
        pEvidences[i] = CEvidence::Create( pArHMM, 1, obsNodesNums, obsNodesVals );
    }

    // Create smoothing procedure
    pInfEng->DefineProcedure( ptSmoothing, nTimeSlices );
    // Enter the created evidences
    pInfEng->EnterEvidence( pEvidences, nTimeSlices );
    // Start smoothing process
    pInfEng->Smoothing();

    // Choose a query set of nodes for every slice
    int queryPrior[] = { 0 };
    int queryPriorSize = 1;
    int query[] = { 0, 2 };
    int querySize = 2;

    std::cout << " Results of smoothing " << std::endl;
    int slice = 0;
    pInfEng->MarginalNodes( queryPrior, queryPriorSize, slice );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference( pQueryJPD, slice );
    std::cout << std::endl;
    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->MarginalNodes( query, querySize, slice );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
    }

    // Create filtering procedure
    slice = 0;
    pInfEng->DefineProcedure( ptFiltering );
    pInfEng->EnterEvidence( &pEvidences[slice], 1 );
    pInfEng->Filtering( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    std::cout << " Results of filtering " << std::endl;
    ShowResultsForInference( pQueryJPD, slice );
    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &pEvidences[slice], 1 );
        pInfEng->Filtering( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
    }

    // Create fixed-lag smoothing (online) procedure
    int lag = 2;
    pInfEng->DefineProcedure( ptFixLagSmoothing, lag );
    for( slice = 0; slice < lag + 1; slice++ )
    {
        pInfEng->EnterEvidence( &pEvidences[slice], 1 );
    }
    std::cout << " Results of fixed-lag smoothing " << std::endl;
    pInfEng->FixLagSmoothing( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference( pQueryJPD, slice );
    std::cout << std::endl;
    for( ; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &pEvidences[slice], 1 );
        pInfEng->FixLagSmoothing( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
    }

    delete pInfEng;
    for( slice = 0; slice < nTimeSlices; slice++ )
    {
        delete pEvidences[slice];
    }
    delete[] pEvidences;

    // Create learning procedure for the DBN
    pEvidencesVecVector evidencesOut;
    const int nTimeSeries = 500;
    printf( "nTimeSlices: %d\n", nTimeSlices );

    // Define the number of slices in every time series (random length in [3, 20])
    intVector nSlices( nTimeSeries );
    printf( "nSlices_len: %d\n", (int)nSlices.size() );
    pnlRand( nTimeSeries, &nSlices.front(), 3, 20 );
    printf( "nSlices[0..9]: [ " );
    for( i = 0; i < 10; i++ )
    {
        printf( "%d ", nSlices[i] );
    }
    printf( "]\n" );

    // Generate evidences in a random way
    pArHMM->GenerateSamples( &evidencesOut, nSlices );
    printf( "number of time series: %d\n", (int)evidencesOut.size() );
    printf( "slices in the first series: %d\n", (int)evidencesOut[0].size() );

    // Create a second DBN with random parameters that will be learned
    CDBN *pDBN = CDBN::Create( pnlExCreateRndArHMM() );

    // Create learning engine
    CEMLearningEngineDBN *pLearn = CEMLearningEngineDBN::Create( pDBN );

    // Set data for learning
    pLearn->SetData( evidencesOut );

    // Start learning
    try
    {
        pLearn->Learn();
    }
    catch( CAlgorithmicException &except )
    {
        std::cout << except.GetMessage() << std::endl;
    }

    std::cout << "Learning procedure" << std::endl;

    // Compare the CPDs of the generating model with the learned ones
    const CCPD *pCPD1, *pCPD2;
    for( i = 0; i < 4; i++ )
    {
        std::cout << " initial model" << std::endl;
        pCPD1 = static_cast<const CCPD*>( pArHMM->GetFactor( i ) );
        ShowCPD( pCPD1 );
        std::cout << " model after learning" << std::endl;
        pCPD2 = static_cast<const CCPD*>( pDBN->GetFactor( i ) );
        ShowCPD( pCPD2 );
    }

    // Free the generated evidences and the models
    for( i = 0; i < evidencesOut.size(); i++ )
    {
        int j;
        for( j = 0; j < evidencesOut[i].size(); j++ )
        {
            delete evidencesOut[i][j];
        }
    }
    delete pDBN;
    delete pArHMM;
    delete pLearn;

    return 0;
}