int main()
{
    CBNet *pBNetForArHMM = pnlExCreateRndArHMM();
    CDBN *pArHMM = CDBN::Create( pBNetForArHMM );

    //Create an inference engine
    C1_5SliceJtreeInfEngine *pInfEng;
    pInfEng = C1_5SliceJtreeInfEngine::Create( pArHMM );

    //Number of time slices for unrolling
    int nTimeSlices = 5;
    const CPotential *pQueryJPD;

    //Create an evidence for every slice
    CEvidence **pEvidences;
    pEvidences = new CEvidence*[nTimeSlices];

    //Node 1 is observed in every slice
    const int obsNodesNums[] = { 1 };
    valueVector obsNodesVals(1);
    int i;
    for( i = 0; i < nTimeSlices; i++ )
    {
        //Generate a random observed value
        obsNodesVals[0].SetInt( rand() % 2 );
        pEvidences[i] = CEvidence::Create( pArHMM, 1, obsNodesNums, obsNodesVals );
    }

    //Create the smoothing procedure
    pInfEng->DefineProcedure( ptSmoothing, nTimeSlices );
    //Enter the created evidences
    pInfEng->EnterEvidence( pEvidences, nTimeSlices );
    //Start the smoothing process
    pInfEng->Smoothing();

    //Choose the query set of nodes for every slice
    int queryPrior[] = { 0 };
    int queryPriorSize = 1;
    int query[] = { 0, 2 };
    int querySize = 2;

    std::cout << " Results of smoothing " << std::endl;
    int slice = 0;
    pInfEng->MarginalNodes( queryPrior, queryPriorSize, slice );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference( pQueryJPD, slice );
    //pQueryJPD->Dump();
    std::cout << std::endl;
    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->MarginalNodes( query, querySize, slice );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
        //pQueryJPD->Dump();
    }

    //Create the filtering procedure
    slice = 0;
    pInfEng->DefineProcedure( ptFiltering );
    pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    pInfEng->Filtering( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    std::cout << " Results of filtering " << std::endl;
    ShowResultsForInference( pQueryJPD, slice );
    //pQueryJPD->Dump();
    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->Filtering( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
        //pQueryJPD->Dump();
    }

    //Create the fixed-lag smoothing (online) procedure
    int lag = 2;
    pInfEng->DefineProcedure( ptFixLagSmoothing, lag );
    for( slice = 0; slice < lag + 1; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    }
    std::cout << " Results of fixed-lag smoothing " << std::endl;
    pInfEng->FixLagSmoothing( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference( pQueryJPD, slice );
    //pQueryJPD->Dump();
    std::cout << std::endl;
    for( ; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->FixLagSmoothing( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference( pQueryJPD, slice );
        //pQueryJPD->Dump();
    }

    delete pInfEng;
    for( slice = 0; slice < nTimeSlices; slice++ )
    {
        delete pEvidences[slice];
    }
    delete[] pEvidences;

    //Create the learning procedure for the DBN
    pEvidencesVecVector evidencesOut;
    const int nTimeSeries = 500;
    //Define the number of slices in every time series (random length from 3 to 20)
    intVector nSlices( nTimeSeries );
    pnlRand( nTimeSeries, &nSlices.front(), 3, 20 );
    printf( "number of time series: %d\n", (int)nSlices.size() );
    printf( "lengths of the first 10 series: [ " );
    for( int k = 0; k < 10; k++ )
    {
        printf( "%d ", nSlices[k] );
    }
    printf( "]\n" );

    //Generate the evidences in a random way
    pArHMM->GenerateSamples( &evidencesOut, nSlices );
    printf( "generated series: %d\n", (int)evidencesOut.size() );
    printf( "slices in the first series: %d\n", (int)evidencesOut[0].size() );

    //Create a DBN for learning
    CDBN *pDBN = CDBN::Create( pnlExCreateRndArHMM() );
    //Create the learning engine
    CEMLearningEngineDBN *pLearn = CEMLearningEngineDBN::Create( pDBN );
    //Set the data for learning
    pLearn->SetData( evidencesOut );
    //Start learning
    try
    {
        pLearn->Learn();
    }
    catch( CAlgorithmicException &except )
    {
        std::cout << except.GetMessage() << std::endl;
    }

    //Compare the initial parameters with the learned ones
    std::cout << "Learning procedure" << std::endl;
    const CCPD *pCPD1, *pCPD2;
    for( i = 0; i < 4; i++ )
    {
        std::cout << " initial model" << std::endl;
        pCPD1 = static_cast<const CCPD*>( pArHMM->GetFactor(i) );
        ShowCPD( pCPD1 );
        std::cout << " model after learning" << std::endl;
        pCPD2 = static_cast<const CCPD*>( pDBN->GetFactor(i) );
        ShowCPD( pCPD2 );
    }

    for( i = 0; i < (int)evidencesOut.size(); i++ )
    {
        int j;
        for( j = 0; j < (int)evidencesOut[i].size(); j++ )
        {
            delete evidencesOut[i][j];
        }
    }
    delete pLearn;
    delete pDBN;
    delete pArHMM;
    return 0;
}
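// The listing above relies on two helpers, ShowResultsForInference and ShowCPD,
// whose definitions are not part of this excerpt. The sketch below is one
// possible (hypothetical) implementation, to be declared before main(). It
// assumes only CPotential::Dump(), which the example already uses, and
// CCPD::ConvertToPotential(); if your PNL build exposes a different printing
// routine, substitute it here.
void ShowResultsForInference( const CPotential *pQueryJPD, int slice )
{
    //Print the joint distribution over the query nodes for the given slice
    std::cout << "Query JPD for slice " << slice << std::endl;
    pQueryJPD->Dump();
}

void ShowCPD( const CCPD *pCPD )
{
    //Convert the CPD to a potential and dump its table
    CPotential *pPot = pCPD->ConvertToPotential();
    pPot->Dump();
    delete pPot;
}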
int testBKInfUsingClusters()
{
    int ret = TRS_OK;
    int seed = pnlTestRandSeed();
    pnlSeed( seed );
    std::cout << "seed: " << seed << std::endl;

    int nTimeSlices = -1;
    while( nTimeSlices <= 5 )
    {
        trsiRead( &nTimeSlices, "10", "Number of slices" );
    }
    float eps = -1.0f;
    while( eps <= 0 )
    {
        trssRead( &eps, "1.0e-1f", "accuracy in test" );
    }

    CDBN *pDBN = CDBN::Create( pnlExCreateBatNetwork() );

    //Split the interface nodes into random clusters for the BK engine
    intVecVector clusters;
    intVector interfNds;
    pDBN->GetInterfaceNodes( &interfNds );
    int numIntNds = interfNds.size();
    int numOfClusters = pnlRand( 1, numIntNds );
    clusters.resize( numOfClusters );
    int i;
    for( i = 0; i < numIntNds; i++ )
    {
        clusters[pnlRand( 0, numOfClusters - 1 )].push_back( interfNds[i] );
    }
    //Keep only the non-empty clusters
    intVecVector validClusters;
    validClusters.reserve( numOfClusters );
    for( i = 0; i < clusters.size(); i++ )
    {
        if( !clusters[i].empty() )
        {
            validClusters.push_back( clusters[i] );
        }
    }

    CBKInfEngine *pBKInf;
    pBKInf = CBKInfEngine::Create( pDBN, validClusters );
    C1_5SliceJtreeInfEngine *pJTreeInf;
    pJTreeInf = C1_5SliceJtreeInfEngine::Create( pDBN );

    //Generate one time series and hide a random subset of nodes in every slice
    intVector nSlices( 1, nTimeSlices );
    pEvidencesVecVector pEvid;
    pDBN->GenerateSamples( &pEvid, nSlices );
    int nnodesPerSlice = pDBN->GetNumberOfNodes();
    intVector nodes( nnodesPerSlice, 0 );
    for( i = 0; i < nnodesPerSlice; i++ )
    {
        nodes[i] = i;
    }
    intVector ndsToToggle;
    for( i = 0; i < nTimeSlices; i++ )
    {
        std::random_shuffle( nodes.begin(), nodes.end() );
        ndsToToggle.resize( pnlRand( 1, nnodesPerSlice ) );
        int j;
        for( j = 0; j < ndsToToggle.size(); j++ )
        {
            ndsToToggle[j] = nodes[j];
        }
        (pEvid[0])[i]->ToggleNodeState( ndsToToggle );
    }

    //Run smoothing with both engines
    pBKInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pBKInf->EnterEvidence( &(pEvid[0]).front(), nTimeSlices );
    pBKInf->Smoothing();

    pJTreeInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pJTreeInf->EnterEvidence( &(pEvid[0]).front(), nTimeSlices );
    pJTreeInf->Smoothing();

    //Build a random query: a node of a random slice together with (some of) its parents
    int querySlice = pnlRand( 0, nTimeSlices - 1 );
    int queryNode = pnlRand( 0, nnodesPerSlice - 1 );
    queryNode += ( querySlice ? nnodesPerSlice : 0 );
    intVector query;
    pDBN->GetGraph()->GetParents( queryNode, &query );
    query.push_back( queryNode );
    std::random_shuffle( query.begin(), query.end() );
    query.resize( pnlRand( 1, query.size() ) );

    pBKInf->MarginalNodes( &query.front(), query.size(), querySlice );
    pJTreeInf->MarginalNodes( &query.front(), query.size(), querySlice );

    //Compare the query JPDs computed by the two engines
    const CPotential *potBK = pBKInf->GetQueryJPD();
    const CPotential *potJTree = pJTreeInf->GetQueryJPD();
    if( !potBK->IsFactorsDistribFunEqual( potJTree, eps ) )
    {
        std::cout << "BK query JPD \n";
        potBK->Dump();
        std::cout << "JTree query JPD \n";
        potJTree->Dump();
        ret = TRS_FAIL;
    }

    for( i = 0; i < nTimeSlices; i++ )
    {
        delete (pEvid[0])[i];
    }
    delete pBKInf;
    delete pJTreeInf;
    delete pDBN;
    return trsResult( ret, ret == TRS_OK ? "No errors" :
        "Bad test on BK Inference using clusters" );
}
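// The test above is written for the TRS harness used by the PNL test suite
// (trsiRead, trssRead, trsResult). A registration stub along the following
// lines is usually needed so the harness can discover and run the test; the
// description strings and the init function name here are illustrative
// assumptions, not taken from the original source.
static char func_name[]  = "testBKInfUsingClusters";
static char test_desc[]  = "BK inference using clusters vs 1.5-slice JTree";
static char test_class[] = "Algorithm";

void initABKInfUsingClusters()
{
    trsReg( func_name, test_desc, test_class, testBKInfUsingClusters );
}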
PNL_USING

int main()
{
    int nnodes = 16;
    int nodeslice = 8;

    //Create Kjaerulff's network and wrap it into a DBN
    CBNet *pKjaerulf = pnlExCreateKjaerulfsBNet();
    CDBN *pKj = CDBN::Create( pKjaerulf );

    int nSeries = 50;
    int nslices = 101;
    int i;
    intVector nS( nSeries );
    for( i = 0; i < nSeries; i++ )
    {
        nS[i] = nslices;
    }

    //Read the evidences for every series and slice from the data file
    valueVector vValues;
    vValues.resize( nodeslice );
    intVector obsNodes( nodeslice );
    for( i = 0; i < nodeslice; i++ )
    {
        obsNodes[i] = i;
    }
    CEvidence ***pEv;
    pEv = new CEvidence **[nSeries];
    int series, slice, node, val;
    FILE *fp = fopen( "../Data/kjaerulff.dat", "r" );
    if( !fp )
    {
        std::cout << "can't open cases file" << std::endl;
        exit(1);
    }
    for( series = 0; series < nSeries; series++ )
    {
        pEv[series] = new CEvidence*[nslices];
        for( slice = 0; slice < nslices; slice++ )
        {
            for( node = 0; node < nodeslice; node++ )
            {
                fscanf( fp, "%d,", &val );
                vValues[node].SetFlt( val );
            }
            (pEv[series])[slice] = CEvidence::Create( pKj->GetModelDomain(),
                obsNodes, vValues );
        }
    }
    fclose( fp );

    //Create an initial chain-structured network to start the structure search from
    CGraph *pGraph = CGraph::Create( nnodes, NULL, NULL, NULL );
    for( i = 0; i < nnodes - 1; i++ )
    {
        pGraph->AddEdge( i, i + 1, 1 );
    }
    CNodeType *nodeTypes = new CNodeType[1];
    nodeTypes[0].SetType( 1, 2 );
    int *nodeAssociation = new int[nnodes];
    for( i = 0; i < nnodes; i++ )
    {
        nodeAssociation[i] = 0;
    }
    CBNet *pBnet = CBNet::Create( nnodes, 1, nodeTypes, nodeAssociation, pGraph );
    pBnet->AllocFactors();
    //Allocate table CPDs for every node
    floatVector data;
    data.assign( 64, 0.0f );
    for( node = 0; node < nnodes; node++ )
    {
        pBnet->AllocFactor( node );
        pBnet->GetFactor( node )->AllocMatrix( &data.front(), matTable );
    }
    CDBN *pDBN = CDBN::Create( pBnet );

    //Create the structure learning engine (hill-climbing search, BIC score)
    CMlDynamicStructLearn *pLearn = CMlDynamicStructLearn::Create( pDBN,
        itDBNStructLearnML, StructLearnHC, BIC, 4, 1, 30 );
    pLearn->SetData( nSeries, &nS.front(), pEv );
    // pLearn->SetLearnPriorSlice(true);
    // pLearn->SetMinProgress((float)1e-4);
    pLearn->Learn();
    const CDAG *pDAG = pLearn->GetResultDag();
    pDAG->Dump();

    delete pLearn;
    delete pDBN;
    delete pKj;
    for( series = 0; series < nSeries; series++ )
    {
        for( slice = 0; slice < nslices; slice++ )
        {
            delete (pEv[series])[slice];
        }
        delete[] pEv[series];
    }
    delete[] pEv;
    //The network copies the node type information, so the arrays can be freed
    delete[] nodeTypes;
    delete[] nodeAssociation;
    return 0;
}