void CGibbsSamplingInfEngine:: Sampling( int statTime, int endTime ) { intVector ndsForSampling; GetNdsForSampling( &ndsForSampling ); boolVector sampleIsNeed; GetSamplingNdsFlags( &sampleIsNeed ); int numNdsForSampling = ndsForSampling.size(); pEvidencesVector currentEvidences; GetCurrentEvidences( ¤tEvidences ); CEvidence * pCurrentEvidence; int t; int i; for( t = statTime; t < endTime; t++ ) { int series; for( series = 0; series < GetNumStreams(); series++ ) { pCurrentEvidence = currentEvidences[series]; for( i = 0; i < numNdsForSampling; i++ ) { if( sampleIsNeed[i] ) { pCurrentEvidence->ToggleNodeStateBySerialNumber(1, &i); bool canBeSample = ConvertingFamilyToPot( ndsForSampling[i], pCurrentEvidence ); if(canBeSample) { GetPotToSampling(ndsForSampling[i])->GenerateSample( pCurrentEvidence, m_bMaximize ); } else { pCurrentEvidence->ToggleNodeStateBySerialNumber(1, &i); } } } } if( t > GetBurnIn()) { pFactorVector queryFactors; GetQueryFactors( &queryFactors ); int i; for( i = 0; i < queryFactors.size(); i++ ) { queryFactors[i]->UpdateStatisticsML( &(GetCurrentEvidences()->front()), GetNumStreams() ); } } } }
// Registers the sub-objects of a CEvidence with the persistence context:
// first the node values (handled by the base class), then the model domain
// and a context-owned copy of the observed-node index vector.
void CPersistEvidence::TraverseSubobject(CPNLBase *pObj, CContext *pContext)
{
    CPersistNodeValues::TraverseSubobject(pObj, pContext);

    CEvidence *pEv = static_cast<CEvidence*>(pObj);

    // Copy the observed node indices into a fresh vector; ownership passes
    // to the context via CCoverDel below.
    const int *pObs = pEv->GetAllObsNodes();
    const int nObs  = pEv->GetNumberObsNodes();
    intVector *pObsVec = new intVector(pObs, pObs + nObs);

    pContext->Put(const_cast<CModelDomain*>(pEv->GetModelDomain()), "ModelDomain");
    pContext->Put(new CCoverDel<intVector>(pObsVec), "ObservedNodes", true);
}
// Randomized unit test for CEvidence.
//
// Builds a random DAG / BNet with alternating discrete (size 4) and
// continuous (size 3) nodes, observes a random subset of nodes with known
// values, then checks:
//   * GetValue returns the values that were stored;
//   * GetNumberObsNodes / GetObsNodesFlags agree with what was entered;
//   * ToggleNodeState clears the observed flag of the toggled nodes.
// Returns TRS_OK on success, TRS_FAIL otherwise (trs test-system harness).
int testEvidence()
{
    int ret = TRS_OK;
    int nnodes = 0;
    int nObsNodes = 0;
    int i,j;
    // Read test parameters until they are consistent.
    while(nnodes <= 0)
    {
        trsiRead( &nnodes, "10", "Number of nodes in Model" );
    }
    while((nObsNodes <= 0)||(nObsNodes>nnodes))
    {
        trsiRead( &nObsNodes, "2", "Number of Observed nodes from all nodes in model");
    }
    int seed1 = pnlTestRandSeed();
    /*create string to display the value*/
    char value[42];
    sprintf(value, "%i", seed1);
    trsiRead(&seed1, value, "Seed for srand to define NodeTypes etc.");
    trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "seed for rand = %d\n", seed1);
    // Two node types: discrete with 4 states, continuous of size 3.
    CNodeType *modelNodeType = new CNodeType[2];
    modelNodeType[0] = CNodeType( 1, 4 );
    modelNodeType[1] = CNodeType( 0, 3 );
    // Alternate the two types across the nodes (even -> discrete, odd -> continuous).
    int *NodeAssociat=new int [nnodes+1];
    for(i=0; i<(nnodes+1)/2; i++)
    {
        NodeAssociat[2*i]=0;
        NodeAssociat[2*i+1]=1;
    }
    //create random graph - number of nodes for every node is rand too
    int lowBorder = nnodes - 1;
    int upperBorder = int((nnodes * (nnodes - 1)) / 2);
    int numEdges = rand()%(upperBorder - lowBorder)+lowBorder;
    CGraph* theGraph = tCreateRandomDAG( nnodes, numEdges, 1 );
    CBNet *grModel = CBNet::Create(nnodes, 2, modelNodeType, NodeAssociat,theGraph);
    // Guarded allocation so the harness can detect out-of-bounds writes.
    int *obsNodes = (int*)trsGuardcAlloc(nObsNodes, sizeof(int));
    srand ((unsigned int)seed1);
    intVector residuaryNodes;
    for (i=0; i<nnodes; i++)
    {
        residuaryNodes.push_back(i);
    }
    int num = 0;
    valueVector Values;
    Values.reserve(3*nnodes);
    Value val;
    // Draw nObsNodes distinct nodes and give each a known value:
    // discrete nodes get 1; continuous nodes get the vector (1, 2, 3).
    for (i = 0; i<nObsNodes; i++)
    {
        num = rand()%(nnodes-i);
        obsNodes[i] = residuaryNodes[num];
        residuaryNodes.erase(residuaryNodes.begin()+num);
        CNodeType nt = modelNodeType[NodeAssociat[obsNodes[i]]];
        if(nt.IsDiscrete())
        {
            val.SetInt(1);
            Values.push_back(val);
        }
        else
        {
            val.SetFlt(1.0f);
            Values.push_back(val);
            val.SetFlt(2.0f);
            Values.push_back(val);
            val.SetFlt(3.0f);
            Values.push_back(val);
        }
    }
    residuaryNodes.clear();
    CEvidence *pMyEvid = CEvidence::Create(grModel, nObsNodes, obsNodes, Values) ;
    int nObsNodesFromEv = pMyEvid->GetNumberObsNodes();
    const int *pObsNodesNow = pMyEvid->GetObsNodesFlags();
    // const int
    // NOTE(review): the declaration of 'myOffset' above appears to have been
    // commented out (its only uses are inside the "#if 0" block below), which
    // leaves the next statement without a visible declaration — verify against
    // the original source before re-enabling that block.
    *myOffset = pMyEvid->GetOffset();
    const int *myNumAllObsNodes = pMyEvid->GetAllObsNodes();
    valueVector ev;
    pMyEvid->GetRawData(&ev);
    // Check that the first observed node reads back the value(s) we stored.
    const Value* vall = pMyEvid->GetValue(obsNodes[0]);
    if( NodeAssociat[obsNodes[0]] == 0 )
    {
        if( (vall)[0].GetInt() != 1 )
        {
            ret = TRS_FAIL;
        }
    }
    else
    {
        for( j=0; j<3; j++)
        {
            if( (vall)[j].GetFlt() != (j+1)*1.0f )
            {
                ret = TRS_FAIL;
                break;
            }
        }
    }
    if(nObsNodesFromEv == nObsNodes)
    {
        // Gather the nodes whose observed flag is currently set.
        intVector numbersOfReallyObsNodes;
        int numReallyObsNodes=0;
        for ( i=0; i<nObsNodesFromEv; i++)
        {
            if (pObsNodesNow[i])
            {
                numbersOfReallyObsNodes.push_back(myNumAllObsNodes[i]);
                numReallyObsNodes++;
            }
        }
#if 0
        const CNodeType ** AllNodeTypesFromModel= new const CNodeType*[nnodes];
        for (i=0; i<nnodes; i++)
        {
            AllNodeTypesFromModel[i] = grModel->GetNodeType(i);
        }
        for (i=0; i<nObsNodesFromEv; i++)
        {
            //Test the values which are keep in Evidence
            CNodeType nt = *AllNodeTypesFromModel[myNumAllObsNodes[i]];
            int IsDiscreteNode = nt.IsDiscrete();
            if(IsDiscreteNode)
            {
                int valFromEv = (ev[myOffset[i]].GetInt());
                if(!(Values[i].GetInt() == valFromEv))
                {
                    ret=TRS_FAIL;
                    break;
                }
            }
            else
            {
                ;
                for (j=0; j<3; j++)
                {
                    if(!((ev[myOffset[i]+j]).GetFlt() == Values[i+j].GetFlt()))
                    {
                        ret=TRS_FAIL;
                        break;
                    }
                }
            }
        }
        delete []AllNodeTypesFromModel;
#endif
    }
    else
    {
        ret = TRS_FAIL;
    }
    //Toggle some Node
    // Pick a random subset of the observed nodes and toggle them off.
    // NOTE(review): rand()*nObsNodesFromEv can overflow int on platforms with
    // a large RAND_MAX — pre-existing behavior, left untouched here.
    int someNumber = (int)(rand()*nObsNodesFromEv/RAND_MAX);
    int *someOfNodes = new int[someNumber];
    intVector residuaryNums = intVector(myNumAllObsNodes, myNumAllObsNodes+nObsNodesFromEv);
    num=0;
    for(i=0; i<someNumber;i++)
    {
        num = (int)(rand()%(nObsNodes-i));
        someOfNodes[i] = residuaryNums[num];
        residuaryNums.erase(residuaryNums.begin()+num);
    }
    residuaryNums.clear();
    pMyEvid->ToggleNodeState(someNumber, someOfNodes);
    const int *pObsNodesAfterToggle = pMyEvid->GetObsNodesFlags();
    for (i=0; i<nObsNodesFromEv; i++)
    {
        //Test the ToggleNode method...
        // Any node that is still flagged observed must NOT be one we toggled.
        if(pObsNodesAfterToggle[i])
        {
            for(j=0; j<someNumber;j++)
            {
                if(myNumAllObsNodes[i]==someOfNodes[j])
                {
                    ret=TRS_FAIL;
                    break;
                }
            }
        }
    }
    delete grModel;
    delete pMyEvid;
    delete []modelNodeType;
    delete []NodeAssociat;
    delete []someOfNodes;
    // Verify the guarded buffer was not overrun before freeing it.
    int obsNodes_memory_flag = trsGuardCheck( obsNodes );
    if( obsNodes_memory_flag)
    {
        return trsResult( TRS_FAIL, "Dirty memory");
    }
    trsGuardFree( obsNodes );
    return trsResult( ret, ret == TRS_OK ? "No errors" : "Bad test on Values");
}
void Infer_Process(const CBNet* pBnet) { //create simple evidence for node 0 from BNet CEvidence* pEvidForWS = CreateEvidenceForBNet(pBnet); //create Naive inference for BNet CNaiveInfEngine* pNaiveInf = CNaiveInfEngine::Create( pBnet ); //enter evidence created before pNaiveInf->EnterEvidence( pEvidForWS ); //set the query node int numQueryNds = 1;//*<- int queryNds[] = { 3 };//*<- //get a marginal for query set of nodes pNaiveInf->MarginalNodes( queryNds, numQueryNds ); const CPotential* pMarg = pNaiveInf->GetQueryJPD(); //display the evidence node and such velue of BNet intVector obsNds; pConstValueVector obsVls; pEvidForWS->GetObsNodesWithValues(&obsNds, &obsVls); int i; for( i = 0; i < obsNds.size(); i++ ) { std::cout<<" observed value for node "<<obsNds[i]; std::cout<<" is "<<obsVls[i]->GetInt()<<std::endl; } //display the query node and such velue of BNet int nnodes; const int* domain; pMarg->GetDomain( &nnodes, &domain ); std::cout<<" inference results: \n"; std::cout<<" probability distribution for nodes [ "; for( i = 0; i < nnodes; i++ ) { std::cout<<domain[i]<<" "; } std::cout<<"]"<<std::endl; CMatrix<float>* pMat = pMarg->GetMatrix(matTable); // graphical model hase been created using dense matrix // so, the marginal is also dense EMatrixClass type = pMat->GetMatrixClass(); if( ! ( type == mcDense || type == mcNumericDense || type == mc2DNumericDense ) ) { assert(0); } int nEl; const float* data; static_cast<CNumericDenseMatrix<float>*>(pMat)->GetRawData(&nEl, &data); for( i = 0; i < nEl; i++ ) { std::cout<<" "<<data[i]; } std::cout<<std::endl; delete pEvidForWS; delete pNaiveInf; }
void CLWSamplingInfEngine:: MarginalNodes( const int *queryIn, int querySz, int notExpandJPD) { PNL_CHECK_IS_NULL_POINTER(queryIn); if( m_bNormalized == false ) NormalizeWeight(); int i, j, k; int offset; int nsamples = m_particleCount; const CBNet *pBNet = static_cast<const CBNet *>( m_pGraphicalModel ); int type = 0; int totalnodesizes = 0; intVector nodesize(querySz); intVector mulnodesize(querySz, 1); for( i = querySz; --i >=0; ) { const CNodeType* pNodeType = pBNet->GetNodeType(queryIn[i]); nodesize[i] = pNodeType->GetNodeSize(); if(i == querySz-1) mulnodesize[i] = 1; else mulnodesize[i] *= nodesize[i+1]; if(pNodeType->IsDiscrete()) { type++; if(totalnodesizes == 0) totalnodesizes = nodesize[i]; else totalnodesizes = totalnodesizes * nodesize[i]; } else { totalnodesizes = totalnodesizes + nodesize[i]; } } if(type == querySz) { //all query nodes are discrete float *tab = new float[totalnodesizes]; for(i = 0; i < totalnodesizes; i++) tab[i] = 0; for( i = 0; i < nsamples; i++) { CEvidence* pEvidence = m_currentEvVec[i]; int index = 0; for(j = 0; j < querySz; j++) { Value* pValue = pEvidence->GetValue(queryIn[j]); index += pValue->GetInt() * mulnodesize[j]; } tab[index] += m_particleWeight[i]; } m_pQueryJPD = CTabularPotential::Create(queryIn, querySz, pBNet->GetModelDomain () ); m_pQueryJPD->AllocMatrix( tab, matTable ); delete []tab; } else if(type == 0) { //all query nodes are gaussian float* val = new float[totalnodesizes]; float* mean = new float[totalnodesizes]; float* cov = new float[totalnodesizes * totalnodesizes]; for( i = 0; i < totalnodesizes; i++) mean[i] = 0; for( i = 0; i < totalnodesizes * totalnodesizes; i++) cov[i] = 0; // mean for( i = 0; i < nsamples; i++) { CEvidence* pEvidence = m_currentEvVec[i]; for(j = 0, offset = 0; j < querySz; j++) { Value* pValue = pEvidence->GetValue(queryIn[j]); for(k = 0; k < nodesize[j]; k++) { mean[offset] += m_particleWeight[i] * pValue[k].GetFlt(); offset++; } } } // covariance for( i = 0; i < nsamples; i++) { 
CEvidence* pEvidence = m_currentEvVec[i]; for(j = 0, offset = 0; j < querySz; j++) { Value* pValue = pEvidence->GetValue(queryIn[j]); for(k = 0; k < nodesize[j]; k++) { val[offset] = pValue[k].GetFlt(); offset++; } } for(j = 0; j < totalnodesizes; j++) { for(k = j; k < totalnodesizes; k++) { cov[k*totalnodesizes+j] += ( m_particleWeight[i] * ( val[j]- mean[j]) * (val[k] - mean[k]) ); cov[j*totalnodesizes+k] = cov[k*totalnodesizes+j]; } } } m_pQueryJPD = CGaussianPotential::Create( queryIn, querySz, pBNet->GetModelDomain () ); m_pQueryJPD->AllocMatrix( mean, matMean ); m_pQueryJPD->AllocMatrix( cov, matCovariance); delete []val; delete []mean; delete []cov; } //Get MPE delete m_pEvidenceMPE; m_pEvidenceMPE = NULL; m_pEvidenceMPE = m_pQueryJPD->GetMPE(); }
//-----------------------------------------------------------------------------
// Converts this SoftMax CPD into a tabular potential over the discrete nodes
// of its domain, using the continuous observations supplied in pEvidence.
// A private copy of the evidence is made so that nodes outside this CPD's
// domain can be hidden without disturbing the caller's evidence.
// Returns a newly created CTabularPotential (caller owns it).
// Throws CInconsistentType if the distribution is neither SoftMax nor
// conditional SoftMax.
CPotential *CSoftMaxCPD::ConvertToTabularPotential(const CEvidence* pEvidence) const
{
    // Search for the discrete nodes in the domain; the last discrete node
    // encountered is recorded as the SoftMax (child) node position/size.
    intVector discriteNodesPosInDom;
    int domSize = m_Domain.size();
    int numSoftMaxNode;
    int discrDomSize = 0;
    int *parentIndexes;
    int SoftMaxSize;
    const pConstNodeTypeVector* ntVec = GetDistribFun()->GetNodeTypesVector();
    int i;
    for (i = 0; i < domSize; i++)
    {
        if ((*ntVec)[i]->IsDiscrete())
        {
            discriteNodesPosInDom.push_back(m_Domain[i]);
            SoftMaxSize = (*ntVec)[i]->GetNodeSize();
            numSoftMaxNode = i;
        }
    };
    discrDomSize = discriteNodesPosInDom.size();
    //fill parents indexes vector
    // NOTE(review): parentIndexes is filled here but never read below — it is
    // only freed at the end.  Looks like leftover code; verify before removing.
    parentIndexes = new int[discrDomSize-1];
    for( i = 0; i < discrDomSize-1; i++ )
    {
        parentIndexes[i] = discriteNodesPosInDom[i];
    }
    // creating new evidece that contain all observed nodes in this domain:
    // copy every observed node and its value into a fresh evidence object.
    intVector pObsNodes;
    pConstValueVector pObsValues;
    pConstNodeTypeVector pNodeTypes;
    pEvidence->GetObsNodesWithValues(&pObsNodes,&pObsValues,&pNodeTypes);
    int *obsNodes;
    int obsNodesSize;
    obsNodesSize=pObsNodes.size();
    obsNodes = new int[obsNodesSize];
    for(i = 0;i < obsNodesSize; i++)
    {
        obsNodes[i] = pObsNodes[i];
    }
    CEvidence *pCopyEvidence;
    valueVector cpyValVect(0);
    for(i = 0; i < obsNodesSize; i++)
    {
        cpyValVect.push_back(*(pObsValues[i]));
    }
    pCopyEvidence = CEvidence::Create(pEvidence->GetModelDomain(), obsNodesSize,
        obsNodes,(const valueVector&)cpyValVect);
    // Hide every observed node that is not part of this CPD's domain.
    for(i = 0; i < pObsNodes.size(); i++)
    {
        if((std::find(m_Domain.begin(),m_Domain.end(), pObsNodes[i])) == m_Domain.end())
        {
            pCopyEvidence->MakeNodeHidden(pObsNodes[i]);
        }
    };
    //creating tabular potential
    CTabularPotential *resFactor = CTabularPotential::Create(
        GetModelDomain(),discriteNodesPosInDom);
    // Attach the probability matrix computed from the (copied) evidence,
    // dispatching on the concrete distribution type.
    if( m_DistributionType == dtSoftMax)
    {
        resFactor->AttachMatrix(((CSoftMaxDistribFun*)m_CorrespDistribFun)->
            GetProbMatrix(pCopyEvidence),matTable);
    }
    else
    {
        if(m_DistributionType == dtCondSoftMax)
        {
            resFactor->AttachMatrix(((CCondSoftMaxDistribFun*)m_CorrespDistribFun)->
                GetProbMatrix(pCopyEvidence),matTable);
        }
        else
        {
            PNL_THROW( CInconsistentType,
                "distribution must be SoftMax or conditional SoftMax" )
        }
    }
    delete [] parentIndexes;
    delete [] obsNodes;
    delete pCopyEvidence;
    return resFactor;
}