void CGaussianCPD::UpdateStatisticsEM( const CPotential *pMargPot,
		                     const CEvidence *pEvidence )
{
    if( !pMargPot )
    {
	PNL_THROW( CNULLPointer, "pMargPot" )//no corresponding marginal potential
    }

    intVector obsPos;
    pMargPot->GetObsPositions(&obsPos);

    if( obsPos.size() && (this->GetDistribFun()->GetDistributionType() != dtCondGaussian) )
    {
	PNL_CHECK_IS_NULL_POINTER(pEvidence);
	CPotential *pExpandPot = pMargPot->ExpandObservedNodes(pEvidence, 0);
	m_CorrespDistribFun->UpdateStatisticsEM(pExpandPot->GetDistribFun(), pEvidence, 1.0f,
	    &m_Domain.front());
	delete pExpandPot;
    }
    else
    {
	m_CorrespDistribFun->UpdateStatisticsEM( pMargPot->GetDistribFun(), pEvidence, 1.0f,
	    &m_Domain.front() );
    }

}
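
// A minimal usage sketch, not part of the library (pBNet, pEv and nodeNum are
// hypothetical names): during EM learning an inference engine computes the
// marginal over the CPD's domain for one evidence case, and that marginal is
// passed to UpdateStatisticsEM together with the evidence it came from.
static void UpdateStatisticsForOneCaseSketch( CBNet* pBNet,
                                              const CEvidence* pEv, int nodeNum )
{
    CJtreeInfEngine* pInf = CJtreeInfEngine::Create( pBNet );
    pInf->EnterEvidence( pEv );
    //query the whole family of the node whose CPD is being learned
    CGaussianCPD* pCPD = static_cast<CGaussianCPD*>( pBNet->GetFactor( nodeNum ) );
    intVector domain;
    pCPD->GetDomain( &domain );
    pInf->MarginalNodes( &domain.front(), domain.size() );
    //accumulate the sufficient statistics for this single case
    pCPD->UpdateStatisticsEM( pInf->GetQueryJPD(), pEv );
    //(engine cleanup omitted for brevity)
}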
Example #2
//------------------------------------------------------------------------------
CPotential* CSoftMaxCPD::ConvertWithEvidenceToTabularPotential(
    const CEvidence* pEvidence, int flagSumOnMixtureNode ) const
{
    //need to convert to potential and after that add evidence
    CPotential* potWithoutEv = ConvertToTabularPotential(pEvidence);
    CPotential* potWithEvid = potWithoutEv->ShrinkObservedNodes(pEvidence);
    delete potWithoutEv;
    return potWithEvid;
}
void CJtreeInfEngine::
DivideJTreeNodePotByDistribFun( int clqPotNum, const int *domain,
			       const CDistribFun *pDistrFun )
{
    // bad-args check
    PNL_CHECK_RANGES( clqPotNum, 0, m_pJTree->GetNumberOfNodes() - 1 );
    PNL_CHECK_IS_NULL_POINTER(domain);
    PNL_CHECK_IS_NULL_POINTER(pDistrFun);
    // bad-args check end

    CPotential *pNodePot = m_pJTree->GetNodePotential(clqPotNum);

    int       nodePotDomSz;
    const int *nodePotDomain;

    pNodePot->GetDomain( &nodePotDomSz, &nodePotDomain );

    pNodePot->GetDistribFun()->DivideInSelfData( nodePotDomain, domain,
	pDistrFun );
}
Example #4
void CSoftMaxCPD::GenerateSample(CEvidence* evidence, int maximize) const
{
  // check whether all parents are observed
  int NNodes = m_Domain.size();
  bool isObserved = true;

  for (int node = 0; node < NNodes - 1; node++) 
    if (!evidence->IsNodeObserved(m_Domain[node]))
      isObserved = false;
     
  CPotential *pTabPot;

  if (!isObserved) {
    PNL_THROW(CAlgorithmicException, "all parents must be observed");
  }
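  // with all parents observed, the CPD restricted to the evidence reduces to
  // a tabular potential over the child node; sample the child node from it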

  pTabPot = ConvertWithEvidenceToTabularPotential(evidence);

  pTabPot->GenerateSample(evidence);

  delete pTabPot;
}
float CJtreeInfEngine::GetLogLik() const
{
    if( m_norm == -1.0f )
    {
	PNL_THROW( CInvalidOperation, " can't call GetLogLik before the collect phase of inference has been run " );
    }

    //////////////////////////////////////////////////////////////////////////
    float ll = 0.0f;

    intVector obsDomains;
    GetObservedDomains(m_pEvidence, &obsDomains);
    const CStaticGraphicalModel *pGrModel = GetModel();

    int i;
    for( i = 0; i < obsDomains.size(); i++ )
    {
	ll += pGrModel->GetFactor(obsDomains[i])->GetLogLik(m_pEvidence);
    }

    CPotential *pPot;
    int root = GetJTreeRootNode();
    pPot=m_pJTree->GetNodePotential(root);
    if( pPot->GetDistribFun()->GetDistributionType() == dtGaussian )
    {
	CGaussianDistribFun *pDistr = static_cast<CGaussianDistribFun *>(pPot->GetDistribFun());
	pDistr->UpdateMomentForm();
	float koeff = pDistr->GetCoefficient(0);
	ll += (float)log( koeff );
    }
    else
    {
	ll += m_norm < FLT_EPSILON ? (float)log( FLT_EPSILON ) : (float)log( m_norm );
    }

    return ll;
}
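
// A small usage sketch (pInf and pEv are hypothetical names): the
// log-likelihood is only defined after the collect phase of inference has
// been run for the evidence, e.g.
//
//     pInf->EnterEvidence( pEv );
//     float logLik = pInf->GetLogLik();
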
CPotential*
CGaussianCPD::ConvertWithEvidenceToPotential(const CEvidence* pEvidence,
					     int flagSumOnMixtureNode )const
{
    if( m_CorrespDistribFun->GetDistributionType() == dtGaussian )
    {
	    //need to convert to potential and after that add evidence
	    CPotential* potWithoutEv = ConvertToPotential();
	    CPotential* potWithEvid = potWithoutEv->ShrinkObservedNodes(pEvidence);
	    delete potWithoutEv;
	    return potWithEvid;
    }
    else //it means m_CorrespDistribFun->GetDistributionType == dtCondGaussian
    {
        //need to enter discrete & continuous evidence separately;
        //node types are created first - if all nodes are discrete,
        //or the only continuous nodes are observed ones of size 0,
        //the resulting distribution type is Tabular
        
        //collect information for enter discrete evidence & continuous
        int domSize = m_Domain.size();
        intVector obsDiscreteIndex;
        obsDiscreteIndex.reserve( domSize );
        //observed discrete values put into int vector
        intVector obsDiscrVals;
        obsDiscrVals.reserve( domSize );
        //collect information about Gaussian observed indices
        intVector obsGauIndex;
        obsGauIndex.reserve( domSize );
        //continuous observed values into vector of matrices
        pnlVector<C2DNumericDenseMatrix<float>*> obsGauVals;
        obsGauVals.reserve( domSize );
        //create matrix to store observed value of node
        C2DNumericDenseMatrix<float>* obsSelfVal = NULL;
        //create vectors to store temporary objects
        int i;
        int isTab;
        for( i = 0; i < domSize; i++ )
        {
            int curNum = m_Domain[i];
            if( pEvidence->IsNodeObserved(curNum) )
            {
                const CNodeType* nt = GetModelDomain()->GetVariableType( curNum );
                isTab = nt->IsDiscrete();
                if( isTab )
                {
                    obsDiscreteIndex.push_back( i );
                    obsDiscrVals.push_back( pEvidence->GetValue(curNum)->GetInt() );
                }
                else
                {
                    int contSize = nt->GetNodeSize();
                    //create matrices to call Enter continuous evidence
                    floatVector val;
                    val.resize(contSize);
                    const Value* vFromEv = pEvidence->GetValue(curNum);
                    for( int j = 0; j < contSize; j++ )
                    {
                        val[j] = vFromEv[j].GetFlt();
                    }
                    intVector dims;
                    dims.assign( 2, 1 );
                    dims[0] = contSize;
                    if( i == domSize - 1 )
                    {
                        obsSelfVal = C2DNumericDenseMatrix<float>::Create(
                            &dims.front(), &val.front());
                    }
                    else
                    {
                        //store only parent indices
                        obsGauIndex.push_back( i );
                        C2DNumericDenseMatrix<float>* obsGauVal =
                            C2DNumericDenseMatrix<float>::Create(
                            &dims.front(), &val.front() );
                        obsGauVals.push_back( obsGauVal );
                    }
                }
            }
        } //        for( i = 0; i < domSize; i++ )

        CModelDomain* pMD = GetModelDomain();
        
        CPotential* resPot = NULL;
        int isLastNodeObs = pEvidence->IsNodeObserved(m_Domain[m_Domain.size()-1]); 
        if( (obsDiscreteIndex.size() + obsGauIndex.size() == m_Domain.size()-1) && isLastNodeObs)
        {
            //result distribution is scalar
            obsDiscreteIndex.insert(obsDiscreteIndex.end(), obsGauIndex.begin(),
                obsGauIndex.end());
            //child node is observed
            obsDiscreteIndex.insert(obsDiscreteIndex.end(), domSize-1);  
            resPot = CScalarPotential::Create( m_Domain, GetModelDomain(), obsDiscreteIndex );
        }

        else
        {
            const CNodeType* nt;
            //if all discrete nodes are observed then distribution will be Gaussian (Bader - comment)
            int allDiscrObs = 1;
            int allContObs = 1;
            int i;
            int isTab;
            int isCon;
            for( i = 0; i < domSize; i++ )
            {
                int curNum = m_Domain[i];
                nt = GetModelDomain()->GetVariableType( curNum );
                isTab = nt->IsDiscrete();
                isCon = !(isTab);
                if( isTab )
                    if( !(pEvidence->IsNodeObserved(curNum)) )
                        allDiscrObs = 0;
                if( isCon )
                    if( !(pEvidence->IsNodeObserved(curNum)) )
                        allContObs = 0;
            }
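            // every continuous node is observed but some discrete nodes are
            // hidden: entering the evidence leaves a tabular distribution
            // over the hidden discrete part of the domain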
            if (allContObs && (!allDiscrObs) )
            {
                CCondGaussianDistribFun* withDiscrEv =
                    (static_cast<CCondGaussianDistribFun*>(m_CorrespDistribFun))->
                    EnterDiscreteEvidence(obsDiscreteIndex.size(),
                    &obsDiscreteIndex.front(), &obsDiscrVals.front(),
                    pMD->GetObsTabVarType() );
                
                CTabularDistribFun* resDistr = withDiscrEv->
                    EnterFullContinuousEvidence( obsGauIndex.size(),
                    &obsGauIndex.front(), obsSelfVal, &obsGauVals.front(),
                    pMD->GetObsGauVarType() );

                //need to unite gaussian and tabular observed index
                obsDiscreteIndex.insert(obsDiscreteIndex.end(), obsGauIndex.begin(),
                    obsGauIndex.end());
                //child node is observed
                obsDiscreteIndex.insert(obsDiscreteIndex.end(), domSize-1);
                resPot = CTabularPotential::Create(
                    &m_Domain.front(), m_Domain.size(), GetModelDomain(), NULL,
                    obsDiscreteIndex );
                resPot->SetDistribFun( resDistr );
                delete withDiscrEv;
                delete resDistr;
            }
            else
            {
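                // every discrete node is observed but some continuous nodes
                // are hidden: select the Gaussian component that corresponds
                // to the observed combination of discrete parents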
                if (allDiscrObs && !allContObs)
                {
                    intVector discParents;
                    
                    for ( i = 0; i < m_Domain.size(); i++)
                    {
                        nt = GetModelDomain()->GetVariableType( m_Domain[i] );
                        if (nt->IsDiscrete())
                            discParents.push_back(m_Domain[i]);
                    }
                    
                    int *parentComb = new int [discParents.size()];
                    
                    intVector pObsNodes;
                    pConstValueVector pObsValues;
                    pConstNodeTypeVector pNodeTypes;
                    pEvidence->GetObsNodesWithValues(&pObsNodes, &pObsValues, &pNodeTypes);
                    
                    int j;
                    int location;
                    for ( j = 0; j < discParents.size(); j++)
                    {
                        location = 
                            std::find(pObsNodes.begin(), pObsNodes.end(), discParents[j]) 
                            - pObsNodes.begin();
                        parentComb[j] = pObsValues[location]->GetInt();
                    }
                    
                    const CGaussianDistribFun* resDistr = 
                        static_cast<CCondGaussianDistribFun*>(m_CorrespDistribFun)->GetDistribution(parentComb);
                    
                    CDistribFun* newResDistr = resDistr->ConvertCPDDistribFunToPot();

                    obsGauIndex.insert(obsGauIndex.end(), obsDiscreteIndex.begin(),
                        obsDiscreteIndex.end());
                    intVector gauSubDomain;
                    for( j = 0; j < m_Domain.size(); j++)
                    {
                        nt = GetModelDomain()->GetVariableType( m_Domain[j] );
                        if(!(nt->IsDiscrete()))
                            gauSubDomain.push_back(m_Domain[j]);
                    }
                    resPot = CGaussianPotential::Create( &gauSubDomain.front(), 
                        gauSubDomain.size(), GetModelDomain());
                    resPot->SetDistribFun( newResDistr );
                    delete newResDistr;
                    delete [] parentComb;
                }
                else
                {
                   //can enter discrete evidence first if all continuous nodes are observed
                   //need to check whether all of them are observed!
                    CCondGaussianDistribFun* withDiscrEv =
                        (static_cast<CCondGaussianDistribFun*>(m_CorrespDistribFun))->
                        EnterDiscreteEvidence(obsDiscreteIndex.size(),
                        &obsDiscreteIndex.front(), &obsDiscrVals.front(),
                        pMD->GetObsTabVarType() );
                    //need to enter continuous evidence
                    CTabularDistribFun* resDistr = withDiscrEv->
                        EnterFullContinuousEvidence( obsGauIndex.size(),
                        &obsGauIndex.front(), obsSelfVal, &obsGauVals.front(),
                        pMD->GetObsGauVarType() );
                    //need to unite gaussian and tabular observed index
                    obsDiscreteIndex.insert(obsDiscreteIndex.end(), obsGauIndex.begin(),
                        obsGauIndex.end());
                    //child node is observed
                    obsDiscreteIndex.insert(obsDiscreteIndex.end(), domSize-1);
                    resPot = CTabularPotential::Create(
                        &m_Domain.front(), m_Domain.size(), GetModelDomain(), NULL,
                        obsDiscreteIndex );
                    resPot->SetDistribFun( resDistr );
                    delete withDiscrEv;
                    delete resDistr;
                }
            }
        }
        delete obsSelfVal;
        for( i = 0; i < obsGauVals.size(); i++ )
        {
            delete obsGauVals[i];
        }
        return resPot;
    }
}
bool CSamplingInfEngine::
ConvertingFamilyToPot( int node, const CEvidence* pEv )
{
    bool ret = false;
    CPotential* potToSample = m_potsToSampling[node];
    Normalization(potToSample);
    int i;
    if( !IsAllNdsTab() )
    {
	
	if( GetModel()->GetModelType() == mtBNet )
	{
	    
	    for( i = 0; i < m_environment[node].size(); i++ )
	    {            
		int num = m_environment[node][i];
		CPotential *pot1 = static_cast< CCPD* >( m_currentFactors[ num ] )
		    ->ConvertWithEvidenceToPotential(pEv);
		CPotential *pot2 = pot1->Marginalize(&node, 1);
		delete pot1;
		*potToSample *= *pot2;
		delete pot2;
	    }
	}
	
	else
	{
	    for( i = 0; i < m_environment[node].size(); i++ )
	    {
		int num = m_environment[node][i];
		CPotential *pot1 = static_cast< CPotential* >( m_currentFactors[ num ] )
		    ->ShrinkObservedNodes(pEv);
		CPotential *pot2 = pot1->Marginalize(&node, 1);
		delete pot1;
		*potToSample *= *pot2;
		delete pot2;
	    }
	}
	
    }
    else
    {
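	// all nodes are tabular: multiply the neighbouring factors into the
	// sampling potential directly on their table matrices via ReduceOp,
	// instead of building temporary potentials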
	
	CMatrix< float > *pMatToSample;
	pMatToSample = static_cast<CTabularDistribFun*>(potToSample->GetDistribFun())
	    ->GetMatrix(matTable);
	
	intVector dims;
	intVector vls;
	intVector domain;
	
	for( i = 0; i < m_environment[node].size(); i++ )
	{            
	    int num = m_environment[node][i];
	    m_currentFactors[ num ]->GetDomain(&domain);
	    GetObsDimsWithVls( domain, node, pEv, &dims, &vls); 
	    CMatrix< float > *pMat;
	    pMat = static_cast<CTabularDistribFun*>(m_currentFactors[ num ]->
		GetDistribFun())->GetMatrix(matTable);
	    pMat->ReduceOp( &dims.front(), dims.size(), 2, &vls.front(),
		pMatToSample, PNL_ACCUM_TYPE_MUL );
	    dims.clear();
	    vls.clear();
	    domain.clear();
	    
	}
    }
    
    //check for non zero elements
    CMatrix<float> *pMat;
    if( potToSample->GetDistributionType()==dtTabular )
    {	
	pMat = potToSample->GetDistribFun()->GetMatrix(matTable);
    }
    else
    {
	CGaussianDistribFun* pDistr = static_cast<CGaussianDistribFun*>(potToSample->GetDistribFun());
	if(pDistr->GetMomentFormFlag())
	{
	    pMat = pDistr->GetMatrix(matCovariance);
	}
	else
	{
	    pMat = pDistr->GetMatrix(matK);

	}
    }
    CMatrixIterator<float>* iter = pMat->InitIterator();
    for( ; pMat->IsValueHere( iter ); pMat->Next(iter) )
    {
	
	if(*(pMat->Value( iter )) > FLT_EPSILON)
	{
	    ret = true;
	    break;
	}
    }
    delete iter;
    return ret;
}
Example #8
int testSEGaussian()
{
    PNL_USING
   
    int i;
    int ret = TRS_OK;
    float eps = 1e-4f;
    //create Gaussian distribution (inside the potential) and try to multiply
    //it by Delta
    
    //create Model Domain
    int nSimNodes = 3;
    CNodeType simNT = CNodeType(0, 2);
    CModelDomain* pSimDomain = CModelDomain::Create( nSimNodes, simNT );
    //create 2 potentials
    intVector dom;
    dom.assign(3,0);
    dom[1] = 1;
    dom[2] = 2;
    floatVector mean;
    mean.assign(6, 0);
    floatVector cov;
    cov.assign(36, 0.1f);
    for( i = 0; i < 6; i++ )
    {
        mean[i] = 1.1f*i;
        cov[i*7] = 2.0f;
    }

	CGaussianPotential* pPot = CGaussianPotential::Create( dom, pSimDomain, 1,
        mean, cov, 1.0f );

    //create gaussian CPD with gaussian parent
    const pConstNodeTypeVector* pTypes = pPot->GetArgType();
    //create data weights
    floatVector weights1;
    weights1.assign(4, 1.0f);
    floatVector weights2;
    weights2.assign(4, 2.0f);
    const float* weights[] = { &weights1.front(), &weights2.front()};
    CGaussianDistribFun* pGauCPD = CGaussianDistribFun::CreateInMomentForm( 0,
        3, &pTypes->front(), &mean.front(), &cov.front(), weights );

    pPot->Dump();
    pPot->Normalize();
    //try to get multiplied delta
    intVector pos;
    floatVector valuesDelta;
    intVector offsets;
    pPot->GetMultipliedDelta(&pos, &valuesDelta, &offsets);
    if( pos.size() != 0 )
    {
        ret = TRS_FAIL;
    }


    //enter evidence to the pot and after that expand and compare results
    //create evidence object
/*    valueVector obsVals;
    obsVals.assign(6,Value(0) );
    obsVals[0].SetFlt(1.0f);
    obsVals[1].SetFlt(1.5f);
    obsVals[2].SetFlt(2.0f);
    obsVals[3].SetFlt(2.5f);
    obsVals[4].SetFlt(3.0f);
    obsVals[5].SetFlt(3.5f);
    CEvidence* pEvid = CEvidence::Create( pSimDomain, 3, &dom.front(), obsVals );
    CPotential* pShrPot = pPot->ShrinkObservedNodes( pEvid );*/
    
    
    CGaussianPotential* pCanonicalPot = CGaussianPotential::Create( dom,
        pSimDomain, 0, mean, cov, 1.0f );
    CMatrix<float>* matrK = pCanonicalPot->GetMatrix(matK);
    intVector multIndex;
    multIndex.assign(2,5);
    matrK->SetElementByIndexes( 3.0f, &multIndex.front() );
    CMatrix<float>* matrH = pCanonicalPot->GetMatrix(matH);
    multIndex[0] = 0;
    matrH->SetElementByIndexes(0.0f, &multIndex.front());
    pPot->ConvertToSparse();
    pPot->ConvertToDense();
    //create other Gaussian potential for division
    i = 1;
    floatVector meanSmall;
    meanSmall.assign(2, 1.0f);
    floatVector covSmall;
    covSmall.assign(4, 0.0f);
    covSmall[0] = 1.0f;
    covSmall[3] = 1.0f;
    CGaussianPotential* pSmallPot = CGaussianPotential::Create( &i, 1,
        pSimDomain, 1, &meanSmall.front(), &covSmall.front(), 1.0f );
    //divide by distribution in moment form
    (*pCanonicalPot) /= (*pSmallPot);

    //create big unit function distribution and marginalize it
    CGaussianPotential* pBigUnitPot = 
        CGaussianPotential::CreateUnitFunctionDistribution(dom, pSimDomain, 1);
	CGaussianPotential* pCloneBigUniPot = static_cast<CGaussianPotential*>(
		pBigUnitPot->CloneWithSharedMatrices());
	if( !pCloneBigUniPot->IsFactorsDistribFunEqual(pBigUnitPot, eps) )
	{
		ret = TRS_FAIL;
	}
    CPotential* pMargUniPot = pBigUnitPot->Marginalize(&dom.front(), 1, 0);
    (*pBigUnitPot) /= (*pSmallPot);
    (*pBigUnitPot) *= (*pSmallPot);
    
    //check if there are some problems
    static_cast<CGaussianDistribFun*>(pBigUnitPot->GetDistribFun())->CheckCanonialFormValidity();

    if( pBigUnitPot->IsDistributionSpecific() != 1 )
    {
        ret = TRS_FAIL;
    }

    (*pBigUnitPot) /= (*pCanonicalPot);
    (*pBigUnitPot) *= (*pCanonicalPot);

    static_cast<CGaussianDistribFun*>(pBigUnitPot->GetDistribFun())->CheckCanonialFormValidity();

    if( pBigUnitPot->IsDistributionSpecific() != 1 )
    {
        ret = TRS_FAIL;
    }

    static_cast<CGaussianDistribFun*>(pSmallPot->GetDistribFun())->
        UpdateCanonicalForm();
    (*pPot) /= (*pSmallPot);
    pSmallPot->SetCoefficient(0.0f,1);
    pSmallPot->Dump();

    //create canonical potential without coefficient
    i = 0;

    CDistribFun* pCopyPotDistr = pPot->GetDistribFun()->ConvertCPDDistribFunToPot();
    CGaussianPotential* pCanSmallPot = CGaussianPotential::Create( &i, 1,
        pSimDomain, 0, &meanSmall.front(), &covSmall.front(), 1.0f );
    CGaussianPotential* pCanPotCopy = 
        static_cast<CGaussianPotential*>(pCanSmallPot->Clone());
    CGaussianPotential* pCanPotCopy1 = 
        static_cast<CGaussianPotential*>(pCanPotCopy->CloneWithSharedMatrices());

    (*pPot) /= (*pCanSmallPot);
    (*pPot) *= (*pCanSmallPot);
    //can compare results, if we want
    CDistribFun* pMultDivRes = pPot->GetDistribFun();
    float diff = 0;
    if( !pMultDivRes->IsEqual(pCopyPotDistr, eps,1, &diff) )
    {
        ret = TRS_FAIL;
        std::cout<<"the diff is "<<diff<<std::endl;
    }
    delete pCopyPotDistr;
    
    //create delta distribution
    floatVector deltaMean;
    deltaMean.assign( 2, 1.5f );
    i = 0;
    CGaussianPotential* pDeltaPot = CGaussianPotential::CreateDeltaFunction( 
        &i, 1, pSimDomain, &deltaMean.front(), 1 );
    
    CGaussianDistribFun* pDeltaDistr = static_cast<CGaussianDistribFun*>(
        pDeltaPot->GetDistribFun());
    pDeltaDistr->CheckMomentFormValidity();
    pDeltaDistr->CheckCanonialFormValidity();
    pDeltaPot->Dump();


    //multiply some potential by delta
    (*pCanSmallPot) *= (*pDeltaPot);

    (*pPot) *= (*pDeltaPot);
    //(*pPot) *= (*pDeltaPot);
    
    pPot->GetMultipliedDelta(&pos, &valuesDelta, &offsets);
    (*pCanonicalPot) *= (*pDeltaPot);
    //marginalize this distribFun multiplied by delta
    intVector margDims;
    margDims.assign(2,1);
    margDims[1] = 2;
    CPotential* pMargPot = pPot->Marginalize( &margDims.front(), 2 );
    i = 0;
    CPotential* pSmallMargPot = pPot->Marginalize(&i, 1);
    //marginalize in canonical form
    CPotential* pMargCanPot = pCanonicalPot->Marginalize( &margDims.front(), 2 );
    CPotential* pSmallCanPot = pCanonicalPot->Marginalize(&i,1);
    
    //create unit function distribution in canonical form
    i = 0;
    CGaussianPotential* pUnitPot = 
        CGaussianPotential::CreateUnitFunctionDistribution( &i, 1, pSimDomain,
        1);
    pUnitPot->Dump();
    CGaussianDistribFun* pUnitDistr = static_cast<CGaussianDistribFun*>(
        pUnitPot->GetDistribFun());
    pUnitDistr->CheckCanonialFormValidity();
    pUnitDistr->CheckMomentFormValidity();

    (*pPot) *= (*pUnitPot);

    if( pUnitPot->IsFactorsDistribFunEqual(pBigUnitPot, eps, 1) )
    {
        ret = TRS_FAIL;
    }

    deltaMean.resize(6);
    deltaMean[2] = 2.5f;
    deltaMean[3] = 2.5f;
    deltaMean[4] = 3.5f;
    deltaMean[5] = 3.5f;
    CGaussianPotential* pBigDeltaPot = CGaussianPotential::CreateDeltaFunction( 
        dom, pSimDomain, deltaMean, 1 );
    CGaussianPotential* pCloneBigDeltaPot = static_cast<CGaussianPotential*>(
		pBigDeltaPot->CloneWithSharedMatrices());
    //we can shrink observed nodes in this potential
    valueVector vals;
    vals.resize(2);
    vals[0].SetFlt(1.5f);
    vals[1].SetFlt(1.5f);
    i = 0;
    CEvidence* pDeltaEvid = CEvidence::Create( pSimDomain, 1, &i,vals ); 
    CGaussianPotential* pShrGauDeltaPot = static_cast<CGaussianPotential*>(
        pBigDeltaPot->ShrinkObservedNodes( pDeltaEvid ));
    delete pDeltaEvid;
    pShrGauDeltaPot->Dump();
    delete pShrGauDeltaPot;
    CPotential* pSmallDeltaMarg = pBigDeltaPot->Marginalize( &dom.front(), 1, 0 );
    pSmallDeltaMarg->Normalize();
    pDeltaPot->Normalize();
    if( !pSmallDeltaMarg->IsFactorsDistribFunEqual( pDeltaPot, eps, 1 ) )
    {
        ret = TRS_FAIL;
    }
    //call operator = for delta distributions
    (*pSmallDeltaMarg) = (*pDeltaPot);

    //call operator = for canonical and delta distribution
    (*pCanPotCopy) = (*pUnitPot);

    //call operator = for canonical and unit distribution
    (*pCanPotCopy1) = (*pDeltaPot);

    (*pUnitPot) = (*pUnitPot);
    

    (*pPot) *= (*pBigDeltaPot);

    (*pCloneBigDeltaPot) *= (*pDeltaPot);

	if( !pCloneBigDeltaPot->IsFactorsDistribFunEqual(pBigDeltaPot, eps) )
	{
		ret = TRS_FAIL;
	}

    //we can create a potential which is almost a delta distribution,
    //but whose covariance matrix contains near-zero values
    floatVector almostDeltaCov;
    almostDeltaCov.assign(4, 0.0f);
    almostDeltaCov[0] = 0.00001f;
    almostDeltaCov[3] = 0.00001f;
    i = 0;
    CGaussianPotential* pAlmostDeltaPot = CGaussianPotential::Create( &i, 1, 
        pSimDomain, 1, &deltaMean.front(), &almostDeltaCov.front(), 1.0f );
    if( !pDeltaPot->IsFactorsDistribFunEqual( pAlmostDeltaPot, eps, 0 ) )
    {
        ret = TRS_FAIL;
    }
    if( !pAlmostDeltaPot->IsFactorsDistribFunEqual( pDeltaPot, eps, 0 ) )
    {
        ret = TRS_FAIL;
    }
	(*pCloneBigUniPot ) = ( *pPot );
	if( !(pCloneBigUniPot->IsFactorsDistribFunEqual(pPot, eps)) )
	{
		ret = TRS_FAIL;
	}
	(*pCloneBigDeltaPot) = (*pPot);
	if( !(pCloneBigDeltaPot->IsFactorsDistribFunEqual(pPot, eps))  )
	{
		ret = TRS_FAIL;
	}

    delete pCanPotCopy;
    delete pCanPotCopy1;
    delete pAlmostDeltaPot;
    delete pMargUniPot;
	delete pCloneBigUniPot;
    delete pBigUnitPot;
    delete pCanSmallPot;
    delete pDeltaPot; 
    delete pUnitPot;
	delete pCloneBigDeltaPot;
    delete pBigDeltaPot;
    delete pMargCanPot;
    delete pSmallCanPot;
    delete pMargPot;
    delete pSmallMargPot;
    delete pCanonicalPot;
    //delete pShrPot;
    //delete pEvid;
	delete pGauCPD;
    delete pPot;
    delete pSimDomain;
    return trsResult( ret, ret == TRS_OK ? "No errors" : 
                        "Bad test on SEGaussian");

}
Example #9
//-----------------------------------------------------------------------------
CPotential* CSoftMaxCPD::ConvertWithEvidenceToGaussianPotential(
    const CEvidence* pEvidence,
    floatVector MeanContParents, 
    C2DNumericDenseMatrix<float>* CovContParents,
    const int *parentIndices,
    int flagSumOnMixtureNode ) const
{
    int SoftMaxSize = GetSoftMaxSize();
    if (SoftMaxSize != 2)
    {
        PNL_THROW(CNotImplemented, "It is not sigmoid");
    }
    else
    {
        if (m_CorrespDistribFun->GetDistributionType() == dtSoftMax)
        {
            CPotential* pot = ConvertToGaussianPotential(pEvidence, 
                m_CorrespDistribFun, MeanContParents, CovContParents);

            CPotential *pot2 = NULL;

            int domSize = pot->GetDomainSize();
            bool IsAllContUnobserved = true;
            const pConstNodeTypeVector* ntVec = pot->GetDistribFun()->GetNodeTypesVector();
            for( int i = 0; i < domSize-1; i++  )    
            {
              intVector Domain;
              pot->GetDomain(&Domain);
              int curNode =  Domain[i];
              if( (pEvidence->IsNodeObserved(curNode)))
              {
                if( !(*ntVec)[i]->IsDiscrete() )
                {
                  IsAllContUnobserved = false;
                }
              }
            }

            if ((pot->GetDomainSize() >= 3)&&(!IsAllContUnobserved))
            {
              pot2 = pot->ShrinkObservedNodes(pEvidence);
            }
            else
            {
              intVector Domain;
              pot->GetDomain(&Domain);
              pot2 = pot->Marginalize(&(Domain[0]), 1);
            }
            delete pot;
            return pot2;
        }
        else //it means m_CorrespDistribFun->GetDistributionType == dtCondSoftMax
        {
            int i;
            const CSoftMaxDistribFun* dtSM;

            dtSM = 
                static_cast<CCondSoftMaxDistribFun*>(m_CorrespDistribFun)->
                GetDistribution(parentIndices);
            
            intVector pObsNodes;
            pConstValueVector pObsValues;
            pConstNodeTypeVector pNodeTypes;
            pEvidence->GetObsNodesWithValues(&pObsNodes, &pObsValues, &pNodeTypes);
            
            int r = -1;
            for (i = 0; i < pObsNodes.size(); i++)
            {
                if (m_Domain[m_Domain.size()-1] == pObsNodes[i])
                {
                    r = pObsValues[i]->GetInt();
                    break;
                }
            }
            if (r == -1)
            {
                PNL_THROW(CNotImplemented, "the child node is not observed in the evidence");
            }
            
            CDistribFun *gauFactData = const_cast<CSoftMaxDistribFun*>(dtSM)->
                ConvertCPDDistribFunToPotential(MeanContParents, CovContParents, r);
            
            intVector gauSubDomain;
            const CNodeType *nt;
            for(i = 0; i < m_Domain.size(); i++)
            {
                nt = GetModelDomain()->GetVariableType( m_Domain[i] );
                if(!(nt->IsDiscrete()))
                {
                    gauSubDomain.push_back(m_Domain[i]);
                }
            }
            
            intVector obsIndex;
            for( i = 0; i < gauSubDomain.size(); i++ )
            {
                if( pEvidence->IsNodeObserved(gauSubDomain[i]) )
                {
                    obsIndex.push_back( i );
                }
            }
            
            CGaussianPotential *resFactor = CGaussianPotential::Create(&gauSubDomain.front(), 
                gauSubDomain.size(), GetModelDomain());
            
            resFactor->SetDistribFun( gauFactData );


            CPotential *pot = NULL;

            int domSize = resFactor->GetDomainSize();
            bool IsAllContUnobserved = true;
            const pConstNodeTypeVector* ntVec = resFactor->GetDistribFun()->GetNodeTypesVector();
            for( i = 0; i < domSize-1; i++  )    
            {
              intVector Domain;
              resFactor->GetDomain(&Domain);
              int curNode =  Domain[i];
              if( (pEvidence->IsNodeObserved(curNode)))
              {
                if( !(*ntVec)[i]->IsDiscrete() )
                {
                  IsAllContUnobserved = false;
                }
              }
            }
            if ((resFactor->GetDomainSize() >= 3)&&(!IsAllContUnobserved))
            {
              pot = resFactor->ShrinkObservedNodes(pEvidence);
            }
            else
            {
              intVector Domain;
              resFactor->GetDomain(&Domain);
              pot = resFactor->Marginalize(&(Domain[0]), 1);
            }
            delete resFactor;
            
            delete gauFactData;
            return pot;
            
        }
    }
}
int CJtreeInfEngine::GetDataForMargAndMult(const int source, const int sink,
					   pnl::CNumericDenseMatrix< float > **sorceMatrix, int **dims_to_keep,
					   int &num_dims_to_keep, pnl::CNumericDenseMatrix< float > **sepMatrix, 
					   pnl::CNumericDenseMatrix< float > **sinkMatrix, int **dims_to_mul,
					   int &num_dims_to_mul)
{
    // bad-args check
    PNL_CHECK_RANGES(source, 0, m_pJTree->GetNumberOfNodes() - 1);
    PNL_CHECK_RANGES(sink, 0, m_pJTree->GetNumberOfNodes() - 1);
    // bad-args check end

    if (source == sink)
    {
	PNL_THROW(CInvalidOperation, " source and sink should differ ");
    }
    if (!m_pJTree->GetGraph()->IsExistingEdge(source, sink))
    {
	PNL_THROW(CInvalidOperation, " there is no edge between source and sink");
    }

    CPotential *potSource = m_pJTree->GetNodePotential(source),
	*potSink   = m_pJTree->GetNodePotential(sink);

    int numNdsInSourceDom, numNdsInSinkDom;
    const int *sourceDom, *sinkDom;
    potSource->GetDomain(&numNdsInSourceDom, &sourceDom);
    potSink->GetDomain(&numNdsInSinkDom, &sinkDom);

    CPotential *potSep = m_pJTree->GetSeparatorPotential(source, sink);
    int numNdsInSepDom;
    const int *sepDom;
    potSep->GetDomain(&numNdsInSepDom, &sepDom);

    EDistributionType sepDistType = potSep->GetDistributionType();

    num_dims_to_keep = numNdsInSepDom;
    *dims_to_keep = new int [num_dims_to_keep];

    int* pEquivPos;
    for (int i = 0; i < numNdsInSepDom; i++)
    {
	pEquivPos = (int*)std::find(sourceDom, sourceDom + numNdsInSourceDom, sepDom[i]);
	if (pEquivPos != sourceDom + numNdsInSourceDom)
	{
	    (*dims_to_keep)[i] = (pEquivPos - sourceDom);
	}
	else 
	{
	    PNL_THROW( CInconsistentSize, "small domain isn't subset of domain")
		return 0;
	}
	//check that pSmallDom is m_Domain's subset
    }
    switch (sepDistType)
    {
    case dtTabular:
	{
	    CDistribFun *sepDistrFun = potSep -> GetDistribFun();
	    CDistribFun *sourceDistrFun = potSource -> GetDistribFun();
	    CDistribFun *sinkDistrFun = potSink -> GetDistribFun();
	    if (!sourceDistrFun->IsValid())
	    {
		PNL_THROW( CInconsistentType, "MarginalizeData is invalid" )
	    }

	    //check if distribution of potSource is Unit Function - do nothing
	    if(sourceDistrFun->IsDistributionSpecific())
	    {
		return 0;
	    }

	    if ( sepDistrFun->IsDistributionSpecific() )
	    {
		sepDistrFun->SetUnitValue(0);
	    }


	    *sorceMatrix = static_cast<CNumericDenseMatrix<float> *>(sourceDistrFun->
		GetMatrix(matTable));
	    *sepMatrix = static_cast<CNumericDenseMatrix<float> *>(sepDistrFun->
		GetMatrix(matTable));

	    EDistributionType dtsink = sinkDistrFun->GetDistributionType();
	    if ((dtsink != dtTabular) && (dtsink != dtScalar))
	    {
		PNL_THROW(CInvalidOperation, "we can multiply only tabulars")
	    }

	    int location;
	    num_dims_to_mul = numNdsInSepDom;
	    *dims_to_mul = new int [num_dims_to_mul];

	    for (int i = 0; i < numNdsInSepDom; i++)
	    {
		location = 
		    std::find(sinkDom, sinkDom + numNdsInSinkDom, sepDom[i]) - sinkDom;
		if (location < numNdsInSinkDom)
		{
		    (*dims_to_mul)[i] = location;
		}
	    }

	    if(sinkDistrFun->IsDistributionSpecific())
	    {
		sinkDistrFun->SetUnitValue(0);
		floatVector *aValue = 
		    (floatVector *)((CDenseMatrix<float>*)sinkDistrFun->
		    GetMatrix(matTable))->GetVector();
		aValue->assign(aValue->size(), 1.0f);
	    }

	    *sinkMatrix = static_cast<CNumericDenseMatrix<float>*>(sinkDistrFun->
		GetMatrix(matTable));

	    break;
	}
    case dtScalar:
	{
	    // propagation isn't needed
	    return 0;
	}
    default:
	{
	    PNL_THROW(CNotImplemented, "we have only Tabular now");
	    return 0;
	}
    }

    if (numNdsInSepDom == 0)
    {
	PNL_THROW(COutOfRange, "domain size should be positive");
    }
    return 1;
}
void CJtreeInfEngine::
MarginalizeCliqueToQuery( int clqNum, int querySz, const int *query,
			 int notExpandJPD )
{
    // bad-args check
    PNL_CHECK_RANGES( clqNum, 0, m_pJTree->GetNumberOfNodes() - 1 );
    PNL_CHECK_RANGES( querySz, 1, m_pGraphicalModel->GetNumberOfNodes() );
    PNL_CHECK_IS_NULL_POINTER(query);
    // bad-args check end

    // Note: can't call expand() for potentials that contain continuous
    // observed nodes in the domain, because those would have to be expanded
    // into a mixture of Gaussians, which we don't support right now.

    delete m_pQueryJPD;
    m_pQueryJPD = NULL;

    delete m_pPotMPE;
    m_pPotMPE = NULL;

    delete m_pEvidenceMPE;
    m_pEvidenceMPE = NULL;


    bool bExpandAllowed = true;

    CPotential* clqPotWithQuery = m_pJTree->GetNodePotential(clqNum);
    EDistributionType dtClqWithQuery = clqPotWithQuery->GetDistributionType();

    if( std::find_first_of( query, query + querySz,
	m_actuallyObsNodes.begin(),	m_actuallyObsNodes.end() )
	!= ( query + querySz ) )
    {
	const int *queryIt = query, *query_end = query + querySz;

	for( ; queryIt != query_end; ++queryIt )
	{
	    if( std::find( m_actuallyObsNodes.begin(),
		m_actuallyObsNodes.end(), *queryIt )
		!= m_actuallyObsNodes.end() )
	    {
		int shrNodebDiscrete = 
		    m_pGraphicalModel->GetNodeType(*queryIt)->IsDiscrete();
		if(((dtClqWithQuery == dtTabular)&&( !shrNodebDiscrete ))
		    ||(( dtClqWithQuery == dtGaussian )&&( shrNodebDiscrete )))
		{
		    bExpandAllowed = false;
		    break;
		}
	    }
	}
    }

    if( ( bExpandAllowed == false ) && ( notExpandJPD == false ) )
    {
	PNL_THROW( CAlgorithmicException,
	    " JPD expansion not possible technically " );
    }

    bExpandAllowed = notExpandJPD ? false : bExpandAllowed;

    CPotential *pMargJPot = clqPotWithQuery->Marginalize( query, querySz,
	m_bMaximize );

    if( bExpandAllowed )
    {
	CPotential *pExpObsJPot = pMargJPot->ExpandObservedNodes(m_pEvidence);

	if( m_bMaximize )
	{
	    if( pMargJPot->GetDistributionType() == dtScalar )
	    {
		m_pPotMPE = pExpObsJPot->GetNormalized();
		m_pEvidenceMPE = m_pPotMPE->GetMPE();
	    }
	    else
	    {
		m_pPotMPE      = pMargJPot->GetNormalized();
		m_pEvidenceMPE = m_pPotMPE->GetMPE();
	    }
	}
	else
	{
	    m_pQueryJPD = pExpObsJPot->GetNormalized();
	}

	delete pExpObsJPot;
    }
    else
    {
	if( m_bMaximize )
	{
	    m_pPotMPE      = pMargJPot->GetNormalized();
	    m_pEvidenceMPE = m_pPotMPE->GetMPE();
	}
	else
	{
	    m_pQueryJPD = pMargJPot->GetNormalized();
	}
    }
    if((!m_bMaximize)&&(m_pQueryJPD->GetDistributionType() == dtGaussian))
    {
	static_cast<CGaussianDistribFun*>(
	    m_pQueryJPD->GetDistribFun())->UpdateMomentForm();
    }
    if((m_bMaximize)&&(m_pPotMPE->GetDistributionType() == dtGaussian))
    {
	static_cast<CGaussianDistribFun*>(
	    m_pPotMPE->GetDistribFun())->UpdateMomentForm();
    }
    delete pMargJPot;
}
void CJtreeInfEngine::PropagateBetweenClqs(int source, int sink, bool isCollect)
{
    PNL_CHECK_RANGES( source, 0, m_pJTree->GetNumberOfNodes() - 1 );
    PNL_CHECK_RANGES( sink,   0, m_pJTree->GetNumberOfNodes() - 1 );

    if (source == sink)
    {
	PNL_THROW(CInvalidOperation, " source and sink should differ ");
    }

    if (!m_pJTree->GetGraph()->IsExistingEdge(source, sink))
    {
	PNL_THROW(CInvalidOperation,
	    " there is no edge between source and sink ");
    }

    bool isDense = true;
    if(!m_pJTree->GetNodeType(source)->IsDiscrete() || !m_pJTree->GetNodeType(sink)->IsDiscrete())
    {
	isDense = false;
    }
    CPotential *potSource = m_pJTree->GetNodePotential(source),
	*potSink   = m_pJTree->GetNodePotential(sink);

    if(potSource->IsSparse() || potSink->IsSparse())
    {
	isDense = false;
    }

    // check that nodes source and sink are discrete
    if(isDense && !m_bMaximize)
    {
	pnl::CNumericDenseMatrix< float > *sorceMatrix, *sepMatrix, *sinkMatrix;
	int *dims_to_keep, *dims_to_mul;
	int num_dims_to_keep, num_dims_to_mul;

	if (GetDataForMargAndMult(source, sink, &sorceMatrix, &dims_to_keep,
	    num_dims_to_keep, &sepMatrix, &sinkMatrix, &dims_to_mul, num_dims_to_mul))
	{
	    DoPropagate(sorceMatrix, dims_to_keep,
		num_dims_to_keep, sepMatrix, sinkMatrix, dims_to_mul, num_dims_to_mul, isCollect);
	    delete [] dims_to_keep;
	    delete [] dims_to_mul;
	}
	else
	{
	    CPotential *potSink = m_pJTree->GetNodePotential(sink);
	    potSink->Normalize();
	}
    }
    else
    {
	int       numNdsInSepDom;
	const int *sepDom;

	int       numNdsInSDom;
	const int *sDom;
	potSource->GetDomain( &numNdsInSDom, &sDom );
	CPotential *potSep    = m_pJTree->GetSeparatorPotential( source, sink );
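
	// HUGIN-style update: project the source clique onto the separator,
	// divide the new separator potential by the old one, multiply the
	// ratio into the sink clique, then store the new separator potential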

	CPotential *newPotSep, *updateRatio;

	potSep->GetDomain( &numNdsInSepDom, &sepDom );

	newPotSep = potSource->Marginalize( sepDom, numNdsInSepDom, m_bMaximize );

	updateRatio = newPotSep->Divide(potSep);

	*potSink *= *updateRatio;

	potSink->Normalize();

	potSep->SetDistribFun(newPotSep->GetDistribFun());

	delete newPotSep;
	delete updateRatio;
    }
}
void CJtreeInfEngine::MarginalNodes( const int *query, int querySz, int notExpandJPD )
{
    // bad-args check
    PNL_CHECK_IS_NULL_POINTER(query);
    PNL_CHECK_RANGES( querySz, 1, m_pGraphicalModel->GetNumberOfNodes() );
    // bad-args check end
    /*
    // the following should be working differently for the case of doing the 
    // whole EnterEvidence procedure or just CollectEvidence for the root node
    if( ( m_lastOpDone != opsDistribute )
    && ( m_lastOpDone != opsMargNodes ) )
    {
    if( m_lastOpDone != opsCollect )
    {
    PNL_THROW( CInvalidOperation,
    " cannot perform marginalization, infEngine inconsistent " );
    }

    int       numOfClqsContQuery;
    const int *clqsContQuery;

    m_pOriginalJTree->GetClqNumsContainingSubset( querySz, query,
    &numOfClqsContQuery, &clqsContQuery );

    PNL_CHECK_FOR_ZERO(numOfClqsContQuery);

    if( std::find( clqsContQuery, clqsContQuery + numOfClqsContQuery,
    m_JTreeRootNode ) == clqsContQuery + numOfClqsContQuery )
    {
    PNL_THROW( CInvalidOperation,
    " cannot marginalize to the non-root-clq nodes set " );
    }

    //////// this is to debug
    for( int i = 0; i < numOfClqsContQuery; ++i )
    {
    CPotential *pJPot = m_pJTree->GetNodePotential(clqsContQuery[i])
    ->Marginalize( query, querySz );

    CPotential *pJPot1 = pJPot->GetNormalized();

    pJPot1->Dump();

    delete pJPot;
    delete pJPot1;
    }
    ///////////////////////////////////////////////////////

    MarginalizeCliqueToQuery( m_JTreeRootNode, querySz, query );

    m_lastOpDone = opsMargNodes;
    }
    else
    {
    */
    int numOfClqsContQuery;
    const int *clqsContQuery;

    m_pJTree->GetClqNumsContainingSubset( querySz, query,
	&numOfClqsContQuery, &clqsContQuery );

    if(numOfClqsContQuery)
    {
	if( std::find( clqsContQuery, clqsContQuery + numOfClqsContQuery,
	    m_JTreeRootNode ) != ( clqsContQuery + numOfClqsContQuery ) )
	{
	    MarginalizeCliqueToQuery( m_JTreeRootNode, querySz, query, notExpandJPD );
	}
	else
	{
	    MarginalizeCliqueToQuery( *clqsContQuery, querySz, query, notExpandJPD );
	}
    }
    else
    {
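	// the query is not contained in any single clique: shrink the cliques
	// onto the relevant nodes, merge them into one potential, and then
	// marginalize it down to the query if the domains differ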
	const int* clqDomain;
	int   clqSize;
	CPotential *resPot = NULL;
	delete m_pQueryJPD;
	m_pQueryJPD = NULL;

	ShrinkJTreeCliques(querySz, const_cast<int*>(query));
	resPot = MergeCliques(querySz, const_cast<int*>(query));
	resPot->GetDomain(&clqSize, &clqDomain);
	if( !pnlIsIdentical(querySz, const_cast<int*>(query), clqSize, const_cast<int*>(clqDomain)) )
	{
	    m_pQueryJPD = resPot->Marginalize(const_cast<int*>(query), querySz);
	}
	else
	{
	    m_pQueryJPD = static_cast<CPotential*>(resPot->Clone());
	}
	m_pQueryJPD->Normalize();
	delete resPot;
    }
}
CPotential* CJtreeInfEngine::MergeCliques(int domSize, int* Domain)
{
    int numNodes = m_pJTree->GetNumberOfNodes();
    potsPVector	vPots(numNodes, (CPotential*)0);
    int i;
    const int*  clqDomain;
    int			clqSize;
    const int*  sepDomain;
    int			sepSize;

    const int    *nbr, *nbrs_end;
    int          numOfNbrs;
    const int    *nbrs;
    const ENeighborType *nbrsTypes;

    intVector::const_iterator    sourceIt, source_end;
    intVecVector::const_iterator layerIt     = m_collectSequence.begin(),
	collSeq_end = m_collectSequence.end();

    const CGraph *pGraph = m_pJTree->GetGraph();

    intVector  nodesSentMessages;
    intVector  tmpV;
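
    // first pass: restrict every clique that survives shrinking to the query
    // nodes plus the nodes it shares with its remaining neighbours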

    for( ; layerIt != collSeq_end; ++layerIt )
    {
	for( sourceIt = layerIt->begin(), source_end = layerIt->end();
	    sourceIt != source_end; ++sourceIt )
	{
	    if( !m_NodesAfterShrink[*sourceIt] ) continue;

	    pGraph->GetNeighbors( *sourceIt, &numOfNbrs, &nbrs, &nbrsTypes );
	    tmpV.assign(Domain, Domain+domSize);

	    for( nbr = nbrs, nbrs_end = nbrs + numOfNbrs; nbr != nbrs_end;
		++nbr )
	    {
		if( !m_NodesAfterShrink[*nbr] ) continue;
		m_pJTree->GetSeparatorDomain(*sourceIt, *nbr, &sepSize, &sepDomain);
		tmpV = pnlSetUnion(sepSize, const_cast<int*>(sepDomain), tmpV.size(), &tmpV.front());
	    }
	    m_pJTree->GetNodeContent(*sourceIt, &clqSize, &clqDomain);
	    tmpV = pnlIntersect(clqSize, const_cast<int*>(clqDomain), tmpV.size(), &tmpV.front());
	    if( !pnlIsIdentical(tmpV.size(), &tmpV.front(), clqSize, const_cast<int*>(clqDomain)) )
	    {
		vPots[*sourceIt] = m_pJTree->GetNodePotential(*sourceIt)->Marginalize(tmpV);
	    }
	    else
	    {
		vPots[*sourceIt] = static_cast<CPotential*>(m_pJTree->GetNodePotential(*sourceIt)->Clone());
	    }
	}
    }

    intVector bigDomain;
    layerIt = m_collectSequence.begin();
    nodesSentMessages.assign(numNodes, false);
    CPotential* tPot;
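    // second pass: combine the restricted potentials along the collect
    // sequence, dividing by the corresponding separator potentials, until
    // the accumulated domain covers the whole query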
    for( ; layerIt != collSeq_end; ++layerIt )
    {
	for( sourceIt = layerIt->begin(), source_end = layerIt->end();
	    sourceIt != source_end; ++sourceIt )
	{
	    if( !m_NodesAfterShrink[*sourceIt] )continue;
	    pGraph->GetNeighbors( *sourceIt, &numOfNbrs, &nbrs, &nbrsTypes );
	    for( nbr = nbrs, nbrs_end = nbrs + numOfNbrs; nbr != nbrs_end; ++nbr )
	    {
		if( !nodesSentMessages[*nbr] && m_NodesAfterShrink[*nbr] )
		{
		    CPotential* pPot = vPots[*nbr];
		    CPotential* cPot = vPots[*sourceIt];
		    CPotential* bigPot = pnlMultiply(pPot, cPot, GetModel()->GetModelDomain());
		    *bigPot /=  *(m_pJTree->GetSeparatorPotential(*sourceIt, *nbr));
		    m_NodesAfterShrink[*sourceIt] = false;

		    int                 numOfNbrs1;
		    const int           *nbrs1, *nbr1, *nbrs1_end;
		    const ENeighborType *nbrsTypes1;

		    pGraph->GetNeighbors( *nbr, &numOfNbrs1, &nbrs1, &nbrsTypes1 );
		    tmpV.assign(Domain, Domain+domSize);
		    for(nbr1 = nbrs1, nbrs1_end = nbrs1 + numOfNbrs1; nbr1 != nbrs1_end; ++nbr1 )
		    {
			if( !m_NodesAfterShrink[*nbr1] ) continue;
			m_pJTree->GetSeparatorDomain(*nbr, *nbr1, &sepSize, &sepDomain);
			tmpV = pnlSetUnion(sepSize, const_cast<int*>(sepDomain), tmpV.size(), &tmpV.front());
		    }
		    bigPot->GetDomain(&bigDomain);
		    tmpV = pnlIntersect(tmpV.size(), &tmpV.front(), bigDomain.size(), &bigDomain.front());
		    if( tmpV.size() < bigDomain.size() )
		    {
			tPot = bigPot->Marginalize(&tmpV.front(), tmpV.size());
			delete bigPot;
			bigPot = tPot;
		    }
		    delete vPots[*nbr];
		    vPots[*nbr] = bigPot;
		    bigPot->GetDomain(&bigDomain);
		    if( pnlIsSubset(domSize, Domain, bigDomain.size(), &bigDomain.front()) )
		    {
			CPotential* retPot = static_cast<CPotential*>(bigPot->Clone());
			for(i=0; i<numNodes; i++)
			{
			    delete vPots[i];
			}
			vPots.clear();
			m_NodesAfterShrink.clear();
			return retPot;
		    }
		}
		nodesSentMessages[*sourceIt] = true;
	    }
	}
    }
    PNL_THROW(CInternalError, "internal error");
}
bool pnl::EqualResults(CJtreeInfEngine& eng1, CJtreeInfEngine& eng2,
		       float epsilon, int doPrint, int doFile, float *maxDiff)
{
    CJunctionTree *JTree1, *JTree2;
    JTree1 = eng1.GetJTree();
    JTree2 = eng2.GetJTree();
    int NumOfNds1 = JTree1->GetNumberOfNodes();
    int NumOfNds2 = JTree2->GetNumberOfNodes();
    int numOfNdsInClq;
    const int *clique;
    const floatVector *myVector;
    int node;

#if 0
    FILE *out;
    if (doFile)
    {
	out = fopen( "jtree1.out", "w" );

	for(node = 0; node < NumOfNds1; node++)
	{
	    JTree1->GetNodeContent(node, &numOfNdsInClq, &clique);
	    fprintf(out, "Nodes of clique %d :\n", node);
	    for (int i = 0; i < numOfNdsInClq; i++)
		fprintf(out, "%d   ", clique[i]);
	    CMatrix<float>* mat = NULL;
	    CPotential* p = JTree1->GetNodePotential(node);
	    mat = p->GetDistribFun()->GetMatrix(matTable);
	    CNumericDenseMatrix<float>* myMatrix = 
		static_cast<CNumericDenseMatrix<float>*>(mat->ConvertToDense());
	    fprintf(out,"\nMatrix of potential of clique %d:\n", node);
	    myVector = (myMatrix)->GetVector();
	    for(int j = 0; j < myVector->size(); j++)
	    {
		fprintf(out,"%f   ",(*myVector)[j]);
	    }
	    fprintf(out,"\n\n");
	}
	fclose( out );

	out = fopen( "jtree2.out", "w" );

	for(node = 0; node < NumOfNds2; node++)
	{
	    JTree2->GetNodeContent(node, &numOfNdsInClq, &clique);
	    fprintf(out, "Nodes of clique %d :\n", node);
	    for (int i = 0; i < numOfNdsInClq; i++)
		fprintf(out, "%d   ", clique[i]);
	    CMatrix<float>* mat = JTree2->GetNodePotential(node)->
		GetDistribFun()->GetMatrix(matTable);
	    CNumericDenseMatrix<float>* myMatrix = 
		static_cast<CNumericDenseMatrix<float>*>(mat->ConvertToDense());
	    fprintf(out,"\nMatrix of potential of clique %d:\n", node);
	    const floatVector *myVector = (myMatrix)->GetVector();
	    for(int j = 0; j < myVector->size(); j++)
	    {
		fprintf(out,"%f   ",(*myVector)[j]);
	    }
	    fprintf(out,"\n\n");
	}
	fclose( out );
    }
#endif

    bool res = 1;
    if (NumOfNds1 != NumOfNds2) 
	res = 0;
    CDistribFun* distrib1;
    if (maxDiff)
    {
	*maxDiff = 0;
    }
    float maxDifference;
    for(node = 0; node < NumOfNds1; node++)
    {
	distrib1 = JTree1->GetNodePotential(node)->GetDistribFun();
	if (!(distrib1->IsEqual(JTree2->GetNodePotential(node)->
	    GetDistribFun(), epsilon, 1, &maxDifference)))
	{
	    res = 0;
	    if (maxDiff && (*maxDiff < maxDifference))
	    {
		*maxDiff = maxDifference;
	    }
#if 0
	    if (doPrint) 
		printf("clique %d:  notOK  maxDiff = %.6f\n", node, maxDifference);
#endif
	}
	else
	{
#if 0
	    if (doPrint)
		printf("clique %d:  OK\n", node);
#endif
	}
    }
    return res;
}
void CGibbsSamplingInfEngine::
MarginalNodes( const int *queryIn, int querySz, int notExpandJPD )
{
  delete m_pQueryJPD;
  m_pQueryJPD = NULL;
  
  delete m_pPotMPE;
  m_pPotMPE = NULL;
  
  delete m_pEvidenceMPE;
  m_pEvidenceMPE = NULL;
  
  const CFactor *pFactor;
  CPotential *pPot =  NULL;
  int *begin1;
  int *end1;
  int *begin2;
  int *end2;
  
  intVector domainVec;
  intVector queryVec;
  intVector obsQueryVec;
  queryVec.reserve(querySz);
  obsQueryVec.reserve(querySz);
  int i;
  for( i = 0; i < querySz; i++ )
  {
    m_pEvidence->IsNodeObserved(queryIn[i]) ? 
      obsQueryVec.push_back(queryIn[i]):
    queryVec.push_back(queryIn[i]);
  }
  
  
  CPotential *tmpPot = NULL;
  
  if( queryVec.size() )
  {
    for( i = 0; i < m_queryFactors.size(); i++)     
    {
      
      domainVec.clear();
      pFactor = m_queryFactors[i];
      pFactor->GetDomain(&domainVec);
      begin1 = &domainVec.front();
      end1 = &domainVec.back() + 1;
      std::sort(begin1, end1);
      
      begin2 = &queryVec.front();
      end2 = &queryVec.back() + 1;
      std::sort(begin2, end2);
      
      if( std::includes(begin1, end1, begin2, end2) )
      {
        pPot = pFactor->ConvertStatisticToPot( (GetMaxTime()-GetBurnIn()-1)*GetNumStreams() );
        tmpPot = pPot->Marginalize( queryVec );
        delete pPot;
        break;
      }
      		   
    }
    if( !tmpPot )
    {
      PNL_THROW(CInvalidOperation, "Invalid query");
    }
  }
  delete m_pQueryJPD; 
  
  if( obsQueryVec.size() )
  {
    
    EDistributionType paramDistrType = 
      pnlDetermineDistributionType( GetModel()->GetModelDomain(), querySz, queryIn, m_pEvidence);
    
    
    CPotential *pQueryPot;
    switch( paramDistrType )
    {
    case dtTabular:
      {
        pQueryPot = CTabularPotential::CreateUnitFunctionDistribution(
          queryIn, querySz, m_pGraphicalModel->GetModelDomain() );
        break;
      }
      
    case dtGaussian:
      {
        pQueryPot = CGaussianPotential::CreateUnitFunctionDistribution(
          queryIn, querySz, m_pGraphicalModel->GetModelDomain()  );
        break;
      }
    case dtScalar:
      {
        pQueryPot = CScalarPotential::Create(
          queryIn, querySz, m_pGraphicalModel->GetModelDomain()  );
        break;
      }
    case dtCondGaussian:
      {
        PNL_THROW( CNotImplemented, "conditional gaussian factors" )
          break;
      }
    default:
      {
        PNL_THROW( CInconsistentType, "distribution type" )
      }
    }
    
    if( tmpPot)
    {
      (*pQueryPot) *= (*tmpPot);
      delete tmpPot;
    }
    
    if( m_bMaximize )
    {
      m_pPotMPE   = static_cast<CPotential*>
        ( pQueryPot->ExpandObservedNodes( m_pEvidence, 0) );
      
      m_pEvidenceMPE = m_pPotMPE->GetMPE();
    }
    else
    {
      m_pQueryJPD = static_cast<CPotential*>( pQueryPot->ExpandObservedNodes( m_pEvidence, 0) );
    }
    
    delete pQueryPot;
  }
  else
  {
    //assumed completion of the truncated listing: with no observed query
    //nodes, the marginal computed above is the final answer
    m_pQueryJPD = tmpPot;
  }
}
Example #17
int testShrinkObservedNodes()
{
    int i/*,j*/;
    int ret = TRS_OK;
    /*prepare to read the values from console*/
    EDistributionType dt;
    int disType = -1;
    EFactorType pt;
    int paramType = -1;
    /*read int disType corresponding DistributionType*/
    while((disType<0)||(disType>0))/*now we have only Tabulars&Gaussian*/
    {
	trsiRead( &disType, "0", "DistributionType");
    }
    /*read int paramType corresponding FactorType*/
    while((paramType<0)||(paramType>2))
    {
	trsiRead( &paramType, "0", "FactorType");
    }
    dt = EDistributionType(disType);
    pt = EFactorType(paramType);
    int numberOfNodes = 0;
    /*read number of nodes in Factor domain*/
    while(numberOfNodes<=0)
    {
	trsiRead( &numberOfNodes, "1", "Number of Nodes in domain");
    }
    int numNodeTypes = 0;
    /*read number of node types in model*/
    while(numNodeTypes<=0)
    {
	trsiRead( &numNodeTypes, "1", "Number of node types in Domain");
    }
    //int seed1 = pnlTestRandSeed()/*%100000*/;
    /*create string to display the value*/
    /*	char *value = new char[20];
    value = _itoa(seed1, value, 10);
    trsiRead(&seed1, value, "Seed for srand to define NodeTypes etc.");
    delete []value;
    trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "seed for rand = %d\n", seed1);
    int *domain = (int *)trsGuardcAlloc(numberOfNodes, sizeof(int));
    CNodeType * allNodeTypes = (CNodeType*)trsGuardcAlloc(numNodeTypes,
    sizeof(CNodeType));
    //To generate the NodeTypes we use rand()% and creates only Tabular now
    for(i=0; i<numNodeTypes; i++)
    {
    allNodeTypes[i] = CNodeType(1, 1+rand()%(numNodeTypes+3));
    }
    */	
    
    /*load data for parameter::ShrinkObservedNodes from console*/
    intVector domain;
    domain.assign( numberOfNodes, 0 );
    nodeTypeVector allNodeTypes;
    allNodeTypes.assign( numNodeTypes, CNodeType() );
    /*read node types*/
    for(i=0; i < numNodeTypes; i++)
    {
	int IsDiscrete = -1;
	int NodeSize = -1;
	while((IsDiscrete<0)||(IsDiscrete>1))
	    /*now we have tabular & Gaussian nodes!! */
	    trsiRead(&IsDiscrete, "1", "Is the node discrete?");
	while(NodeSize<0)
	    trsiRead(&NodeSize, "2", "NodeSize of node");
	allNodeTypes[i] = CNodeType( IsDiscrete != 0, NodeSize );
    }
    const CNodeType **nodeTypesOfDomain = (const CNodeType**)
	trsGuardcAlloc(numberOfNodes, sizeof(CNodeType*));
    int numData = 1;
    int *Ranges = (int*)trsGuardcAlloc(numberOfNodes, sizeof(int));
    /*associate nodes to node types*/
    for(i=0; i<numberOfNodes; i++)
    {
	domain[i] = i;
	int nodeAssociationToNodeType = -1;
	while((nodeAssociationToNodeType<0)||(nodeAssociationToNodeType>=
	    numNodeTypes))
	    trsiRead(&nodeAssociationToNodeType, "0", 
	    "node i has type nodeAssociationToNodeType");
	nodeTypesOfDomain[i] = &allNodeTypes[nodeAssociationToNodeType];
	//	nodeTypesOfDomain[i] = &allNodeTypes[rand()%numNodeTypes];
	Ranges[i] = nodeTypesOfDomain[i]->GetNodeSize();
	numData=numData*Ranges[i];
    }
    
    CModelDomain* pMD = CModelDomain::Create( allNodeTypes, domain );
    
    /*create factor according all information*/
    CFactor *pMyParam = NULL;
    float *data = (float *)trsGuardcAlloc(numData, sizeof(float));
    char *stringVal;/* = (char*)trsGuardcAlloc(50, sizeof(char));*/
    double val=0;
    /*read the values from console*/
    if(pt == ftPotential)
    {
	pMyParam = CTabularPotential::Create( &domain.front(), numberOfNodes, pMD );
	    /*here we can create data by multiplying by 0.1 - the numbers are non-normalized*/
	for(i=0; i<numData; i++)
	{
	    val = 0.1*i;
	    stringVal = trsDouble(val);
	    trsdRead(&val, stringVal, "value of i's data position");
	    data[i] = (float)val;
	    //data[i] = (float)rand()/1000;
	}
    }
    else
    {
    /*we can only read data from console - it must be normalized
	(according to their dimensions) - or we can normalize it by a function!*/
	if(pt == ftCPD)
	    pMyParam = CTabularCPD::Create( &domain.front(), numberOfNodes, pMD );
	for(i=0; i<numData; i++)
	{
	    val = -1;
	    while((val<0)||(val>1))
	    {
		trsdRead(&val, "-1", "value of (2*i)'s data position");
	    }
	    data[i] = (float)val;
	}
    }
    //trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "data for Factor = %d\n", data[i]);
    pMyParam->AllocMatrix(data,matTable);
    int nObsNodes = 0;	/*rand()%numberOfNodes;*/
    while((nObsNodes<=0)||(nObsNodes>numberOfNodes))
    {
	trsiRead(&nObsNodes, "1", "Number of Observed Nodes");
    }
    intVector myHelpForEvidence = intVector(domain.begin(), domain.end() );
    int *ObsNodes = (int *)trsGuardcAlloc(nObsNodes, sizeof(int));
    valueVector TabularValues;
    TabularValues.assign( nObsNodes, (Value)0 );
    char *strVal;
    for(i=0; i<nObsNodes; i++)
    {
	//fixme - we must ensure ObsNodes contains only distinct nodes (no duplicates)
	/*		j = rand()%(numberOfNodes-i);*/
	int numberOfObsNode = -1;
	strVal = trsInt(i);
        intVector::iterator j = std::find( myHelpForEvidence.begin(), myHelpForEvidence.end(), numberOfObsNode );
	while((numberOfObsNode<0)||(numberOfObsNode>numberOfNodes)||
	    (j==myHelpForEvidence.end()))
	{
	    trsiRead(&numberOfObsNode, strVal,"Number of i's observed node");
	    j = std::find(myHelpForEvidence.begin(), myHelpForEvidence.end(),
		numberOfObsNode);
	}
	//ObsNodes[i] = myHelpForEvidence[j];
	myHelpForEvidence.erase( j );
	ObsNodes[i] = numberOfObsNode;
	int valueOfNode = -1;
	int maxValue = (*nodeTypesOfDomain[ObsNodes[i]]).GetNodeSize();
	while((valueOfNode<0)||(valueOfNode>=maxValue))
	{
	    trsiRead(&valueOfNode,"0","this is i's observed node value");
	}
	TabularValues[i].SetInt(valueOfNode);
	/*rand()%((*nodeTypesOfDomain[ObsNodes[i]]).pgmGetNodeSize());*/
    }
    CEvidence* pEvidence = CEvidence::Create( pMD, nObsNodes, ObsNodes, TabularValues );
    myHelpForEvidence.clear();
    CNodeType *ObservedNodeType = (CNodeType*)trsGuardcAlloc(1, 
	sizeof(CNodeType));
    *ObservedNodeType = CNodeType(1,1);
    CPotential *myTakedInFactor = static_cast<CPotential*>(pMyParam)->ShrinkObservedNodes(pEvidence);
    const int *myfactorDomain;
    int factorDomSize ;
    myTakedInFactor->GetDomain(&factorDomSize, &myfactorDomain);
#if 0
    CNumericDenseMatrix<float> *mySmallMatrix = static_cast<
        CNumericDenseMatrix<float>*>(myTakedInFactor->GetMatrix(matTable));
    int n;
    const float* mySmallData;
    mySmallMatrix->GetRawData(&n, &mySmallData);
    int nDims; // = mySmallMatrix->GetNumberDims();
    const int * mySmallRanges;
    mySmallMatrix->GetRanges(&nDims, &mySmallRanges);
    
    if(nDims!=numberOfNodes)
    {
	ret = TRS_FAIL;
	trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "nDims = %d\n", nDims);
    }
    else
    {
	int numSmallData = 1;
	for(i=0; i<nDims; i++)
	{
	    numSmallData = numSmallData*mySmallRanges[i];
	    trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "Range[%d] = %d\n", i, 
		mySmallRanges[i]);
	}
	for(i=0; i<numSmallData; i++)
	{	
	    trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "mySmallData[%d] = %f ",
		i, mySmallData[i]);
	}
    }
#endif
    //getchar();
    delete(myTakedInFactor);
    delete (pMyParam);
    delete pMD;
    //test gaussian parameter
    nodeTypeVector nTypes;
    nTypes.assign( 2, CNodeType() );
    nTypes[0] = CNodeType( 0, 2 );
    nTypes[1] = CNodeType( 0,1 );
    intVector domn = intVector(3,0);
    domn[1] = 1;
    domn[2] = 1;
    
    CModelDomain* pMD1 = CModelDomain::Create( nTypes, domn );
    
    domn[2] = 2;
    
    CPotential *BigFactor = CGaussianPotential::CreateUnitFunctionDistribution( 
	&domn.front(), domn.size(), pMD1,0 );
    float mean[] = { 1.0f, 3.2f};
    CPotential *SmallDelta = CGaussianPotential::CreateDeltaFunction( &domn.front(), 1, pMD1, mean, 1 );
    domn.resize( 2 );
    domn[0] = 1;
    domn[1] = 2;
    CPotential *SmallFunct = CGaussianPotential::Create( &domn.front(),
	domn.size(),  pMD1);
    float datH[] = { 1.1f, 2.2f, 3.3f };
    float datK[] = { 1.2f, 2.3f, 2.3f, 3.4f, 5.6f, 6.7f, 3.4f, 6.7f, 9.0f };
    SmallFunct->AllocMatrix( datH, matH );
    SmallFunct->AllocMatrix( datK, matK );
    static_cast<CGaussianPotential*>(SmallFunct)->SetCoefficient( 0.2f, 1 );
    CPotential* multFact = BigFactor->Multiply( SmallDelta );
    CPotential* nextMultFact = multFact->Multiply( SmallFunct );
    domn[0] = 0;
    domn[1] = 1;
    CPotential *marginalized = static_cast<CPotential*>(nextMultFact->Marginalize( &domn.front(), domn.size() ));
    int isSpecific = marginalized->IsDistributionSpecific();
    if( isSpecific )
    {
	trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "\nGaussian Distribution is specific");
    }
    delete BigFactor;
    delete SmallFunct;
    delete SmallDelta;
    delete pMD1;
    
    int ranges_memory_flag = trsGuardCheck(Ranges);
    int data_memory_flag = trsGuardCheck(data);
    int nodeTypesOfDomain_mem_b = trsGuardCheck(nodeTypesOfDomain);
    int ObsNodes_mem_b = trsGuardCheck(ObsNodes);
    int ObsNodeType_mem_b = trsGuardCheck(ObservedNodeType);
    if(((ranges_memory_flag)||(data_memory_flag)||
	(nodeTypesOfDomain_mem_b)||
	(ObsNodes_mem_b)||(ObsNodeType_mem_b)))
    {
	ret = TRS_FAIL;
	return trsResult( ret, ret == TRS_OK ? "No errors" : 
	"Bad test on ShrinkObservedNodes Method - memory");
    }
    else
    {
	trsGuardFree(ObservedNodeType);
	trsGuardFree(ObsNodes);
	trsGuardFree(nodeTypesOfDomain);
	trsGuardFree(data);
	trsGuardFree(Ranges);
    }			
    return trsResult( ret, ret == TRS_OK ? "No errors" : 
    "Bad test on ShrinkObservedNodes Method");
}
void CSpecPearlInfEngine::MarginalNodes( const int* query, int querySize,
                                     int notExpandJPD )
{
    if( notExpandJPD == 1 )
    {
        PNL_THROW( CInconsistentType, "pearl inference works with expanded distributions only" );
    }

    PNL_CHECK_LEFT_BORDER(querySize, 1);
    
    if( m_pQueryJPD )
    {
        delete m_pQueryJPD;
    }

    if( m_pEvidenceMPE )
    {
        delete m_pEvidenceMPE;
    }
    
    if( querySize == 1 )
    {
        if( m_bMaximize )
        {
            //compute MPE
            m_pEvidenceMPE = m_beliefs[m_curState][query[0]]->GetMPE();
        }
        else
        {
            // get marginal for one node - create a parameter on existing data - m_beliefs[query[0]];
            m_pQueryJPD = m_beliefs[m_curState][query[0]]->GetNormalized();
        }
    }
    else
    {
        int numParams;
        CFactor ** params; 
        m_pGraphicalModel->GetFactors( querySize, query, &numParams ,&params );
        if ( !numParams )
        {
            PNL_THROW( CBadArg, "a multi-node query must consist of members of a single family" ) 
        }
        if( numParams != 1 )
        {
            PNL_THROW( CBadArg, "add more nodes to specify which family's JPD you want")
        }
        int i;
        //get information from the parameter on these nodes to create a new
        //parameter with updated data
        CPotential* allPot;
        if( m_modelType == mtMRF2 )
        {
            //just multiply and marginalize
            allPot = static_cast<CPotential*>(params[0]->Clone());
        }
        else
        {
            //m_modelType == mtBNet
            //need to convert to a potential without evidence and multiply
            //and marginalize after that
            allPot = static_cast<CCPD*>(params[0])->ConvertToPotential();
        }
        //get neighbors of last node in domain (child for CPD) 
        //to compute JPD for his family
        int domSize;
        const int* dom;
        params[0]->GetDomain(&domSize, &dom);
        //start multiply to add information after inference
        for( i = 0; i < domSize; i++ )
        {
            (*allPot) *= (*m_beliefs[m_curState][dom[i]]) ;
        }
        m_pQueryJPD = allPot->Marginalize( query, querySize, m_bMaximize );
        //fixme - can replace by normalize in self
        m_pQueryJPD->Normalize();
        if( m_bMaximize )
        {
            //compute MPE
            m_pEvidenceMPE = m_pQueryJPD->GetMPE();
            delete m_pQueryJPD;
            m_pQueryJPD = NULL;
        }
    }
}
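
// A minimal query sketch (pInf and pEv are hypothetical names; any engine
// that implements MarginalNodes can be used): after the evidence has been
// entered, MarginalNodes() computes the marginal over the query nodes and
// GetQueryJPD() returns the resulting potential.
//
//     pInf->EnterEvidence( pEv );
//     int query[] = { 0, 1 };
//     pInf->MarginalNodes( query, 2 );
//     const CPotential* pJPD = pInf->GetQueryJPD();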