Example #1
void CreateEvidencesArHMM( CDBN *pDBN, int nTimeSlices,
			  pEvidencesVector *myEvidencesForDBN )
{

    // the output vector must be passed in empty
    if( myEvidencesForDBN->size() )
    {
        assert( 0 );
    }
    /////////////////////////////////////////////////////////////////////////////
    // Create values for the evidence in every slice from t = 0 to t = nTimeSlices - 1
    /////////////////////////////////////////////////////////////////////////////


    intVector obsNodes(1, 1);   // node 1 is the single observed node in each slice
    int slice;
    valueVecVector valuesPerSlice;

    for( slice = 0; slice < nTimeSlices; slice++ )
    {
        valueVector tmpVal(1);
        tmpVal[0].SetFlt( (float)( rand() % 10 ) );
        valuesPerSlice.push_back( tmpVal );
    }


    for( slice = 0; slice < nTimeSlices; slice++ )
    {
        myEvidencesForDBN->push_back( CEvidence::Create( pDBN->GetModelDomain(),
            obsNodes, valuesPerSlice[slice] ) );
    }
}
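
A minimal calling sketch for the helper above. The surrounding DemoCreateEvidences function, the include, and the slice count are assumptions; any CDBN whose observation node matches obsNodes would do.

#include "pnl_dll.hpp"
PNL_USING

// Hypothetical caller: pDBN is assumed to be an AR-HMM-style DBN built elsewhere.
void DemoCreateEvidences( CDBN *pDBN )
{
    const int nTimeSlices = 5;
    pEvidencesVector evidences;                       // must be empty on entry
    CreateEvidencesArHMM( pDBN, nTimeSlices, &evidences );

    // ... hand the evidences to an inference or learning engine here ...

    for( int t = 0; t < (int)evidences.size(); t++ )  // the caller owns the evidences
    {
        delete evidences[t];
    }
}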
Example #2
void CMNet::GenerateSamples( pEvidencesVector* evidences, int nSamples ) const
{
    // generate random samples for the MNet
    // evidences - vector of pointers to the evidences to be generated
    // nSamples  - number of samples

    if( nSamples <= 0)
    {
        PNL_THROW(CBadArg, "number of evidences must be positive")
    }
    (*evidences).assign(nSamples, NULL);

    const CMNet* selfMNet = this;
    int ncliques = selfMNet->GetNumberOfCliques();

    int nnodes = selfMNet->GetNumberOfNodes();
    intVector obsNodes(nnodes);

    const CNodeType * nt;
    int nVals = 0;
    int i;
    for( i = 0; i < nnodes; i++ )
    {
        obsNodes[i] = i;
        nt = this->GetNodeType(i);
        if( nt->IsDiscrete() )
        {
            nVals++;
        }
        else
        {
            nVals += nt->GetNodeSize();
        }
    }

    valueVector obsVals(nVals);

    // fill obsVals with placeholder values; they are overwritten during sampling
    valueVector::iterator iter = obsVals.begin();

    for( i = 0; i < nnodes; i++ )
    {
        nt = this->GetNodeType(i);
        if( nt->IsDiscrete() )
        {
            iter->SetInt(0);
            iter++;
        }
        else
        {
            int j;
            for( j = 0; j < nt->GetNodeSize(); j++, iter++ )
            {
                iter->SetFlt(0.0f);
            }
        }
    }


    pEvidencesVector::iterator evidIter = evidences->begin();
    for( ; evidIter != evidences->end(); evidIter++ )
    {
        // create an evidence over all nodes, then mark every node unobserved
        // so that GenerateSample fills the values in clique by clique
        (*evidIter) = CEvidence::Create(selfMNet->GetModelDomain(), obsNodes, obsVals);
        (*evidIter)->ToggleNodeState( nnodes, &obsNodes.front() );
        for ( i = 0; i < ncliques; i++ )
        {
            m_pParams->GetFactor( i )->GenerateSample( *evidIter );
        }
    }
}
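
A hedged usage sketch for the two-argument overload above. The MNet is assumed to be constructed elsewhere; only the sampling call and the ownership of the returned evidences are illustrated.

// Hypothetical caller: pMNet is assumed to be a fully specified CMNet with attached factors.
void DrawSamplesFromMNet( const CMNet *pMNet )
{
    pEvidencesVector samples;
    pMNet->GenerateSamples( &samples, 100 );          // 100 evidences over all nodes

    // ... use the samples, e.g. as training data for a learning engine ...

    for( int s = 0; s < (int)samples.size(); s++ )    // evidences are owned by the caller
    {
        delete samples[s];
    }
}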
Example #3
void CMNet::GenerateSamples( pEvidencesVector* evidences, int nSamples, const CEvidence *pEvIn ) const
{
    // generate random samples for the MNet, keeping the values observed in pEvIn fixed
    // evidences - vector of pointers to the evidences to be generated
    // nSamples  - number of samples

    if( nSamples <= 0)
    {
        PNL_THROW(CBadArg, "number of evidences must be positive")
    }
    (*evidences).assign( nSamples, (CEvidence *)0 );

    const CMNet* selfMNet = this;
    int ncliques = selfMNet->GetNumberOfCliques();
    int nnodes = selfMNet->GetNumberOfNodes();
    intVector obsNodes(nnodes);
    intVector obsNdsSz(nnodes);
    CModelDomain *pMD = GetModelDomain();

    int nVls = 0;
    int i;
    for( i = 0; i < nnodes; i++ )
    {
        obsNodes[i] = i;
        obsNdsSz[i] = pMD->GetNumVlsForNode(i);
        nVls += obsNdsSz[i];

    }

    valueVector obsVls(nVls);
    valueVector::iterator iter = obsVls.begin();

    intVector ndsToToggle;
    ndsToToggle.reserve(nnodes);


    for( i = 0; i < nnodes; i++ )
    {
        int j;
        // nodes observed in the input evidence keep their given values
        if( pEvIn && pEvIn->IsNodeObserved(i) )
        {
            const Value* pVal = pEvIn->GetValue(i);
            for( j = 0; j < obsNdsSz[i]; j++, iter++ )
            {
                *iter = pVal[j];
            }
        }
        else
        {
            // hidden nodes get placeholder values and are sampled later
            ndsToToggle.push_back(i);
            bool isDiscr = pMD->GetVariableType(i)->IsDiscrete();
            if( isDiscr )
            {
                iter->SetInt(0);
                iter++;
            }
            else
            {
                for( j = 0; j < obsNdsSz[i]; j++, iter++ )
                {
                    iter->SetFlt(0.0f);
                }
            }
        }
    }


    pEvidencesVector::iterator evidIter = evidences->begin();
    for( ; evidIter != evidences->end(); evidIter++ )
    {
        // create a fully observed evidence, then toggle the hidden nodes
        // so that GenerateSample fills them in clique by clique
        (*evidIter) = CEvidence::Create(selfMNet->GetModelDomain(), obsNodes, obsVls);
        if( !ndsToToggle.empty() )
        {
            (*evidIter)->ToggleNodeState( (int)ndsToToggle.size(), &ndsToToggle.front() );
        }

        for ( i = 0; i < ncliques; i++ )
        {
            m_pParams->GetFactor( i )->GenerateSample( *evidIter );
        }
    }
}
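
The three-argument overload keeps the values of any node observed in pEvIn and samples only the remaining nodes. A hedged sketch of conditional sampling; the choice of node 0 and its clamped state are assumptions for illustration.

// Hypothetical caller: node 0 of pMNet is assumed to be discrete.
void DrawConditionalSamples( const CMNet *pMNet )
{
    intVector obsNode( 1, 0 );                        // clamp node 0 ...
    valueVector obsVal( 1 );
    obsVal[0].SetInt( 1 );                            // ... to state 1
    CEvidence *pEvIn = CEvidence::Create( pMNet->GetModelDomain(), obsNode, obsVal );

    pEvidencesVector samples;
    pMNet->GenerateSamples( &samples, 100, pEvIn );   // node 0 stays fixed in every sample

    for( int s = 0; s < (int)samples.size(); s++ )
    {
        delete samples[s];
    }
    delete pEvIn;
}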
Example #4
PNL_USING
int main()
{
    int nnodes = 16;
    int nodeslice = 8;
    CBNet* pKjaerulf = pnlExCreateKjaerulfsBNet();
    CDBN*  pKj = CDBN::Create(pKjaerulf);

    int nSeries = 50;
    int nslices = 101;
    int i;

    intVector nS(nSeries);
    for(i = 0; i < nSeries; i++)
    {
        nS[i] = nslices;
    }

    valueVector vValues;
    vValues.resize(nodeslice);
    intVector obsNodes(nodeslice);
    for( i = 0; i < nodeslice; i++ )
    {
        obsNodes[i] = i;
    }
    CEvidence ***pEv;
    pEv = new CEvidence **[nSeries];

    int series, slice, node, val;

    FILE * fp;
    fp = fopen("../Data/kjaerulff.dat", "r");

    if( !fp )
    {
        std::cout<<"can't open cases file"<<std::endl;
        exit(1);
    }

    for( series = 0; series < nSeries; series++ )
    {
        pEv[series] = new CEvidence*[ nslices ];
        for( slice = 0;  slice < nslices; slice++ )
        {
            for( node = 0; node < nodeslice; node++)
            {
                fscanf(fp, "%d,", &val);
                vValues[node].SetFlt( (float)val );
            }
            (pEv[series])[slice] = CEvidence::Create(pKj->GetModelDomain(), obsNodes,  vValues );
        }

    }
    fclose(fp);

    CGraph *pGraph = CGraph::Create(nnodes, NULL, NULL, NULL);
    for( i = 0; i < nnodes - 1; i++ )
    {
        pGraph->AddEdge( i, i+1, 1 );
    }
    CNodeType *nodeTypes = new CNodeType[1];

    nodeTypes[0].SetType(1, 2);
    int *nodeAssociation = new int[nnodes];

    for ( i = 0; i < nnodes; i++ )
    {
	nodeAssociation[i] = 0;
    }
    CBNet *pBnet = CBNet::Create( nnodes, 1, nodeTypes, nodeAssociation, pGraph );
    delete [] nodeTypes;          // the network keeps its own copy of this information
    delete [] nodeAssociation;
    pBnet->AllocFactors();
    floatVector data;
    data.assign(64, 0.0f);

    for( node = 0; node < nnodes; node++ )
    {
        pBnet->AllocFactor( node );
        pBnet->GetFactor( node )->AllocMatrix( &data.front(), matTable );
    }

    CDBN*  pDBN = CDBN::Create(pBnet);

    CMlDynamicStructLearn *pLearn = CMlDynamicStructLearn::Create( pDBN, itDBNStructLearnML,
        StructLearnHC, BIC, 4, 1, 30 );

    pLearn->SetData( nSeries, &nS.front(), pEv );
    //  pLearn->SetLearnPriorSlice(true);
    //  pLearn->SetMinProgress((float)1e-4);
    pLearn->Learn();
    const CDAG* pDAG = pLearn->GetResultDag();
    pDAG->Dump();
    ////////////////////////////////////////////////////////////////////////////
    delete pLearn;
    delete pDBN;
    delete pKj;
    for(series = 0; series < nSeries; series++ )
    {
        for( slice = 0;  slice < nslices; slice++ )
        {
            delete (pEv[series])[slice];
        }
        delete[] pEv[series];
    }
    delete[] pEv;
    return 0;
}
Example #5
float CMlLearningEngine::_LearnPotentials()
{
    // fit the clique potentials by iterative proportional fitting (IPF),
    // using the normalized statistical matrices collected from the data as targets
    int iteration = 1;
    float log_lik = 0.0f;
    CStaticGraphicalModel *grmodel = this->GetStaticModel();
    CFactor *parameter = NULL;
    
    float epsilon = m_precisionIPF;
    const CPotential *joint_prob = NULL;
    CPotential *clique_jpd = NULL;
    
    CMatrix<float> *itogMatrix;
    
    CInfEngine *m_pInfEngine = 
        CNaiveInfEngine::Create(grmodel);
    intVector obsNodes(0);
    valueVector obsValues(0);
    CEvidence *emptyEvidence = CEvidence::Create(grmodel->GetModelDomain(), obsNodes, obsValues);
    m_pInfEngine -> EnterEvidence( emptyEvidence );
    int querySize = grmodel->GetNumberOfNodes();
    int *query;
    query = new int [querySize];

    int i;
    for( i = 0; i < querySize; i++ )
    {
        query[i] = i;
    }
    m_pInfEngine -> MarginalNodes( query, querySize );
    joint_prob = m_pInfEngine->GetQueryJPD();
    CPotential *itog_joint_prob = 
        static_cast<CPotential *>(joint_prob ->Marginalize(query, querySize));
    
    int DomainSize;
    const int *domain;
    
    potsPVector learn_factors;
    CPotential *tmp_factor;
    
    for( i = 0; i < grmodel->GetNumberOfFactors(); i++ )
    {
        parameter = grmodel->GetFactor(i);
        parameter->GetDomain( &DomainSize, &domain );
        CDistribFun *correspData = parameter->GetDistribFun();

        CMatrix<float> *learnMatrix = correspData->
            GetStatisticalMatrix(stMatTable);

        // the normalized sufficient statistics become the IPF target marginal of the clique
        CPotential *learnFactor = CTabularPotential::Create( domain, DomainSize,
            parameter->GetModelDomain() );

        learn_factors.push_back( learnFactor );
        learn_factors[i]->AttachMatrix( learnMatrix->NormalizeAll(), matTable );
    }
    
    int data_length;
    float *old_itog_data = NULL;
    const float *itog_data;
    
    delete [] query;
    bool convergence = false;
    // IPF loop: multiply in each clique's target marginal and divide out the current one
    // until no table entry changes by more than m_precisionIPF
    while( !convergence && ( iteration <= m_maxIterIPF ) )
    {
        iteration++;
        itogMatrix = (itog_joint_prob->GetDistribFun())
            -> GetMatrix(matTable);
        static_cast<CNumericDenseMatrix<float>*>(itogMatrix)->
            GetRawData(&data_length, &itog_data);
        old_itog_data = new float[data_length];
        for( i = 0; i < data_length; i++)
        {
            old_itog_data[i] = itog_data[i];
        }
        for( int clique = 0; clique < grmodel->GetNumberOfFactors(); clique++)
        {
            parameter = grmodel->GetFactor(clique);
            parameter->GetDomain( &DomainSize, &domain );
            clique_jpd = static_cast<CPotential *>
                (itog_joint_prob -> Marginalize( domain, DomainSize ));
            
            
            tmp_factor = itog_joint_prob -> Multiply(learn_factors[clique]);
            delete (itog_joint_prob);
            itog_joint_prob = tmp_factor;
            tmp_factor = itog_joint_prob -> Divide(clique_jpd);
            delete (itog_joint_prob);
            delete (clique_jpd);
            itog_joint_prob = tmp_factor;
            
        }
        itogMatrix = (itog_joint_prob->GetDistribFun())
            -> GetMatrix(matTable);
        
        static_cast<CNumericDenseMatrix<float>*>(itogMatrix)->
            GetRawData(&data_length, &itog_data);
        convergence = true;
        for (int j = 0; j < data_length; j++)
        {
            if( fabs( itog_data[j] - old_itog_data[j] ) > epsilon)
            {
                convergence = false;
                break;
            }
            
        }
        delete []old_itog_data;
    }
    if( iteration > m_maxIterIPF )
    {
        PNL_THROW( CNotConverged,
            "IPF procedure did not converge within the maximum number of iterations" )
    }
    
    for(int  clique = 0; clique < grmodel -> GetNumberOfFactors(); clique++)
    {
        CMatrix<float> *matrix = NULL;
        parameter = grmodel->GetFactor(clique);
        int DomainSize;
        const int *domain;
        int data_length;
        const float *data;
        parameter->GetDomain( &DomainSize, &domain );
        
        matrix = itog_joint_prob->Marginalize( domain, DomainSize )
            ->GetDistribFun()-> GetMatrix( matTable );
        static_cast<CNumericDenseMatrix<float>*>(matrix)->GetRawData(&data_length, &data);
        CNumericDenseMatrix<float>* matLearn = 
            static_cast<CNumericDenseMatrix<float>*>(
            parameter->GetDistribFun()->GetStatisticalMatrix(stMatTable));
        for(int offset = 0; offset < data_length; offset++)
        {
            float prob = float( ( data[offset] < FLT_EPSILON ) ? -FLT_MAX : log( data[offset] ) );
            
            log_lik += matLearn->GetElementByOffset(offset)*prob - 
                m_Vector_pEvidences.size();
        }
        parameter->AttachMatrix( matrix, matTable );
        delete (learn_factors[clique]);
    }
    delete (itog_joint_prob);
    learn_factors.clear();
    
    return log_lik;
}
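
The while loop above is iterative proportional fitting: each pass multiplies the joint by a clique's normalized statistical matrix and divides out the joint's current marginal over that clique, until no table entry moves by more than m_precisionIPF. Below is a standalone sketch of the same update on plain float tables, independent of the PNL classes; the three-node chain with cliques {x0,x1} and {x1,x2} and the target tables are assumptions chosen for illustration.

#include <cstdio>

// Standalone IPF sketch (not PNL code): three binary nodes x0, x1, x2,
// cliques {x0,x1} and {x1,x2}; state index is x = x0*4 + x1*2 + x2.
int main()
{
    float p[8];
    for( int x = 0; x < 8; x++ ) p[x] = 1.0f / 8.0f;           // start from the uniform joint

    // Target clique marginals (stand-ins for the normalized statistical matrices).
    float t01[4] = { 0.3f, 0.2f, 0.1f, 0.4f };                  // over (x0,x1)
    float t12[4] = { 0.25f, 0.25f, 0.15f, 0.35f };              // over (x1,x2)

    for( int iter = 0; iter < 50; iter++ )                      // fixed pass count for brevity
    {
        float m01[4] = { 0.0f }, m12[4] = { 0.0f };

        // Fit clique {x0,x1}: multiply by the target marginal, divide by the current one.
        for( int x = 0; x < 8; x++ ) m01[x >> 1] += p[x];       // sum out x2
        for( int x = 0; x < 8; x++ ) p[x] *= t01[x >> 1] / m01[x >> 1];

        // Fit clique {x1,x2}.
        for( int x = 0; x < 8; x++ ) m12[x & 3] += p[x];        // sum out x0
        for( int x = 0; x < 8; x++ ) p[x] *= t12[x & 3] / m12[x & 3];
    }

    for( int x = 0; x < 8; x++ )
    {
        printf( "p(%d,%d,%d) = %.4f\n", x >> 2, (x >> 1) & 1, x & 1, p[x] );
    }
    return 0;
}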