Example #1
float * GenerateFloatArray(int NumVal, float StartVal, float FinishVal)
{
    // Allocate the result array; the caller owns it and must release it with delete [].
    float *vls = new float[NumVal];

    float val;
    for (int i = 0; i < NumVal; i++)
    {
        // Draw each element randomly from [StartVal, FinishVal].
        val = pnlRand(StartVal, FinishVal);
        vls[i] = val;
    }

    return vls;
}
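A minimal usage sketch for the helper above (hypothetical caller, assuming the random generator has already been seeded, e.g. with pnlSeed): the returned buffer is heap-allocated, so the caller must release it with delete [].

float *buf = GenerateFloatArray( 10, 0.0f, 1.0f );  // ten random values in [0.0, 1.0]
// ... use buf[0] .. buf[9] ...
delete [] buf;  // the caller owns the array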
Example #2
void GenerateSoftMaxEvidence(CStaticGraphicalModel *pBNet, float StartVal,
                             float FinishVal, valueVector &vls)
{
    int numberOfNodes = pBNet->GetNumberOfNodes();
    EDistributionType dt;
    int i;

    const CNodeType *nodeTypes;

    // Cache the size (number of values) of every node.
    int *NumOfNodeVal = new int[numberOfNodes];
    for (i = 0; i < numberOfNodes; i++)
    {
        nodeTypes = pBNet->GetNodeType(i);
        NumOfNodeVal[i] = nodeTypes->GetNodeSize();
    }

    vls.resize(numberOfNodes);
    for (i = 0; i < numberOfNodes; i++)
    {
        dt = pBNet->GetFactor(i)->GetDistributionType();
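        // Discrete (softmax or tabular) nodes: draw a random state index.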
        if ((dt == dtSoftMax) || (dt == dtTabular))
        {
            int valInt = rand() % NumOfNodeVal[i];

#ifdef SM_TEST
            printf("%3d\t", valInt);
#endif

            vls[i].SetInt(valInt);
        }
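        // Gaussian nodes: draw a random float from [StartVal, FinishVal].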
        if (dt == dtGaussian)
        {
            float valFl = pnlRand(StartVal, FinishVal);

#ifdef SM_TEST
            printf("%f\t", valFl);
#endif

            vls[i].SetFlt(valFl);
        }
    }

#ifdef SM_TEST
    printf("\n");
#endif
    delete [] NumOfNodeVal;
}
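A possible caller for the helper above (a sketch, not part of the original code; it assumes every node is one-dimensional so a single Value per node is enough, and it uses the same CEvidence::Create overload that appears in Example #4):

valueVector vals;
GenerateSoftMaxEvidence( pBNet, -1.0f, 1.0f, vals );

// Observe every node with its generated value.
intVector obsNodes( pBNet->GetNumberOfNodes() );
for( int i = 0; i < pBNet->GetNumberOfNodes(); i++ )
{
    obsNodes[i] = i;
}
CEvidence *pEv = CEvidence::Create( pBNet->GetModelDomain(), obsNodes, vals );
// ... run inference or learning with pEv ...
delete pEv;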
Example #3
int testBKInfUsingClusters()
{
    
    
    int ret = TRS_OK;

    int seed = pnlTestRandSeed();
    pnlSeed( seed );
    std::cout << "seed: " << seed << std::endl;
    
    
    int nTimeSlices = -1;
    while(nTimeSlices <= 5)
    {
        trsiRead (&nTimeSlices, "10", "Number of slices");
    }
    
    float eps = -1.0f;
    while( eps <= 0 )
    {
        trssRead( &eps, "1.0e-1f", "accuracy in test");
    }
    
    
    CDBN *pDBN = CDBN::Create( pnlExCreateBatNetwork() );
    
    intVecVector clusters;
    intVector interfNds;
    pDBN->GetInterfaceNodes( &interfNds );
    int numIntNds = interfNds.size();
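    // Randomly split the interface nodes: each node is assigned to a randomly chosen cluster.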
    int numOfClusters = pnlRand( 1, numIntNds );
    clusters.resize(numOfClusters);
    int i;
    for( i = 0; i < numIntNds; i++ )
    {
	( clusters[pnlRand( 0, numOfClusters-1 )] ).push_back( interfNds[i] );
    }

    intVecVector validClusters;
    validClusters.reserve(numOfClusters);
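    // Keep only the clusters that are not empty.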
    for( i = 0; i < clusters.size(); i++ )
    {
	if(! clusters[i].empty())
	{
	    validClusters.push_back(clusters[i]);
	}
    }
        
    CBKInfEngine *pBKInf;
    pBKInf = CBKInfEngine::Create( pDBN, validClusters );

    C1_5SliceJtreeInfEngine *pJTreeInf;
    pJTreeInf = C1_5SliceJtreeInfEngine::Create( pDBN );

    intVector nSlices( 1, nTimeSlices );
    pEvidencesVecVector pEvid;
    
    pDBN->GenerateSamples( &pEvid, nSlices);
    int nnodesPerSlice = pDBN->GetNumberOfNodes();
    intVector nodes(nnodesPerSlice, 0);
    for( i = 0; i < nnodesPerSlice; i++ )
    {
	nodes[i] = i;
    }
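    // In every slice, toggle the observed state of a random subset of nodes (hiding them).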
    intVector ndsToToggle;
    for( i = 0; i < nTimeSlices; i++ )
    {
	std::random_shuffle( nodes.begin(), nodes.end() );
	ndsToToggle.resize( pnlRand(1, nnodesPerSlice) );
	int j;
	for( j = 0; j < ndsToToggle.size(); j++ )
	{
	    ndsToToggle[j] = nodes[j];
	}
	
	(pEvid[0])[i]->ToggleNodeState( ndsToToggle );
    }
    pBKInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pBKInf->EnterEvidence( &(pEvid[0]).front(), nTimeSlices );
    pBKInf->Smoothing();

    pJTreeInf->DefineProcedure( ptSmoothing, nTimeSlices );
    pJTreeInf->EnterEvidence( &pEvid[0].front(), nTimeSlices );
    pJTreeInf->Smoothing();
    
    // Build a random query: a randomly chosen node plus its parents, in a random slice.
    int querySlice = pnlRand( 0, nTimeSlices - 1 );
    int queryNode = pnlRand( 0, nnodesPerSlice - 1);
    queryNode += (querySlice ? nnodesPerSlice : 0);

    intVector query;
    pDBN->GetGraph()->GetParents( queryNode, &query );
    query.push_back( queryNode );
    std::random_shuffle( query.begin(), query.end() );
    query.resize( pnlRand(1, query.size()) );

    pBKInf->MarginalNodes(&query.front(), query.size(), querySlice);
    pJTreeInf->MarginalNodes(&query.front(), query.size(), querySlice);

    const CPotential *potBK = pBKInf->GetQueryJPD();
    const CPotential *potJTree = pJTreeInf->GetQueryJPD();
    if( !potBK->IsFactorsDistribFunEqual( potJTree , eps ) )
    {
	std::cout<<"BK query JPD \n";
	potBK->Dump();
	std::cout<<"JTree query JPD \n";
	potJTree->Dump();

	ret = TRS_FAIL;
    }
    for( i = 0; i < nTimeSlices; i++ )
    {
	delete (pEvid[0])[i];
    }
    
    delete pBKInf;
    delete pJTreeInf;
    delete pDBN;
 
    return trsResult( ret, ret == TRS_OK ? "No errors" : 
    "Bad test on BK Inference using clusters");
    
    
}
Example #4
int testRandomFactors()
{
    int ret = TRS_OK;
    
    int nnodes = 0;
    int i;
    while(nnodes <= 0)
    {
        trsiRead( &nnodes, "5", "Number of nodes in Model" );
    }
    //create node types
    int seed1 = pnlTestRandSeed();
    //create string to display the value
    char *value = new char[20];
#if 0
    value = _itoa(seed1, value, 10);
#else
    sprintf( value, "%d", seed1 );
#endif
    trsiRead(&seed1, value, "Seed for srand to define NodeTypes etc.");
    delete []value;
    trsWrite(TW_CON|TW_RUN|TW_DEBUG|TW_LST, "seed for rand = %d\n", seed1);
    //create 2 node types and model domain for them
    nodeTypeVector modelNodeType;
    modelNodeType.resize(2);
    modelNodeType[0] = CNodeType( 1, 4 );
    modelNodeType[1] = CNodeType( 1, 3 );
    intVector NodeAssociat;
    NodeAssociat.assign(nnodes, 0);
    for( i = 0; i < nnodes; i++ )
    {
        float rand = pnlRand( 0.0f, 1.0f );
        if( rand < 0.5f )
        {
            NodeAssociat[i] = 1;
        }
    }
    CModelDomain* pMDDiscr = CModelDomain::Create( modelNodeType,
        NodeAssociat );
    //create a random graph - the number of edges is random too
    int lowBorder = nnodes - 1;
    int upperBorder = int((nnodes * (nnodes - 1))/2);
    int numEdges = pnlRand( lowBorder, upperBorder );
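    // Rebuild the random DAG until it has exactly one connectivity component.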
mark: 
    CGraph* pGraph = tCreateRandomDAG( nnodes, numEdges, 1 );
    if ( pGraph->NumberOfConnectivityComponents() != 1 )
    {
        delete pGraph;
        goto mark;
    }
    CBNet* pDiscrBNet = CBNet::CreateWithRandomMatrices( pGraph, pMDDiscr );
    //run jtree inference just to check that
    //the model is valid for inference and that all operations succeed
    CEvidence* pDiscrEmptyEvid = CEvidence::Create( pMDDiscr, 0, NULL, valueVector() );
    CJtreeInfEngine* pDiscrInf = CJtreeInfEngine::Create( pDiscrBNet );
    pDiscrInf->EnterEvidence( pDiscrEmptyEvid );
    const CPotential* pot = NULL;
    for( i = 0; i < nnodes; i++ )
    {
        intVector domain;
        pDiscrBNet->GetFactor(i)->GetDomain( &domain );
        pDiscrInf->MarginalNodes( &domain.front(), domain.size() );
        pot = pDiscrInf->GetQueryJPD();
    }
    //make copy of Graph for using with other models
    pGraph = CGraph::Copy( pDiscrBNet->GetGraph() );
    delete pDiscrInf;
    delete pDiscrBNet;
    delete pDiscrEmptyEvid;
    delete pMDDiscr;  

    //create gaussian model domain
    modelNodeType[0] = CNodeType( 0, 4 );
    modelNodeType[1] = CNodeType( 0, 2 );
    CModelDomain* pMDCont = CModelDomain::Create( modelNodeType,
        NodeAssociat );
    CBNet* pContBNet = CBNet::CreateWithRandomMatrices( pGraph, pMDCont );
    CEvidence* pContEmptyEvid = CEvidence::Create( pMDCont, 0, NULL, valueVector() );
    CNaiveInfEngine* pContInf = CNaiveInfEngine::Create( pContBNet );
    pContInf->EnterEvidence( pContEmptyEvid );
    for( i = 0; i < nnodes; i++ )
    {
        intVector domain;
        pContBNet->GetFactor(i)->GetDomain( &domain );
        pContInf->MarginalNodes( &domain.front(), domain.size() );
        pot = pContInf->GetQueryJPD();
    }

    pGraph = CGraph::Copy(pContBNet->GetGraph());
    delete pContInf;
    delete pContBNet;
    delete pContEmptyEvid;
    delete pMDCont;
    //find a node that doesn't have any parents
    //and change its node type so that it gets a Conditional Gaussian CPD
    int numOfNodeWithoutParents = -1;
    intVector parents;
    parents.reserve(nnodes);
    for( i = 0; i < nnodes; i++ )
    {
        pGraph->GetParents( i, &parents );
        if( parents.size() == 0 )
        {
            numOfNodeWithoutParents = i;
            break;
        }
    }
    //change node type of this node, make it discrete
    CNodeType ntTab = CNodeType( 1,4 );
    modelNodeType.push_back( ntTab );
    NodeAssociat[numOfNodeWithoutParents] = 2;
    //need to change this model domain
    CModelDomain* pMDCondGau = CModelDomain::Create( modelNodeType, NodeAssociat );
    CBNet* pCondGauBNet = CBNet::CreateWithRandomMatrices( pGraph, pMDCondGau );
    //need to create evidence for all gaussian nodes
    intVector obsNodes;
    obsNodes.reserve(nnodes);
    int numGauVals = 0;
    for( i = 0; i < numOfNodeWithoutParents; i++ )
    {
        int GauSize = pMDCondGau->GetVariableType(i)->GetNodeSize();
        numGauVals += GauSize;
        obsNodes.push_back( i );
    }
    for( i = numOfNodeWithoutParents + 1; i < nnodes; i++ )
    {
        int GauSize = pMDCondGau->GetVariableType(i)->GetNodeSize();
        numGauVals += GauSize;
        obsNodes.push_back( i );
    }
    valueVector obsGauVals;
    obsGauVals.resize( numGauVals );
    floatVector obsGauValsFl;
    obsGauValsFl.resize( numGauVals);
    pnlRand( numGauVals, &obsGauValsFl.front(), -3.0f, 3.0f);
    //fill the valueVector
    for( i = 0; i < numGauVals; i++ )
    {
        obsGauVals[i].SetFlt(obsGauValsFl[i]);
    }
    CEvidence* pCondGauEvid = CEvidence::Create( pMDCondGau, obsNodes, obsGauVals );
    CJtreeInfEngine* pCondGauInf = CJtreeInfEngine::Create( pCondGauBNet );
    pCondGauInf->EnterEvidence( pCondGauEvid );
    pCondGauInf->MarginalNodes( &numOfNodeWithoutParents, 1 );
    pot = pCondGauInf->GetQueryJPD();
    pot->Dump();

    delete pCondGauInf;
    delete pCondGauBNet;
    delete pCondGauEvid;
    delete pMDCondGau;

    return trsResult( ret, ret == TRS_OK ? "No errors" : 
    "Bad test on RandomFactors");
}
Example #5
void CEMLearningEngine::Cast(const CPotential * pot, int node, int ev, float *** full_evid)
{
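    // Sample a state for the given (node, ev) pair from the tabular potential pot
    // and store it into (*full_evid)[node][ev].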
    
    int dims;
    dims = pot->GetDistribFun()->GetMatrix(matTable)->GetNumberDims();
    const int * ranges;
    pot->GetDistribFun()->GetMatrix(matTable)->GetRanges(&dims, &ranges);

    // segment[] will hold the cumulative distribution over the states of the node
    // (the node itself is the last dimension of the table).
    float * segment = new float [ranges[dims-1]+1];
    segment[0] = 0;

    int i, j;

    if (dims == 1)  //discrete node has no discrete parents
    {
        int * multiindex = new int [1];

        for (i=1; i <= ranges[0]; i++)
        {
            multiindex[0] = i-1;
            segment[i] = segment[i-1] + pot->GetDistribFun()->
                GetMatrix(matTable)->GetElementByIndexes(multiindex); 
        }
        segment[0] = -0.001f;

        delete [] multiindex;
    }

    else //discrete node has discrete parents
    {
        int   data_length; 
        const float * data;
        (static_cast <CDenseMatrix <float> * >(pot->GetDistribFun()->GetMatrix(matTable)))->GetRawData (&data_length, &data);
        
        // Sum the table over all parent configurations to get the (unnormalized)
        // probability of each state of the node.
        float * probability = new float [ranges[dims-1]];
        for (i=0; i< ranges[dims-1]; i++)
        {
            probability[i] = 0;
        }

        for (i=0; i< ranges[dims-1]; i++)
        {
            for (j=0; (j*ranges[dims-1]+i) < data_length; j++)
            {
                probability[i] += data[ j*ranges[dims-1] + i ];
            }

        }

        for (i=1; i <= ranges[dims-1]; i++)
        {
            segment[i] = segment[i-1] + probability[i-1];
        }

        delete [] probability;
    }

    segment[0] = -0.001f;
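    // Draw a uniform sample and select the state whose cumulative interval contains it.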
    float my_val = pnlRand(0.0f, 1.0f);
    for (i=1; i<=ranges[dims-1]; i++)
    {
        if ((my_val > segment[i-1]) && (my_val <= segment[i]))
        {
            (*full_evid)[node][ev] = i-1;
        }
    }
    delete [] segment;
    
}
Example #6
int testSetStatistics()
{
    int ret = TRS_OK;
    float eps = 0.1f;
    
    int seed = pnlTestRandSeed();
    pnlSeed( seed );   
            
    CBNet *pBNet = pnlExCreateCondGaussArBNet();
    CModelDomain *pMD = pBNet->GetModelDomain();

    
    CGraph *pGraph = CGraph::Copy(pBNet->GetGraph());
    
    CBNet *pBNet1 = CBNet::CreateWithRandomMatrices( pGraph, pMD );

    pEvidencesVector evidences;
    int nEvidences = pnlRand( 3000, 4000);
    
    pBNet->GenerateSamples( &evidences, nEvidences );
   
    
    int i;
    // Hiding node 0 in every sample is disabled in this test:
    // for( i = 0; i < nEvidences; i++)
    // {
    //     evidences[i]->MakeNodeHiddenBySerialNum(0);
    // }
    

    // Learn the parameters of the randomly initialized copy pBNet1 from the samples
    // generated by the original pBNet.
    CEMLearningEngine *pLearn = CEMLearningEngine::Create(pBNet1);
    pLearn->SetData( nEvidences, &evidences.front() );
    pLearn->SetMaxIterEM();
    pLearn->Learn();

    for( i = 0; i < pBNet->GetNumberOfFactors(); i++ )
    {
	if( ! pBNet->GetFactor(i)->IsFactorsDistribFunEqual(pBNet1->GetFactor(i), eps))
	{
	    ret = TRS_FAIL;
	    pBNet->GetFactor(i)->GetDistribFun()->Dump();
	    pBNet1->GetFactor(i)->GetDistribFun()->Dump();

	}
    }
    
    // Copy the sufficient statistics accumulated in pBNet1 during learning into the
    // factors of pBNet via SetStatistics, then re-estimate the parameters from them.
    CDistribFun *pDistr;
    const CMatrix<float>* pMat;
    CFactor *pCPD;
    
    pDistr = pBNet1->GetFactor(0)->GetDistribFun();
    pMat = pDistr->GetStatisticalMatrix(stMatTable);
    
    pCPD = pBNet->GetFactor(0);
    pCPD->SetStatistics(pMat, stMatTable);
    pCPD->ProcessingStatisticalData(nEvidences);
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(0), 0.0001f) )
    {
	ret = TRS_FAIL;
    }
    

    pDistr = pBNet1->GetFactor(1)->GetDistribFun();
    
    int parentVal;
    pCPD = pBNet->GetFactor(1);
    
    parentVal = 0;

    pCPD->SetStatistics(pMat, stMatCoeff);

    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);
    
    parentVal = 1;
    
    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);

    pCPD->ProcessingStatisticalData(nEvidences);
    
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(1), eps) )
    {
	ret = TRS_FAIL;
    }
    
    
    for( i = 0; i < nEvidences; i++)
    {
	delete evidences[i];
    }
    delete pLearn;
    delete pBNet1;
    delete pBNet;

    
    return trsResult( ret, ret == TRS_OK ? "No errors" : 
    "Bad test on SetStatistics");
    
    
}
Example #7
int main()
{
    CBNet *pBNetForArHMM = pnlExCreateRndArHMM();
    CDBN *pArHMM = CDBN::Create( pBNetForArHMM );

     //Create an inference engine
    C1_5SliceJtreeInfEngine* pInfEng;
    pInfEng = C1_5SliceJtreeInfEngine::Create(pArHMM);

    //Number of time slices for unrolling
    int nTimeSlices = 5;
    const CPotential* pQueryJPD;

    //Create evidence for every slice
    CEvidence** pEvidences;
    pEvidences = new CEvidence*[nTimeSlices];

    //Let node 1 always be observed
    const int obsNodesNums[] = { 1 };
    valueVector obsNodesVals(1);

    int i;
    for( i = 0; i < nTimeSlices; i++ )
    {
        // Generate random value
	obsNodesVals[0].SetInt(rand()%2);
        pEvidences[i] = CEvidence::Create( pArHMM, 1, obsNodesNums,
            obsNodesVals );
    }

    // Create smoothing procedure
    pInfEng->DefineProcedure(ptSmoothing, nTimeSlices);
    // Enter created evidences
    pInfEng->EnterEvidence(pEvidences, nTimeSlices);
    // Start smoothing process
    pInfEng->Smoothing();

    // Choose query set of nodes for every slice
    int queryPrior[] = { 0 };
    int queryPriorSize = 1;
    int query[] = { 0, 2 };
    int querySize = 2;

    std::cout << " Results of smoothing " << std::endl;

    int slice = 0;
    pInfEng->MarginalNodes( queryPrior, queryPriorSize, slice );
    pQueryJPD = pInfEng->GetQueryJPD();

    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();


    std::cout << std::endl;

    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->MarginalNodes( query, querySize, slice );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
	//pQueryJPD->Dump();
    }

    slice = 0;

    //Create filtering procedure
    pInfEng->DefineProcedure( ptFiltering );
    pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    pInfEng->Filtering( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();

    std::cout << " Results of filtering " << std::endl;
    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();


    for( slice = 1; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->Filtering( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
	//pQueryJPD->Dump();
    }

    //Create fixed-lag smoothing (online)
    int lag = 2;
    pInfEng->DefineProcedure( ptFixLagSmoothing, lag );

    // The engine needs lag + 1 slices of evidence before the first fixed-lag estimate.
    for (slice = 0; slice < lag + 1; slice++)
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
    }
    std::cout << " Results of fixed-lag smoothing " << std::endl;

    pInfEng->FixLagSmoothing( slice );
    pInfEng->MarginalNodes( queryPrior, queryPriorSize );
    pQueryJPD = pInfEng->GetQueryJPD();
    ShowResultsForInference(pQueryJPD, slice);
    //pQueryJPD->Dump();

    std::cout << std::endl;

    for( ; slice < nTimeSlices; slice++ )
    {
        pInfEng->EnterEvidence( &(pEvidences[slice]), 1 );
        pInfEng->FixLagSmoothing( slice );
        pInfEng->MarginalNodes( query, querySize );
        pQueryJPD = pInfEng->GetQueryJPD();
        ShowResultsForInference(pQueryJPD, slice);
	//pQueryJPD->Dump();
    }

    delete pInfEng;

    for( slice = 0; slice < nTimeSlices; slice++)
    {
        delete pEvidences[slice];
    }

    //Create learning procedure for DBN
    pEvidencesVecVector evidencesOut;


    const int nTimeSeries = 500;
    printf("nTimeSlices: %d\n", nTimeSlices);
    intVector nSlices(nTimeSeries);
    printf("nSlices_len: %d\n", int(nSlices.size()));
    //define the number of slices in every time series
    pnlRand(nTimeSeries, &nSlices.front(), 3, 20);
    printf("first 10 series lengths: [ ");
    for(int i = 0; i < 10; i++)
    {
        printf("%d ", nSlices[i]);
    }
    printf("]\n");
    // Generate evidences in a random way
    pArHMM->GenerateSamples( &evidencesOut, nSlices);
    printf("number of series: %d\n", int(evidencesOut.size()));
    printf("slices in the first series: %d\n", int(evidencesOut[0].size()));

    // Create DBN for learning
    CDBN *pDBN = CDBN::Create(pnlExCreateRndArHMM());

    // Create learning engine
    CEMLearningEngineDBN *pLearn = CEMLearningEngineDBN::Create( pDBN );

    // Set data for learning
    pLearn->SetData( evidencesOut );

    // Start learning
    try
    {
        pLearn->Learn();
    }
    catch( CAlgorithmicException &except )
    {
        std::cout << except.GetMessage() << std::endl;
    }

    std::cout<<"Leraning procedure"<<std::endl;

    const CCPD *pCPD1, *pCPD2;
    for( i = 0; i < 4; i++ )
    {
        std::cout<<" initial model"<<std::endl;
        pCPD1 = static_cast<const CCPD*>( pArHMM->GetFactor(i) );
        ShowCPD( pCPD1 );


	std::cout<<" model after learning"<<std::endl;
        pCPD2 = static_cast<const CCPD*>( pDBN->GetFactor(i) );
	ShowCPD( pCPD2 );

    }

    for( i = 0; i < evidencesOut.size(); i++ )
    {
        int j;
        for( j = 0; j < evidencesOut[i].size(); j++ )
        {
            delete evidencesOut[i][j];
        }
    }
    delete pDBN;
    delete pArHMM;
    delete pLearn;

    return 0;
}