CBNet* Learn_process(const CBNet* pBnet)
{
	//start learning for this model
	//create a copy of the WS BNet with random matrices to learn into

	std::cout<<"Learning procedure \n ";
	CGraph *pGraph = CGraph::Copy( pBnet->GetGraph() ); 
	CModelDomain *pMD = pBnet->GetModelDomain();

	CBNet* pLearnBNet = CBNet::CreateWithRandomMatrices( pGraph, pMD );

	//loading data from file 
	const char * fname = "Data/casesForWS";

	pEvidencesVector evVec;

	if( ! CEvidence::Load(fname,  &evVec, pMD) )
	{
		printf("can't open file with cases");
		exit(1);
		getchar();
	}
	int numOfSamples = evVec.size();
	std::cout<<"Number of cases for learning = "<<numOfSamples<<std::endl;

	//create learning engine
	CEMLearningEngine *pLearn = CEMLearningEngine::Create( pLearnBNet );

	//set data for learning

	pLearn->SetData( numOfSamples, &evVec.front() );
	pLearn->Learn();

	//compare information from the learned model with the initial model
	//both BNets have the same topology and node types,
	//so we only need to compare the CPDs
	//set the comparison tolerance
	float epsilon = 1e-1f;
	int isEqual = IsTheModelEqual( pBnet, pLearnBNet, epsilon );

	std::cout << " The model was learned. The learning was " << std::endl;

	if( isEqual )
	{
		std::cout << " successful " << std::endl;
	}
	else
	{
		std::cout << " unsuccessful " << std::endl;
	}

	int ev;
	for( ev = 0; ev < numOfSamples; ev++ )
	{
		delete evVec[ev];
	}
	delete pLearn;
	delete pBnet;
	return pLearnBNet;
	
}
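A minimal driver sketch for Learn_process (an illustration, not part of the original sample): it assumes the water-sprinkler model can be built with pnlExCreateWaterSprinklerBNet() from pnlExampleModels.h, and relies on Learn_process deleting the model it is given before returning the learned copy.

#include "pnl_dll.hpp"
#include "pnlExampleModels.h"

PNL_USING

int main()
{
    //build the initial model (assumed helper from the PNL example models)
    CBNet *pWSBnet = pnlExCreateWaterSprinklerBNet();

    //Learn_process loads "Data/casesForWS", learns a copy with random
    //matrices, deletes pWSBnet and returns the learned network
    CBNet *pLearnedBNet = Learn_process( pWSBnet );

    delete pLearnedBNet;
    return 0;
}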
Example #2
void CBICLearningEngine::Learn()
{
    CEMLearningEngine *pLearn = NULL;

    float resultBIC = -FLT_MAX;
    CBNet *pResultBNet = NULL;
    intVector resultOrder;
    
    
    pEvidencesVector pEv(m_Vector_pEvidences.size(), NULL );
    
    CModelDomain *pMD = m_pGrModel->GetModelDomain();
    
    int nnodes = m_pGrModel->GetNumberOfNodes();
    
    nodeTypeVector varTypes;
    pMD->GetVariableTypes(&varTypes);

    intVector varAss( pMD->GetVariableAssociations(), pMD->GetVariableAssociations() + nnodes );
       
    intVector currentAssociation(nnodes);
    intVector currentObsNodes(nnodes);
    int i;
    for( i = 0; i < nnodes; i++ )
    {
	currentObsNodes[i] = i;
    }

    CGraph *pGraph = CGraph::Create(nnodes, NULL, NULL, NULL);
    CBNet *pBNet;
    int lineSz = int( nnodes * ( nnodes - 1 ) / 2 );
    intVecVector connect;
    intVector indexes(lineSz, 0);
    int startNode, endNode;
    int ind;
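    // Enumerate all 2^lineSz subsets of the nnodes*(nnodes-1)/2 possible edges:
    // 'indexes' works as a binary counter, and FindNodesByNumber() maps a
    // counter position to the corresponding (startNode, endNode) pair.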
    for( ind = 0; ind < lineSz ; )
    {
	if( indexes[ind] == 1 )
	{
	    FindNodesByNumber(&startNode, &endNode, nnodes, ind);
	    pGraph->RemoveEdge(startNode, endNode );
	    indexes[ind] = 0;
	    ind++;
	}
	else
	{
	    FindNodesByNumber(&startNode, &endNode, nnodes, ind);
	    pGraph->AddEdge(startNode, endNode, 1 );
	    indexes[ind] = 1;
	    ind = 0;
	    connect.clear();
	    pGraph->GetConnectivityComponents(&connect);
	    if( connect.size() == 1 )
	    {
		
		do
		{
		    CGraph *pCopyGraph = CGraph::Copy(pGraph);
		    int j;
		    for( j = 0; j < nnodes; j++ )
		    {
			currentAssociation[j] = varAss[currentObsNodes[j]];
		    }
		    
		    pBNet = CBNet::Create(nnodes, varTypes, currentAssociation, pCopyGraph);
		    pBNet->AllocFactors();
		    for( j = 0; j < nnodes; j++ )
		    {
			pBNet->AllocFactor( j );
			pBNet->GetFactor(j)->CreateAllNecessaryMatrices();
		    }

		    int dimOfModel = DimOfModel(pBNet);
		    int k;
		    for( k = 0; k < pEv.size(); k++ )
		    {
			valueVector vls; 
			m_Vector_pEvidences[k]->GetRawData(&vls);
			pEv[k] = CEvidence::Create( pBNet->GetModelDomain(),currentObsNodes, vls );
		    }
		    
		    
		    pLearn = CEMLearningEngine::Create(pBNet);
		    pLearn->SetData(pEv.size(), &pEv.front());
		    pLearn->Learn();
		    int nsteps;
		    const float *score;
		    pLearn->GetCriterionValue(&nsteps, &score);
		    float log_lik = score[nsteps-1];
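		    // BIC score: log-likelihood of the data under the learned model,
		    // penalized by 0.5 * (model dimension) * log(number of samples)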
		    float BIC = log_lik - 0.5f*float( dimOfModel*log(float(pEv.size())) );
		    
		    if( BIC >= resultBIC )
		    {
			delete pResultBNet;
			resultBIC = BIC;
			m_critValue.push_back(BIC);
			pResultBNet = pBNet;
			resultOrder.assign( currentObsNodes.begin(), currentObsNodes.end() );
		    }
		    else
		    {
			delete pBNet;
		    }
		    for( k = 0; k < pEv.size(); k++ )
		    {
			delete pEv[k];
		    }

		    delete pLearn;
		}while(std::next_permutation(currentObsNodes.begin(), currentObsNodes.end()));
		
	    }
	    
	}
    }
    
    delete pGraph;
    m_pResultGrModel = pResultBNet;
    m_resultRenaming.assign(resultOrder.begin(), resultOrder.end());
    
}
Example #3
int main()
{
    PNL_USING
	//we create a very small model to demonstrate parameter learning on it
	// the model is from Kevin Murphy's BNT\examples\static\belprop_polytree_gaussain
	/*
	Do the example from Satnam Alag's PhD thesis, UCB ME dept 1996 p46
	Make the following polytree, where all arcs point down
	
	 0   1
	  \ /
	   2
	  / \
	 3   4


	*/
	int i;
	//create this model
	int nnodes = 5;
	int numnt = 2;
	CNodeType *nodeTypes = new CNodeType[numnt];
 	nodeTypes[0] = CNodeType(0,2);
	nodeTypes[1] = CNodeType(0,1);
	
	intVector nodeAssociation = intVector(nnodes,0);
	nodeAssociation[1] = 1;
	nodeAssociation[3] = 1;
	int nbs0[] = { 2 };
	int nbs1[] = { 2 };
	int nbs2[] = { 0, 1, 3, 4 };
	int nbs3[] = { 2 };
	int nbs4[] = { 2 };
	int *nbrs[] = { nbs0, nbs1, nbs2, nbs3, nbs4 };
	int numNeighb[] = {1, 1, 4, 1, 1};

	
	ENeighborType ori0[] = { ntChild };
	ENeighborType ori1[] = { ntChild };
	ENeighborType ori2[] = { ntParent, ntParent, ntChild, ntChild };
	ENeighborType ori3[] = { ntParent };
	ENeighborType ori4[] = { ntParent };
	ENeighborType *orient[] = { ori0, ori1, ori2, ori3, ori4 }; 
	
	
	CGraph *pGraph;
	pGraph = CGraph::Create(nnodes, numNeighb, nbrs, orient);
	
	CBNet *pBNet;
	
	pBNet = CBNet::Create( nnodes, numnt, nodeTypes, &nodeAssociation.front(), pGraph );
	//Allocate space for all factors of the model
	pBNet->AllocFactors();
	
	for( i = 0; i < nnodes; i++ )
	{
	    //Allocate space for all matrices of the CPD
	    pBNet->AllocFactor(i);
	}
	
	//now we need to create data for CPDs - we'll create matrices
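	//each continuous node's CPD is a linear Gaussian: matMean and matCovariance
	//give the base mean and covariance, and matWeights(k) is the regression
	//matrix applied to the k-th continuous parent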
	CFactor *pCPD;
	floatVector smData = floatVector(2,0.0f);
	floatVector bigData = floatVector(4,1.0f);
	intVector ranges = intVector(2, 1);
	ranges[0] = 2;
	smData[0] = 1.0f;
	CNumericDenseMatrix<float> *mean0 = CNumericDenseMatrix<float>::
        Create( 2, &ranges.front(), &smData.front());
	bigData[0] = 4.0f;
	bigData[3] = 4.0f;
	ranges[1] = 2;
	CNumericDenseMatrix<float> *cov0 = CNumericDenseMatrix<float>::
        Create( 2, &ranges.front(), &bigData.front());
	pCPD = pBNet->GetFactor(0);
	pCPD->AttachMatrix(mean0, matMean);
	pCPD->AttachMatrix(cov0, matCovariance);
	ranges[0] = 1;
	ranges[1] = 1;
	float val = 1.0f;
	CNumericDenseMatrix<float> *mean1 = CNumericDenseMatrix<float>::
        Create( 2, &ranges.front(), &val );
	CNumericDenseMatrix<float> *cov1 = CNumericDenseMatrix<float>::
        Create( 2, &ranges.front(), &val );
	pCPD = pBNet->GetFactor(1);
	pCPD->AttachMatrix(mean1, matMean);
	pCPD->AttachMatrix(cov1, matCovariance);
	smData[0] = 0.0f;
	smData[1] = 0.0f;
	ranges[0] = 2;
	CNumericDenseMatrix<float> *mean2 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &smData.front());
	smData[0] = 2.0f;
	smData[1] = 1.0f;
	CNumericDenseMatrix<float> *w21 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &smData.front());
	bigData[0] = 2.0f;
	bigData[1] = 1.0f;
	bigData[2] = 1.0f;
	bigData[3] = 1.0f;
	ranges[1] = 2;
	CNumericDenseMatrix<float> *cov2 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &bigData.front());
	bigData[0] = 1.0f;
	bigData[1] = 2.0f;
	bigData[2] = 1.0f;
	bigData[3] = 0.0f;
	CNumericDenseMatrix<float> *w20 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &bigData.front());
	pCPD = pBNet->GetFactor(2);
	pCPD->AttachMatrix( mean2, matMean );
	pCPD->AttachMatrix( cov2, matCovariance );
	pCPD->AttachMatrix( w20, matWeights,0 );
	pCPD->AttachMatrix( w21, matWeights,1 );
	
	val = 0.0f;
	ranges[0] = 1;
	ranges[1] = 1;
	CNumericDenseMatrix<float> *mean3 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &val);
	val = 1.0f;
	CNumericDenseMatrix<float> *cov3 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &val);
	ranges[1] = 2;
	smData[0] = 1.0f;
	smData[1] = 1.0f;
	CNumericDenseMatrix<float> *w30 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &smData.front());
	pCPD = pBNet->GetFactor(3);
	pCPD->AttachMatrix( mean3, matMean );
	pCPD->AttachMatrix( cov3, matCovariance );
	pCPD->AttachMatrix( w30, matWeights,0 );

	ranges[0] = 2; 
	ranges[1] = 1;
	smData[0] = 0.0f;
	smData[1] = 0.0f;
	CNumericDenseMatrix<float> *mean4 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &smData.front());
	ranges[1] = 2;
	bigData[0] = 1.0f;
	bigData[1] = 0.0f;
	bigData[2] = 0.0f;
	bigData[3] = 1.0f;
	CNumericDenseMatrix<float> *cov4 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &bigData.front());
	bigData[2] = 1.0f;
	CNumericDenseMatrix<float> *w40 = CNumericDenseMatrix<float>::
        Create(2, &ranges.front(), &bigData.front());
	pCPD = pBNet->GetFactor(4);
	pCPD->AttachMatrix( mean4, matMean );
	pCPD->AttachMatrix( cov4, matCovariance );
	pCPD->AttachMatrix( w40, matWeights,0 );

	//Generate random evidences for the model
	int nEv = 1000;
	pEvidencesVector evid;
	pBNet->GenerateSamples( &evid, nEv );
	/////////////////////////////////////////////////////////////////////
		
	//Create copy of initial model with random matrices 
	CGraph *pGraphCopy = CGraph::Copy(pGraph); 
	CBNet *pLearnBNet = CBNet::CreateWithRandomMatrices(pGraphCopy, pBNet->GetModelDomain() );
	
	// Creating learning process	
	CEMLearningEngine *pLearn = CEMLearningEngine::Create(pLearnBNet);

	pLearn->SetData(nEv, &evid.front());
	pLearn->Learn();
	CNumericDenseMatrix<float> *pMatrix;
	int length = 0;
	const float *output;
	
	///////////////////////////////////////////////////////////////////////
	std::cout<<" results of learning (number of evidences = "<<nEv<<std::endl;
	for (i = 0; i < nnodes; i++ )
	{
	    int j;
	    std::cout<<"\n matrix mean for node "<<i;
	    std::cout<<"\n initial BNet \n";
	    pMatrix = static_cast<CNumericDenseMatrix<float>*>
		(pBNet->GetFactor(i)->GetMatrix(matMean));
	    pMatrix->GetRawData(&length, &output);
	    for ( j = 0; j < length; j++ )
	    {
		std::cout<<" "<<output[j];
	    }
	    std::cout<<"\n BNet with random matrices after learning \n ";
	    pMatrix = static_cast<CNumericDenseMatrix<float>*>
		(pLearnBNet->GetFactor(i)->GetMatrix(matMean));
	    pMatrix->GetRawData(&length, &output);
	    for ( j = 0; j < length; j++)
	    {
		std::cout<<" "<<output[j];
	    }
	    
    	    std::cout<<"\n \n matrix covariance for node "<<i<<'\n';
	    std::cout<<"\n initial BNet \n";

	    pMatrix = static_cast<CNumericDenseMatrix<float>*>
		(pBNet->GetFactor(i)->GetMatrix(matCovariance));
	    pMatrix->GetRawData(&length, &output);
	    for (j = 0; j < length; j++ )
	    {
		std::cout<<" "<<output[j];
	    }
    	    std::cout<<"\n BNet with random matrices after learning \n ";
	    pMatrix = static_cast<CNumericDenseMatrix<float>*>
		(pLearnBNet->GetFactor(i)->GetMatrix(matCovariance));
	    pMatrix->GetRawData(&length, &output);
	    for ( j = 0; j < length; j++ )
	    {
		std::cout<<" "<<output[j];
	    }

	    std::cout<<"\n ___________________________\n";
	    
	}
	
	
	for( i = 0; i < nEv; i++)
	{
	    delete evid[i];
	}
	delete pLearn;
	delete pLearnBNet;
	delete pBNet;
	
	

return 0;
}
Example #4
int testSetStatistics()
{
    int ret = TRS_OK;
    float eps = 0.1f;
    
    int seed = pnlTestRandSeed();
    pnlSeed( seed );   
            
    CBNet *pBNet = pnlExCreateCondGaussArBNet();
    CModelDomain *pMD = pBNet->GetModelDomain();

    
    CGraph *pGraph = CGraph::Copy(pBNet->GetGraph());
    
    CBNet *pBNet1 = CBNet::CreateWithRandomMatrices( pGraph, pMD );

    pEvidencesVector evidences;
    int nEvidences = pnlRand( 3000, 4000);
    
    pBNet->GenerateSamples( &evidences, nEvidences );
   
    
    int i;
    for( i = 0; i < nEvidences; i++)
    {
	
	//evidences[i]->MakeNodeHiddenBySerialNum(0);
    }
    

    CEMLearningEngine *pLearn = CEMLearningEngine::Create(pBNet1);
    pLearn->SetData( nEvidences, &evidences.front() );
    pLearn->SetMaxIterEM();
    pLearn->Learn();

    for( i = 0; i < pBNet->GetNumberOfFactors(); i++ )
    {
	if( ! pBNet->GetFactor(i)->IsFactorsDistribFunEqual(pBNet1->GetFactor(i), eps))
	{
	    ret = TRS_FAIL;
	    pBNet->GetFactor(i)->GetDistribFun()->Dump();
	    pBNet1->GetFactor(i)->GetDistribFun()->Dump();

	}
    }
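    // Copy the sufficient statistics accumulated by EM from the learned factors
    // into the original model's factors, recompute the parameters from them,
    // and check that the resulting distributions match.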
    
    CDistribFun *pDistr;
    const CMatrix<float>* pMat;
    CFactor *pCPD;
    
    pDistr = pBNet1->GetFactor(0)->GetDistribFun();
    pMat = pDistr->GetStatisticalMatrix(stMatTable);
    
    pCPD = pBNet->GetFactor(0);
    pCPD->SetStatistics(pMat, stMatTable);
    pCPD->ProcessingStatisticalData(nEvidences);
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(0), 0.0001f) )
    {
	ret = TRS_FAIL;
    }
    

    pDistr = pBNet1->GetFactor(1)->GetDistribFun();
    
    int parentVal;
    pCPD = pBNet->GetFactor(1);
    
    parentVal = 0;

    pCPD->SetStatistics(pMat, stMatCoeff);

    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);
    
    parentVal = 1;
    
    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);

    pCPD->ProcessingStatisticalData(nEvidences);
    
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(1), eps) )
    {
	ret = TRS_FAIL;
    }
    
    
    for( i = 0; i < nEvidences; i++)
    {
	delete evidences[i];
    }
    delete pLearn;
    delete pBNet1;
    delete pBNet;

    
    return trsResult( ret, ret == TRS_OK ? "No errors" : 
    "Bad test on SetStatistics");
    
    
}
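Example #5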
bool CStaticStructLearnSEM::LearnOneStep()
{
	intVecVector decompsition;
	CGraph* graph = m_pCurrBNet->GetGraph();
	graph->GetConnectivityComponents( &decompsition );
	CEMLearningEngine* pEMLearn;
	if(decompsition.size() > 1)
	{
		CExInfEngine< CJtreeInfEngine, CBNet, PNL_EXINFENGINEFLAVOUR_DISCONNECTED > *pInf = 
                	CExInfEngine< CJtreeInfEngine, CBNet, PNL_EXINFENGINEFLAVOUR_DISCONNECTED >::
       		Create( m_pCurrBNet  );
		pEMLearn = CEMLearningEngine::Create(m_pCurrBNet, pInf);
	}
	else
    {
        CJtreeInfEngine *pInf = CJtreeInfEngine::Create(m_pCurrBNet);
        pEMLearn = CEMLearningEngine::Create(m_pCurrBNet, pInf);
    }

	int i;
	for(i=0; i<decompsition.size(); i++)
		decompsition[i].clear();
	decompsition.clear();

	ConvertToCurrEvidences(m_pCurrBNet);
	pEMLearn->SetData(m_numberOfAllEvidences, &m_vCurrEvidences.front());
	
	pEMLearn->SetMaxIterEM(m_IterEM);

//	pEMLearn->ClearStatisticData();
	pCPDVector vNeighborCPDs;
	floatVector vNeighborLLs;
	EDGEOPVECTOR vValidMoves;
	intVector vRevCorrespDel;
	CreateNeighborCPDs(m_pCurrBNet, &vNeighborCPDs, &vValidMoves, &vRevCorrespDel);

	pEMLearn->LearnExtraCPDs(m_nMaxFanIn+1, &vNeighborCPDs, &vNeighborLLs);

//	m_pCurrBNet = static_cast<CBNet*>(pEMLearn->GetStaticModel());
	const float* familyLL = pEMLearn->GetFamilyLogLik();
	floatVector familyScores(m_nNodes,0);
	int j, freeparams;
	float logebase = (float)log(float(m_numberOfAllEvidences));
	float total_score = 0.0f;
	CFactor* pCPD;
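	// Decomposable BIC: each node's family score is its log-likelihood minus
	// 0.5 * (number of free parameters) * log(number of evidences)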
	for(i=0; i<m_nNodes; i++)
	{
		pCPD = m_pCurrBNet->GetFactor(i);
		freeparams = pCPD->GetNumberOfFreeParameters();
		familyScores[i] = familyLL[i] - 0.5f * float(freeparams) * logebase;
		total_score += familyScores[i];
	}
	int nMoves = vValidMoves.size();
	floatVector neighborScores(nMoves, 0);
	for(i=0; i<nMoves; i++)
	{
		pCPD = static_cast<CFactor*>(vNeighborCPDs[i]);
		freeparams = pCPD->GetNumberOfFreeParameters();
		neighborScores[i] = vNeighborLLs[i] - 0.5f * float(freeparams) * logebase;
	}

	int start, end, max_position=0;
	float tmp_score, best_score = -1e37f; 
	EDGEOP move;
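	// Score each candidate move by the change it makes to the total score:
	// for an addition or deletion only the end node's family score changes;
	// a reversal combines the addition at the start node's family with the
	// corresponding deletion at the end node's family.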
	for(i=0; i<nMoves; i++)
	{
		move = vValidMoves[i];
		switch (move.DAGChangeType)
		{
		case DAG_DEL : 
			end = move.originalEdge.endNode;
			tmp_score = neighborScores[i] - familyScores[end];
			if( best_score<tmp_score )
			{
				best_score = tmp_score;
				max_position = i;
			}
			break;

		case DAG_ADD :
			end = move.originalEdge.endNode;
			tmp_score = neighborScores[i] - familyScores[end];
			if( best_score<tmp_score )
			{
				best_score = tmp_score;
				max_position = i;
			}
			break;

		case DAG_REV :
			end = move.originalEdge.startNode;
			tmp_score = neighborScores[i] - familyScores[end];
			
			end = move.originalEdge.endNode;
			tmp_score += neighborScores[vRevCorrespDel[i]] - familyScores[end];
			if( best_score<tmp_score )
			{
				best_score = tmp_score;
				max_position = i;
			}
			break;
		}
	}

	move = vValidMoves[max_position];
	start = move.originalEdge.startNode;
	end = move.originalEdge.endNode;
	EDAGChangeType changeType = move.DAGChangeType;
	CCPD *addCPD=0, *delCPD=0;
	switch (changeType)
	{
	case DAG_DEL : 
		delCPD = static_cast<CCPD*>((vNeighborCPDs[max_position])->Clone());
		break;

	case DAG_ADD :
		addCPD = static_cast<CCPD*>((vNeighborCPDs[max_position])->Clone());
		break;

	case DAG_REV :
		addCPD = static_cast<CCPD*>((vNeighborCPDs[max_position])->Clone());
		delCPD = static_cast<CCPD*>((vNeighborCPDs[vRevCorrespDel[max_position]])->Clone());			
		break;
	}

	delete pEMLearn;
	for(i=0; i<vNeighborCPDs.size(); i++)
	{
		delete vNeighborCPDs[i];
	}
	vNeighborCPDs.clear();
	for(i=0; i<m_numberOfAllEvidences; i++)
	{
		delete m_vCurrEvidences[i];
	}
	m_vCurrEvidences.clear();
	vValidMoves.clear();
	float score_gate = (float)fabs(m_minProgress * total_score);
	if(best_score <= score_gate)
	{
		if(changeType == DAG_REV)
		{
			delete addCPD;
			delete delCPD;
		}
		if(changeType == DAG_ADD)delete addCPD;
		if(changeType == DAG_DEL)delete delCPD;
		return false;
	}

	total_score += best_score;
	CDAG* pDAG = CDAG::Create(*(m_pCurrBNet->GetGraph()));
	int node, node1, newnode;
	if(!(pDAG->DoMove(start, end, changeType)))
	{
		PNL_THROW(CInternalError, "There are some internal errors");
	}
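	// If the modified DAG is no longer topologically sorted, rebuild it in
	// topological order; vRenaming maps new node indices to old ones and
	// Old2New maps old indices to new ones.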

	intVector vRenaming, Old2New;
	CDAG* iDAG;
	int TopologicSorted = pDAG->IsTopologicallySorted();
	if( TopologicSorted )
	{
		iDAG = pDAG->Clone();
		for(i=0; i<m_nNodes; i++) vRenaming.push_back(i);
	}
	else
		iDAG = pDAG->TopologicalCreateDAG(vRenaming);
	pDAG->Dump();
	intVector gRename;
	for(i=0; i<m_nNodes; i++)
	{
		node = vRenaming[i];
		node1 = m_vGlobalRenaming[node];
		gRename.push_back(node1);
	}
	m_vGlobalRenaming.assign(gRename.begin(), gRename.end());

	int pos;
	for(i=0; i<m_nNodes; i++)
	{
		pos = std::find(vRenaming.begin(), vRenaming.end(), i) - vRenaming.begin();
		Old2New.push_back(pos);
	}

	const int* oldNodeAsso = m_pCurrBNet->GetNodeAssociations();
	intVector newNodeAsso(m_nNodes,0);
	for(i=0; i<m_nNodes; i++)
	{
		newNodeAsso[i] = oldNodeAsso[vRenaming[i]];
	}
	nodeTypeVector vpnt;
	m_pCurrBNet->GetNodeTypes(&vpnt);
	CBNet* pBNet = CBNet::Create(m_nNodes, vpnt.size(), &vpnt.front(), 
		           &newNodeAsso.front(), static_cast<CGraph*>(iDAG));
	CModelDomain* pMDnew = pBNet->GetModelDomain();
	pBNet->AllocFactors();
	intVector domainNew, domainOld;
	const CFactor* factor=0;
	CFactor* curFactor;
	for(i=0; i<m_nNodes; i++)
	{
		domainNew.clear();
		newnode = Old2New[i];
		if( (i != start) && (i != end) )
		{
			factor = m_pCurrBNet->GetFactor(i);
		}
		else
		{
			if(changeType == DAG_REV)
			{
				if(i == start)
					factor = addCPD->Clone();
				if(i == end)
					factor = delCPD->Clone();
			}
			if(changeType == DAG_DEL)
			{
				if(i == start)
					factor = m_pCurrBNet->GetFactor(i);
				if(i == end)
					factor = delCPD->Clone();
			}
			if(changeType == DAG_ADD)
			{
				if(i == start)
					factor = m_pCurrBNet->GetFactor(i);
				if(i == end)
					factor = addCPD->Clone();
			}
		}
		factor->GetDomain(&domainOld);
		for(j=0; j<domainOld.size(); j++)
		{
			domainNew.push_back(Old2New[domainOld[j]]);
		}
		curFactor = CFactor::CopyWithNewDomain(factor, domainNew, pMDnew);
		pBNet->AttachFactor(curFactor);
	}

	if(changeType == DAG_REV)
	{
		delete addCPD;
		delete delCPD;
	}
	if(changeType == DAG_ADD)delete addCPD;
	if(changeType == DAG_DEL)delete delCPD;

	delete m_pCurrBNet;
	delete pDAG;
	m_pCurrBNet = pBNet;
	m_critValue.push_back(total_score);
	return true;
}
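A sketch of the driver loop this step function implies (an assumption about the surrounding Learn() routine, not the library's actual implementation): LearnOneStep() is applied repeatedly until no single-edge move improves the total score by more than the m_minProgress threshold.

void CStaticStructLearnSEM::Learn()
{
	//repeat single-edge hill-climbing steps; each successful step replaces
	//m_pCurrBNet with the improved structure and records the new score
	while( LearnOneStep() )
	{
	}
}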