void CNodeValues::ToggleNodeStateBySerialNumber(int nNodes, const int *nodeNumbers)
{
    /* changes the status of a node from really observed
       to potentially observed and back */
    int i, j;

    if(( nNodes < 0 ) || ( nNodes > m_numberObsNodes ))
    {
        PNL_THROW( COutOfRange,
            "number of nodes to toggle must not exceed m_numberObsNodes" );
    }
    else
    {
        /* flagAllRight stays 1 while every node from nodeNumbers is found
           among the observed nodes (i.e. its number is in m_obsNodes) */
        int flagAllRight = 1;
        int thereIsSuchNode = 0; /* per-node flag: is it among m_obsNodes? */
        int *nodeIndices = new int[nNodes];
        PNL_CHECK_IF_MEMORY_ALLOCATED( nodeIndices );

        /* check all the input data */
        for( i = 0; i < nNodes; i++ )
        {
            thereIsSuchNode = 0; /* reset for every input node */
            for( j = 0; j < m_numberObsNodes; j++ )
            {
                if( nodeNumbers[i] == j )
                {
                    thereIsSuchNode = 1;
                    nodeIndices[i] = j;
                    break;
                }
            }
            if( !thereIsSuchNode )
            {
                flagAllRight = 0;
                break;
            }
        }
        if( flagAllRight )
        {
            for( i = 0; i < nNodes; i++ )
            {
                /* flip the 0/1 observation flag */
                m_isObsNow[nodeIndices[i]] = 1 - m_isObsNow[nodeIndices[i]];
                //fixme !!! is it enough to use the offset? - I think yes
            }
            delete []nodeIndices;
        }
        else
        {
            delete []nodeIndices; /* free before throwing to avoid a leak */
            PNL_THROW( COutOfRange,
                "some node has a number which is not in m_obsNodes" );
        }
    }
}
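/* A minimal, self-contained sketch (not PNL code) of the flip used above:
   for a 0/1 observation flag, x = 1 - x toggles between "potentially
   observed" (0) and "really observed" (1). */
#include <cassert>

int main()
{
    int isObsNow = 0;        // potentially observed
    isObsNow = 1 - isObsNow;
    assert(isObsNow == 1);   // really observed
    isObsNow = 1 - isObsNow;
    assert(isObsNow == 0);   // back again
    return 0;
}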
bool C2DBitwiseMatrix::AddOneRowWithAnother(int nSourceRow, int nDestinationRow)
{
    if( nSourceRow < 0 || nSourceRow >= m_nHeight )
    {
        PNL_THROW(COutOfRange, "Row out of range!");
        return false;
    }
    if( nDestinationRow < 0 || nDestinationRow >= m_nHeight )
    {
        PNL_THROW(COutOfRange, "Row out of range!");
        return false;
    }

    // number of bytes per row, rounded up to cover a partial last byte
    int nLength = m_nWidth / BITWISEMATRIX_BYTESIZE;
    if( nLength * BITWISEMATRIX_BYTESIZE < m_nWidth )
        nLength++;

    // the destination row becomes the bitwise OR of the two rows
    for( int i = 0; i < nLength; i++ )
    {
        m_pMatrix[nDestinationRow][i] = (unsigned char)
            (m_pMatrix[nDestinationRow][i] | m_pMatrix[nSourceRow][i]);
    }
    return true;
}
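/* A standalone sketch (not PNL code) of the row-length computation above:
   a width in bits is packed into whole bytes, rounding up so a partial last
   byte is still counted. Assumes BITWISEMATRIX_BYTESIZE is 8. */
#include <cassert>

static int BytesPerRow(int widthBits, int byteSize)
{
    int nLength = widthBits / byteSize;
    if( nLength * byteSize < widthBits )
        nLength++; // partial last byte
    return nLength;
}

int main()
{
    assert(BytesPerRow(16, 8) == 2);
    assert(BytesPerRow(17, 8) == 3); // 17 bits need a third byte
    return 0;
}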
void CMNet::AttachFactor(CFactor *param)
{
    /* this function is used for attaching factors that have already been
       created with the call of the Create() function for some factor type */

    /* bad-args check */
    if( !param )
    {
        PNL_THROW( CNULLPointer, " param == NULL " );
    }
    if( param->GetFactorType() != ftPotential )
    {
        PNL_THROW( CInconsistentType, "can attach only potential to MNet" );
    }

    int cliqueNumber;
    int numberOfNodesInDomain;
    const int *domain;

    param->GetDomain( &numberOfNodesInDomain, &domain );
    cliqueNumber = FindCliqueNumberByDomain( numberOfNodesInDomain, domain );
    if( cliqueNumber == -1 )
    {
        /* there is no clique to match the domain */
        PNL_THROW( CInvalidOperation, " no clique matching domain " );
    }

    /* make calling AllocFactors() optional */
    if( !m_pParams )
    {
        AllocFactors();
    }
    m_paramInds[cliqueNumber] = m_pParams->AddFactor(param);
}
void CTreeCPD::UpdateStatisticsEM( const CPotential *pMargPot,
                                   const CEvidence *pEvidence )
{
    if( !pMargPot )
    {
        PNL_THROW( CNULLPointer, "marginal potential" );
    }
    intVector obsPos;
    pMargPot->GetObsPositions(&obsPos);
    m_CorrespDistribFun->UpdateStatisticsEM( pMargPot->GetDistribFun(),
                                             pEvidence, 1.0f,
                                             &m_Domain.front() );
}
void CMNet::CreateTabularPotential( const intVector& domain,
                                    const floatVector& data )
{
    AllocFactor( domain.size(), &domain.front() );

    pFactorVector factors;
    int numFactors = GetFactors( domain.size(), &domain.front(), &factors );
    if( numFactors != 1 )
    {
        PNL_THROW( CInconsistentSize,
            "domain must match a clique domain obtained from the graph" );
    }
    factors[0]->AllocMatrix( &data.front(), matTable );
}
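/* Hypothetical caller sketch, not part of the library: fills a clique over
   two binary nodes with a row-major 2x2 table. Assumes pMNet is an existing
   CMNet* whose graph contains the clique {0, 1}. */
void ExampleFillClique(pnl::CMNet* pMNet)
{
    pnl::intVector domain;
    domain.push_back(0);
    domain.push_back(1);

    pnl::floatVector data; // 2 x 2 table, row-major
    data.push_back(0.1f); data.push_back(0.9f);
    data.push_back(0.4f); data.push_back(0.6f);

    pMNet->CreateTabularPotential(domain, data);
}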
void CMNet::AllocFactor(int cliqueNumber)
{
    /* bad-args check */
    if( ( cliqueNumber < 0 ) || ( cliqueNumber >= int(m_cliques.size()) ) )
    {
        PNL_THROW( COutOfRange, " cliqueNumber < 0 or >= numberOfCliques " );
    }
    /* bad-args check end */

    _AllocFactor( cliqueNumber, m_cliques[cliqueNumber].size(),
                  &m_cliques[cliqueNumber].front(), ftPotential );
}
void CMlDynamicStructLearn::Learn()
{
    RearrangeEvidences();
    int nNodesSlice = m_pGrModel->GetNumberOfNodes();

    // nodes 0..n-1 form the prior slice, nodes n..2n-1 the second slice
    intVector vAncestor, vDescent;
    for( int i = 0; i < nNodesSlice; i++ )
    {
        vAncestor.push_back(i);
        vDescent.push_back(i + nNodesSlice);
    }

    // currently only the hill-climbing algorithm is available
    CMlStaticStructLearn* pSSL = CMlStaticStructLearnHC::Create(
        m_pGrModel->GetStaticModel(), itStructLearnML, StructLearnHC, BIC,
        m_nMaxFanIn, vAncestor, vDescent, m_nRestarts );
    pSSL->SetData( m_vEvidences.size(), &m_vEvidences.front() );
    pSSL->SetMaxIterIPF( m_nMaxIters );
    static_cast<CMlStaticStructLearnHC*>(pSSL)->SetMinProgress(m_minProgress);
    pSSL->Learn();
    const CDAG* p2SDAG = pSSL->GetResultDAG();
    // p2SDAG->Dump();

    if( !m_LearnPriorSlice )
    {
        m_pResultDag = const_cast<CDAG*>(p2SDAG)->Clone();
        delete pSSL;
        return;
    }

    // learn the prior-slice structure separately
    intVector vA, vD;
    CStaticGraphicalModel* pGrModel0 = m_pGrModel->CreatePriorSliceGrModel();
    CMlStaticStructLearn* pSSL0 = CMlStaticStructLearnHC::Create(
        pGrModel0, itStructLearnML, StructLearnHC, BIC,
        m_nMaxFanIn, vA, vD, m_nRestarts );
    pSSL0->SetData( m_vEvidence0.size(), &m_vEvidence0.front() );
    pSSL0->SetMaxIterIPF( m_nMaxIters / 2 );
    static_cast<CMlStaticStructLearnHC*>(pSSL0)->SetMinProgress(m_minProgress);
    pSSL0->Learn();
    const CDAG* p0SDAG = pSSL0->GetResultDAG();
    // p0SDAG->Dump();

    // graft the prior-slice DAG into the two-slice DAG
    CDAG* pDAG = const_cast<CDAG*>(p2SDAG)->Clone();
    if( pDAG->SetSubDag( vAncestor, const_cast<CDAG*>(p0SDAG) ) )
    {
        m_pResultDag = pDAG->Clone();
        delete pDAG;
    }
    else
        PNL_THROW(CInternalError, "InternalError, cannot generate a DAG");

    delete pSSL;
    delete pSSL0;
    delete pGrModel0; // the prior-slice model is owned here
}
void TestsPnlHigh::TestDefaultDistribution()
{
    printf("TestDefaultDistribution\n");
    BayesNet *net = SimpleCGM1();

    // index 1 corresponds to "Cont1"
    WCondGaussianDistribFun *pCGDF = dynamic_cast<WCondGaussianDistribFun *>(
        net->m_pNet->m_paDistribution->Distribution(1));
    //pCGDF->m_pDistrib->Dump();

    Vector<int> aIndex(pCGDF->desc()->nNode() + 3, 0);
    aIndex[0] = 0;
    aIndex[4] = 0;
    aIndex[5] = 0;

    if (pCGDF->GetAValue(pnl::matMean, aIndex) != 1.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }
    if (pCGDF->GetAValue(pnl::matCovariance, aIndex) != 1.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 0.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }

    aIndex[5] = 1;
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 0.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }

    aIndex[0] = 1;
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 0.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }

    aIndex[5] = 0;
    if (pCGDF->GetAValue(pnl::matWeights, aIndex) != 0.0f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Wrong default distribution");
    }

    //WGaussianDistribFun *pGDF = dynamic_cast<WGaussianDistribFun *>(
    //    net->m_pNet->m_paDistribution->Distribution(2));
    //pGDF->m_pDistrib->Dump();

    delete net;
}
const CEvidence* CBKInfEngine::GetMPE()
{
    if( m_ProcedureType != ptViterbi )
    {
        PNL_THROW( CInvalidOperation, " the MPE has not been computed " );
    }

    const CEvidence *pEv = m_QuerryJTree->GetMPE();
    intVector queryNodes;
    GetQueryNodes(&queryNodes);

    SetEvidenceMPE( CEvidence::Create( pEv, queryNodes.size(),
        &queryNodes.front(), GrModel()->GetModelDomain() ) );
    return GetEvidenceMPE();
}
PNL_USING

CParEMLearningEngine* CParEMLearningEngine::Create(
    CStaticGraphicalModel *pGrModel)
{
    if( !pGrModel )
    {
        PNL_THROW(CNULLPointer, "Graphical Model");
    }

    CParEMLearningEngine *pNewEngine =
        new CParEMLearningEngine(pGrModel, NULL, itParamLearnEM);
    return pNewEngine;
}
void CNodeValues::SetData(const valueVector& data)
{
    /* we set all the data at once - as if all nodes were really observed
       and we need to know all their values */
    if( m_numberObsNodes )
    {
        /* put the new data (node values as an array) in place of the old data */
        if( m_rawValues.size() != data.size() )
        {
            PNL_THROW( CInconsistentSize,
                "input data size must correspond to the node types of observed nodes" );
        }
        m_rawValues.assign( data.begin(), data.end() );
    }
}
void TokIdNode::Alias(TokId const &id)
{
    TokIdNode *nd;

    // a new alias must not clash with any sibling's aliases
    for( nd = h_prev; nd; nd = nd->h_prev )
    {
        if( nd->Match(id) )
        {
            PNL_THROW(pnl::CBadArg,
                "attempt to make ambiguous alias, brothers cannot have matching aliases");
        }
    }
    for( nd = h_next; nd; nd = nd->h_next )
    {
        if( nd->Match(id) )
        {
            PNL_THROW(pnl::CBadArg,
                "attempt to make ambiguous alias, brothers cannot have matching aliases");
        }
    }

    this->id.push_back(id);
    // register the new alias with every ancestor's lookup map
    for( nd = v_prev; nd; nd = nd->v_prev )
    {
        nd->desc.insert(std::make_pair(id, this));
    }
}
PNL_USING

CGibbsSamplingInfEngine * CGibbsSamplingInfEngine::Create(
    const CStaticGraphicalModel *pGraphicalModel )
{
    if( !pGraphicalModel )
    {
        PNL_THROW( CNULLPointer, "graphical model" );
    }

    CGibbsSamplingInfEngine* newInf =
        new CGibbsSamplingInfEngine( pGraphicalModel );
    return newInf;
}
CTreeCPD::CTreeCPD( const CTreeCPD& TreeCPD )
    : CCPD( dtTree, ftCPD, TreeCPD.GetModelDomain() )
{
    //m_CorrespDistribFun = TreeCPD.m_CorrespDistribFun->CloneDistribFun();
    if( TreeCPD.m_CorrespDistribFun->GetDistributionType() == dtTree )
    {
        delete m_CorrespDistribFun;
        m_CorrespDistribFun = CTreeDistribFun::Copy(
            static_cast<CTreeDistribFun*>(TreeCPD.m_CorrespDistribFun) );
    }
    else
    {
        PNL_THROW( CInconsistentType, "distribution must be tree" );
    }
    m_Domain = intVector( TreeCPD.m_Domain );
}
PNL_USING

CLWSamplingInfEngine * CLWSamplingInfEngine::Create(
    const CStaticGraphicalModel *pGraphicalModel, int particleCount )
{
    if( !pGraphicalModel )
    {
        PNL_THROW( CNULLPointer, "graphical model" );
    }

    CLWSamplingInfEngine* newInf =
        new CLWSamplingInfEngine( pGraphicalModel, particleCount );
    return newInf;
}
void CPersistGaussianDistribFun::TraverseSubobject(CPNLBase *pObj, CContext *pContext)
{
    CGaussianDistribFun *pDF = dynamic_cast<CGaussianDistribFun*>(pObj);
    PNL_CHECK_IS_NULL_POINTER(pDF); // dynamic_cast yields NULL for a wrong type

    if( pDF->IsDistributionSpecific() == 3 )
    {
        PNL_THROW(CInvalidOperation, "DistribFun is Gaussian multiplied by Delta");
    }
    TraverseDistribFunSubobjects(pDF, pContext);
    if( pDF->IsDistributionSpecific() == 1 )
    {
        return;
    }
    if( pDF->IsDistributionSpecific() == 2 )
    {
        pContext->Put(pDF->GetMatrix(matMean), "MatMean");
        return;
    }
    if( pDF->GetCanonicalFormFlag() )
    {
        pContext->Put(pDF->GetMatrix(matH), "MatH");
        pContext->Put(pDF->GetMatrix(matK), "MatK");
    }
    if( pDF->GetMomentFormFlag() )
    {
        pContext->Put(pDF->GetMatrix(matMean), "MatMean");
        pContext->Put(pDF->GetMatrix(matCovariance), "MatCovariance");
        if( !pDF->GetFactorFlag() )
        {
            // one weight matrix per parent
            int nParent = pDF->GetNumberOfNodes() - 1;
            for( int i = 0; i < nParent; ++i )
            {
                std::stringstream name;
                name << "MatWeight" << i;
                pContext->Put(pDF->GetMatrix(matWeights, i), name.str().c_str());
            }
        }
    }
}
void CSoftMaxCPD::AllocDistribution(const float* pWeights,
    const float* pOffsets, const int* parentCombination)
{
    PNL_CHECK_IS_NULL_POINTER(pWeights);
    PNL_CHECK_IS_NULL_POINTER(pOffsets);

    const CNodeType *nt;
    nt = GetModelDomain()->GetVariableType( m_Domain[m_Domain.size() - 1] );
    int SoftMaxSize = nt->GetNodeSize();

    if( SoftMaxSize == 2 )
    {
        // count the continuous nodes in the domain; the weight matrix has
        // one pair of entries per continuous node
        int matSize = 0;
        int i;
        for( i = 0; i < m_Domain.size(); i++ )
        {
            nt = GetModelDomain()->GetVariableType( m_Domain[i] );
            if( !( nt->IsDiscrete() ) )
            {
                matSize++;
            }
        }
        // a two-state softmax degenerates to a constant if the paired
        // weights coincide
        for( i = 0; i < 2 * matSize - 1; i += 2 )
        {
            if( pWeights[i] == pWeights[i + 1] )
                PNL_THROW(CNotImplemented, "sigmoid must have distinct weights");
        }
    }

    if( m_CorrespDistribFun->GetDistributionType() == dtSoftMax )
    {
        AllocMatrix(pWeights, matWeights);
        static_cast<CSoftMaxDistribFun*>(m_CorrespDistribFun)->
            AllocOffsetVector(pOffsets);
    }
    else
    {
        PNL_CHECK_IS_NULL_POINTER(parentCombination);
        AllocMatrix(pWeights, matWeights, -1, parentCombination);
        static_cast<CCondSoftMaxDistribFun*>(m_CorrespDistribFun)->
            AllocOffsetVector(pOffsets, parentCombination);
    }
}
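/* A numeric, self-contained sketch (not PNL code) of why the distinct-weights
   check above matters. For a two-state softmax over one continuous parent,
   p(y=0|x) = 1 / (1 + exp((w1 - w0)*x + (b1 - b0))), so equal weight columns
   (w0 == w1) make the probability independent of x - a degenerate sigmoid. */
#include <cmath>
#include <cstdio>

static double BinarySoftmaxP0(double w0, double w1, double b0, double b1, double x)
{
    return 1.0 / (1.0 + std::exp((w1 - w0) * x + (b1 - b0)));
}

int main()
{
    // w0 == w1: the same probability for x = -5 and x = +5
    std::printf("%f %f\n",
        BinarySoftmaxP0(2.0, 2.0, 0.0, 1.0, -5.0),
        BinarySoftmaxP0(2.0, 2.0, 0.0, 1.0,  5.0));
    // w0 != w1: the probability actually depends on x
    std::printf("%f %f\n",
        BinarySoftmaxP0(2.0, 3.0, 0.0, 1.0, -5.0),
        BinarySoftmaxP0(2.0, 3.0, 0.0, 1.0,  5.0));
    return 0;
}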
const CPotential* CBKInfEngine::GetQueryJPD()
{
    if( m_ProcedureType != ptFiltering &&
        m_ProcedureType != ptSmoothing &&
        m_ProcedureType != ptFixLagSmoothing )
    {
        PNL_THROW( CInvalidOperation, " the query JPD has not been computed " );
    }

    if( !GetQueryPot() )
    {
        const CPotential* pQueryPot = m_QuerryJTree->GetQueryJPD();
        /*
        const CDistribFun *pQueryDistribFun = pQueryPot->GetDistribFun();
        switch( pQueryDistribFun->GetDistributionType() )
        {
        case dtTabular:
            SetQueryPot( CTabularPotential::Create( m_queryNodes,
                GrModel()->GetModelDomain(), NULL ) );
            break;
        case dtGaussian:
            SetQueryPot( CGaussianPotential::Create( m_queryNodes,
                GrModel()->GetModelDomain(), NULL ) );
            break;
        case dtScalar:
            SetQueryPot( CScalarPotential::Create( m_queryNodes,
                GrModel()->GetModelDomain() ) );
            break;
        default:
            PNL_THROW(CNotImplemented, "type of potential");
        }
        GetQueryPot()->SetDistribFun( pQueryDistribFun );
        */
        intVector obsPos;
        pQueryPot->GetObsPositions(&obsPos);
        intVector queryNodes;
        GetQueryNodes(&queryNodes);
        SetQueryPot( static_cast<CPotential*>( CFactor::CopyWithNewDomain(
            pQueryPot, queryNodes, GrModel()->GetModelDomain(), obsPos ) ) );
    }
    return GetQueryPot();
}
void CStaticStructLearnSEM::ConvertToCurrEvidences(CBNet* pBNet)
{
    if( !pBNet )
        PNL_THROW(CInvalidOperation, "need to create a new BNet first");

    int i, oldNode, newNode, m;
    intVector ObsNodes;

    // drop the evidences converted on the previous iteration
    if( m_vCurrEvidences.size() != 0 )
    {
        for( i = 0; i < m_numberOfAllEvidences; i++ )
            delete m_vCurrEvidences[i];
        m_vCurrEvidences.clear();
    }

    CModelDomain* pMD = pBNet->GetModelDomain();
    CModelDomain* pGrMD = this->GetStaticModel()->GetModelDomain();
    const CNodeType* nt;
    const Value* value;
    valueVector vValues;

    for( i = 0; i < m_numberOfAllEvidences; i++ )
    {
        const CEvidence* pEvidence = m_Vector_pEvidences[i];
        // translate every observed node through the global renaming
        for( newNode = 0; newNode < m_nNodes; newNode++ )
        {
            oldNode = m_vGlobalRenaming[newNode];
            if( pEvidence->IsNodeObserved(oldNode) )
            {
                ObsNodes.push_back(newNode);
                value = pEvidence->GetValue(oldNode);
                nt = pGrMD->GetVariableType(oldNode);
                if( nt->IsDiscrete() )
                {
                    vValues.push_back(*value);
                }
                else
                {
                    // a continuous node carries one value per dimension
                    for( m = 0; m < nt->GetNodeSize(); m++ )
                        vValues.push_back(*(value + m));
                }
            }
        }
        CEvidence* pEv = CEvidence::Create(pMD, ObsNodes, vValues);
        m_vCurrEvidences.push_back(pEv);
        vValues.clear();
        ObsNodes.clear();
    }
}
void CJtreeInfEngine::EnterEvidence( const CEvidence *pEvidence,
                                     int maximize, int sumOnMixtureNode )
{
    // bad-args check
    PNL_CHECK_IS_NULL_POINTER(pEvidence);
    PNL_CHECK_RANGES( maximize, 0, 2 );

    if( pEvidence->GetModelDomain() != m_pGraphicalModel->GetModelDomain() )
    {
        PNL_THROW( CInvalidOperation,
            "evidence and the Graphical Model must be on one Model Domain" );
    }
    // bad-args check end

    int i;

    ShrinkObserved( pEvidence, maximize, sumOnMixtureNode );
    CollectEvidence();
    DistributeEvidence();

    // refresh the canonical coefficients of all Gaussian clique potentials
    for( i = 0; i < m_pJTree->GetNumberOfNodes(); i++ )
    {
        EDistributionType dt = m_pJTree->GetNodePotential(i)->
            GetDistribFun()->GetDistributionType();
        if( dt == dtGaussian )
        {
            static_cast<CGaussianDistribFun*>(
                m_pJTree->GetNodePotential(i)->GetDistribFun())->
                UpdateCanonicalCoefficient();
        }
    }

    if( GetModel()->GetModelType() == mtBNet )
    {
        for( i = 0; i < GetModel()->GetNumberOfNodes(); i++ )
        {
            if( GetModel()->GetFactor(i)->GetDistributionType() == dtSoftMax )
            {
                GetModel()->GetModelDomain()->ChangeNodeType(i, 0);
            }
        }
    }
}
void CStaticLearningEngine::AppendData(int dim, const CEvidence* const* pEvidences)
{
    // build up the vector of pointers to the evidences
    int i = 0;
    for( i = 0; i < dim; i++ )
    {
        if( !pEvidences[i] )
        {
            PNL_THROW(CNULLPointer, "evidence");
        }
        m_Vector_pEvidences.push_back( pEvidences[i] );
    }
    m_numberOfAllEvidences = m_Vector_pEvidences.size();
    ClearStatisticData();
}
const CPotential* CJtreeInfEngine::GetQueryJPD() const
{
    /* operation validity check */
    /*
    if( m_lastOpDone != opsMargNodes )
    {
        PNL_THROW( CInvalidOperation,
            " cannot return query jpd, marginalization not done " );
    }
    */
    /* operation validity check end */

    if( !m_pQueryJPD )
    {
        PNL_THROW( CInvalidOperation,
            " can't call GetQueryJPD() before calling MarginalNodes() " );
    }
    return m_pQueryJPD;
}
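/* Hedged usage sketch, not library code: the usual junction-tree query cycle
   around EnterEvidence() and GetQueryJPD(). Assumes pBNet and pEv are
   existing CBNet* and CEvidence* objects on the same model domain, and that
   node 0 is the (hypothetical) query node. */
void ExampleJtreeQuery(pnl::CBNet* pBNet, pnl::CEvidence* pEv)
{
    pnl::CJtreeInfEngine* pInf = pnl::CJtreeInfEngine::Create(pBNet);
    pInf->EnterEvidence(pEv);          // shrink, collect, distribute
    const int query[] = { 0 };
    pInf->MarginalNodes(query, 1);     // must precede GetQueryJPD()
    const pnl::CPotential* pJPD = pInf->GetQueryJPD();
    // ... use pJPD ...
    delete pInf;
}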
void CMlStaticStructLearnHC::Learn()
{
    if( m_Vector_pEvidences.size() == 0 )
        PNL_THROW(CInconsistentState, "should set the data first");

    CGraph* iGraph = m_pGrModel->GetGraph();
    CDAG* iDAG = CDAG::Create(*iGraph);
    CDAG* pDAG;
    CDAG* pBestDAG = NULL;
    float BestScore = (float)-1e37;
    int irestarts = m_nRestarts;
    int i, istart;
    float score;

    for( istart = 0; istart < irestarts; istart++ )
    {
        if( istart > 0 )
        {
            // after the first pass, restart hill climbing from a random DAG
            delete iDAG;
            intVector vDiscrete, vContinuous;
            const CNodeType* nt;
            for( i = 0; i < m_nNodes; i++ )
            {
                nt = m_pGrModel->GetNodeType(i);
                if( nt->IsDiscrete() )
                    vDiscrete.push_back(i);
                else
                    vContinuous.push_back(i);
            }
            iDAG = CDAG::RandomCreateADAG(m_nNodes, m_nMaxFanIn,
                                          vDiscrete, vContinuous);
        }
        LearnInOneStart(iDAG, &pDAG, &score);
        if( score > BestScore )
        {
            delete pBestDAG;
            pBestDAG = pDAG->Clone();
            BestScore = score;
        }
        delete pDAG;
    }
    delete iDAG;

    m_pResultDAG = pBestDAG->Clone();
    m_critValue.push_back(BestScore);
    delete pBestDAG;
}
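/* A minimal, self-contained sketch (not PNL code) of the restart pattern
   above: keep a clone of the best-scoring candidate, free everything else,
   and remember the best score. Candidate and its random score are stand-ins
   for a learned DAG and LearnInOneStart(). */
#include <cstdlib>

struct Candidate
{
    float score;
    Candidate* Clone() const { return new Candidate(*this); }
};

int main()
{
    const int nRestarts = 5;
    float bestScore = -1e37f;
    Candidate* pBest = 0;

    for( int r = 0; r < nRestarts; ++r )
    {
        Candidate* pCur = new Candidate;              // random start
        pCur->score = float(std::rand()) / RAND_MAX;  // stand-in for the learned score
        if( pCur->score > bestScore )
        {
            delete pBest;
            pBest = pCur->Clone();
            bestScore = pCur->score;
        }
        delete pCur;
    }
    delete pBest;
    return 0;
}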
PNL_USING

/////////////////////////////////////////////////////////////////////////////

CSpecPearlInfEngine* CSpecPearlInfEngine::Create(const CStaticGraphicalModel* pGrModel)
{
    PNL_CHECK_IS_NULL_POINTER(pGrModel);

    if( !IsInputModelValid(pGrModel) )
    {
        PNL_THROW( CInconsistentType, " input model is invalid " );
    }

    CSpecPearlInfEngine* pPearlInfEng = new CSpecPearlInfEngine(pGrModel);
    PNL_CHECK_IF_MEMORY_ALLOCATED(pPearlInfEng);

    return pPearlInfEng;
}
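/* Hedged usage sketch, not library code: the factory above returns a
   heap-allocated engine the caller owns. Assumes pGrModel is a model that
   IsInputModelValid() accepts and pEv an evidence over it. */
void ExamplePearlRun(pnl::CStaticGraphicalModel* pGrModel, pnl::CEvidence* pEv)
{
    pnl::CSpecPearlInfEngine* pPearl = pnl::CSpecPearlInfEngine::Create(pGrModel);
    pPearl->EnterEvidence(pEv);
    // ... MarginalNodes / queries ...
    delete pPearl;
}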
void CMlLearningEngine::Learn()
{
    /*
     * takes the information from m_pEvidences and learns the factors
     * of the graphical model, with or without prior probabilities
     */
    float logLikTmp = 0;
    if( !m_pGrModel )
    {
        PNL_THROW( CNULLPointer, "no graphical model" );
    }

    CStaticGraphicalModel *grmodel = this->GetStaticModel();
    CFactor *factor = NULL;
    int numberOfDomains = grmodel->GetNumberOfFactors();

    for( int domainNodes = 0; domainNodes < numberOfDomains; domainNodes++ )
    {
        factor = grmodel->GetFactor( domainNodes );
        factor->UpdateStatisticsML( &m_Vector_pEvidences.front(),
                                    m_Vector_pEvidences.size() );
        PNL_CHECK_LEFT_BORDER(m_numberOfAllEvidences, 1);
        logLikTmp += factor->ProcessingStatisticalData(m_numberOfAllEvidences);
    }

    switch( grmodel->GetModelType() )
    {
    case mtBNet:
        break;
    case mtMRF2:
    case mtMNet:
        logLikTmp = _LearnPotentials();
        break;
    default:
        PNL_THROW(CBadConst, "model type");
        break;
    }
    m_critValue.push_back(logLikTmp);
}
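/* Hedged usage sketch, not library code: the call order this learner
   expects - append the evidences first, then run Learn(). Assumes pLearn is
   an existing CMlLearningEngine* (AppendData() is inherited from
   CStaticLearningEngine, shown earlier) and pEvidences an array of
   nEvidences pointers to CEvidence. */
void ExampleMlLearn(pnl::CMlLearningEngine* pLearn,
                    const pnl::CEvidence* const* pEvidences, int nEvidences)
{
    pLearn->AppendData(nEvidences, pEvidences);
    pLearn->Learn();
}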
CMRF2::CMRF2( int numberOfCliques, const int *cliqueSizes,
              const int **cliques, CModelDomain* pMD )
    : CMNet( numberOfCliques, cliqueSizes, cliques, pMD )
{
    int i;
    /* clique validity check */
    for( i = 0; i < numberOfCliques; i++ )
    {
        if( cliqueSizes[i] != 2 )
        {
            PNL_THROW( CInconsistentType, " not all the cliques are of two nodes " );
        }
    }
    /* clique validity check end */
    m_modelType = mtMRF2;
}
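/* Hypothetical clique layout, not library code: a 3-node chain MRF2 with
   pairwise cliques {0,1} and {1,2} - exactly the two-node cliques the
   constructor above enforces. pMD stands for an existing CModelDomain*. */
const int chainClique0[] = { 0, 1 };
const int chainClique1[] = { 1, 2 };
const int *chainCliques[]    = { chainClique0, chainClique1 };
const int chainCliqueSizes[] = { 2, 2 };
// CMRF2 mrf2( 2, chainCliqueSizes, chainCliques, pMD );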
PNL_USING

CDynamicGraphicalModel::CDynamicGraphicalModel( EModelTypes modelType,
    CStaticGraphicalModel *pGrModel )
    : CGraphicalModel(pGrModel->GetModelDomain())
{
    // check the validity of the model
    if( !pGrModel->IsValidAsBaseForDynamicModel() )
    {
        PNL_THROW( CInconsistentType,
            "static model is not valid as a base for a dynamic model" );
    }

    m_pGrModel = pGrModel;
    m_modelType = modelType;
    // the dynamic model is built on a two-slice static model
    m_nnodesPerSlice = pGrModel->GetNumberOfNodes() / 2;
    FindInterfaceNodes();
}
void CGibbsSamplingInfEngine::EnterEvidence( const CEvidence *pEvidenceIn,
                                             int maximize, int sumOnMixtureNode )
{
    if( !m_queryes.size() )
    {
        PNL_THROW( CAlgorithmicException, "possible queries must be defined" );
    }
    PNL_CHECK_IS_NULL_POINTER(pEvidenceIn);

    m_pEvidence = pEvidenceIn;
    m_bMaximize = maximize;

    DestroyCurrentEvidences();
    DestroyQueryFactors();

    // seed the sampler with samples drawn from the model given the evidence
    if( GetModel()->GetModelType() == mtBNet )
    {
        static_cast<const CBNet*>(GetModel())->
            GenerateSamples( GetCurrentEvidences(), GetNumStreams(), pEvidenceIn );
    }
    else
    {
        static_cast<const CMNet*>(GetModel())->
            GenerateSamples( GetCurrentEvidences(), GetNumStreams(), pEvidenceIn );
    }

    CreateQueryFactors();

    boolVector sampleIsNeed;
    if( m_bUsingDSep )
    {
        ConsDSep( m_queryes, &sampleIsNeed, m_pEvidence );
    }
    else
    {
        FindCurrentNdsForSampling( &sampleIsNeed );
    }
    SetSamplingNdsFlags(sampleIsNeed);

    Sampling( 0, GetMaxTime() );
}
void CBayesLearningEngine::AppendData( int dim,
                                       const CEvidence* const* pEvidencesIn )
{
    // build up the vector of pointers to the evidences
    PNL_CHECK_LEFT_BORDER(dim, 1);

    int i = 0;
    for( i = 0; i < dim; i++ )
    {
        if( !pEvidencesIn[i] )
        {
            PNL_THROW(CNULLPointer, "evidence");
        }
        if( IsInfNeed(pEvidencesIn[i]) )
        {
            PNL_THROW(CNotImplemented, "all nodes should be observed");
        }
        m_Vector_pEvidences.push_back( pEvidencesIn[i] );
    }
    m_numberOfAllEvidences = m_Vector_pEvidences.size();
}
void TokArr::Init(char const *s)
{
    int num_ampersands;

    resize(0);
    for(;;)
    {
        // skip whitespace, counting '&' separators on the way
        for( num_ampersands = 0; isspace(*s) || *s == '&'; ++s )
        {
            num_ampersands += *s == '&';
        }
        if( *s == 0 )
        {
            // trailing separators produce empty tokens
            while( num_ampersands-- )
            {
                push_back("");
            }
            return;
        }
        if( size() )
        {
            // between tokens the first '&' is the separator itself;
            // every extra one stands for an empty token
            while( --num_ampersands > 0 )
            {
                push_back("");
            }
        }
        else
        {
            // leading separators produce empty tokens
            while( num_ampersands-- )
            {
                push_back("");
            }
        }
        if( !(isalnum(*s) || strchr("-^+.", *s)) )
        {
            PNL_THROW(pnl::CBadArg,
                "alien symbol inside TokArr, one can use alphanumerics or + - . ^ & only");
        }
        push_back(Tok::root);
        s = back().Init(s);
    }
}
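// Behaviour sketch derived from the loop above ('&' separates tokens and
// surplus separators stand for empty tokens):
//   Init("a & & b")  ->  "a", "", "b"
//   Init("&x")       ->  "", "x"
//   Init("x&")       ->  "x", ""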