void CMNet::CreateTabularPotential( const intVector& domain,
        const floatVector& data )
{
    AllocFactor( domain.size(), &domain.front() );

    pFactorVector factors;
    int numFactors = GetFactors( domain.size(), &domain.front(), &factors );
    if( numFactors != 1 )
    {
        PNL_THROW( CInconsistentSize,
            "domain must be the same as corresponding domain size got from graph" );
    }
    factors[0]->AllocMatrix( &data.front(), matTable );
}
// Splits a factor's domain into the dimensions that are observed (every
// node except nonObsNode) and collects the corresponding observed values.
//
// Parameters:
//   domain     - node numbers forming the factor's domain
//   nonObsNode - the single node of the domain treated as unobserved
//   pEv        - evidence the observed values are read from
//   dims       - out: positions (within the domain) of the observed nodes
//   vls        - out: the observed values, read as ints
//                (only valid if all nodes are tabular!)
void CSamplingInfEngine::
GetObsDimsWithVls(intVector &domain, int nonObsNode, const CEvidence* pEv, 
		  intVector *dims, intVector *vls) const
{
    int nnodes = domain.size();
    dims->resize(nnodes - 1);
    vls->resize(nnodes - 1);
    
    // Write through indices rather than pointers from front(): the original
    // code took &dims->front() / &vls->front(), which is undefined behavior
    // when nnodes == 1 (both vectors are resized to zero elements).
    int j = 0;
    int i;
    for( i = 0; i < nnodes; i++ )
    {		
	int node = domain[i];
	if( node != nonObsNode )
	{
	    (*dims)[j] = i;
	    (*vls)[j] = pEv->GetValueBySerialNumber(node)->GetInt();//only if all nds are tabular!!
	    j++;
	}
    }
    
}
void CGaussianCPD::AllocDistribution( const floatVector& mean,
		            const floatVector& cov, float normCoeff,
		            const floatVecVector& weights,
		            const intVector& parentCombination  )
{
    if( weights.size() )
    {
	pnlVector<const float*> pWeights;
	int numWeights = weights.size();
	pWeights.resize( numWeights );
	for( int i = 0; i < numWeights; i++ )
	{
	    pWeights[i] = &weights[i].front();
	}
	AllocDistribution( &mean.front(), &cov.front(), normCoeff,
	    &pWeights.front(), &parentCombination.front() );
    }
    else
    {
	AllocDistribution( &mean.front(), &cov.front(), normCoeff, NULL,
	    &parentCombination.front() );
    }
}
// Forwards the marginalization query to the pointer-based overload.
void CGibbsSamplingInfEngine::
MarginalNodes( const intVector& queryNdsIn, int notExpandJPD  )
{
  const int *pQuery = &queryNdsIn.front();
  MarginalNodes( pQuery, queryNdsIn.size(), notExpandJPD );
}
// Forwards the marginalization query to the pointer-based MarginalNodes
// overload of this engine.
// NOTE(review): this is a member of a class template; the matching
// template<...> header must directly precede this definition in the full
// file (it is not visible in this excerpt).
void CExInfEngine< INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2 >::MarginalNodes( intVector const &queryNds, int notExpandJPD )
{
    MarginalNodes(&queryNds.front(), queryNds.size(), notExpandJPD );
}
// Sets the coefficient for the given discrete parent combination by
// delegating to the pointer-based SetCoefficient overload.
void CGaussianCPD::SetCoefficientVec( float coeff,
		                     const intVector& parentCombination )
{
    const int *pComb = &parentCombination.front();
    SetCoefficient( coeff, pComb );
}
// Returns the coefficient stored for the given discrete parent combination.
float CGaussianCPD::GetCoefficientVec( const intVector& parentCombination )
{
    const int *pComb = &parentCombination.front();
    return GetCoefficient( pComb );
}
// Vector-based convenience overload: forwards raw pointers to the
// pointer-based AllocDistribution.
void CSoftMaxCPD::AllocDistribution(const floatVector& weights,
    const floatVector& offsets, const intVector& parentCombination)
{
    const float *pWeights = &weights.front();
    const float *pOffsets = &offsets.front();
    AllocDistribution(pWeights, pOffsets, &parentCombination.front());
}
// Looks up the factors attached to the given subdomain; fills paramsOut
// and returns their number (delegates to the pointer-based overload).
int CGraphicalModel::GetFactors( const intVector& subdomainIn,
		                    pFactorVector *paramsOut ) const
{
	const int *pSubdomain = &subdomainIn.front();
	return GetFactors( subdomainIn.size(), pSubdomain, paramsOut );
}
// Allocates a factor over the given domain via the pointer-based overload.
void CGraphicalModel::AllocFactor( const intVector& domainIn)
{
	const int *pDomain = &domainIn.front();
	AllocFactor( domainIn.size(), pDomain );
}
// Toggles the observed/hidden state of every node listed in numsOfNds,
// addressing nodes by their serial numbers.
void CNodeValues::ToggleNodeStateBySerialNumber( const intVector& numsOfNds )
{
    ToggleNodeStateBySerialNumber( numsOfNds.size(), &numsOfNds.front() );
}
// Scores a candidate family (a node together with a candidate parent set)
// against the stored evidence set m_Vector_pEvidences, using the learner's
// configured score method (MaxLh / PreAs / MarLh) and penalty (BIC / AIC /
// WithoutFine). Returns the (penalized) log-score.
//
// NOTE(review): vFamily is passed by value, so each call copies the vector.
float CMlStaticStructLearn::ComputeFamilyScore(intVector vFamily)
{
    int nFamily = vFamily.size();
    // Temporary CPD built just for scoring this family; deleted before return.
    CCPD* iCPD = this->CreateRandomCPD(nFamily, &vFamily.front(), m_pGrModel);
    CTabularDistribFun *pDistribFun;
    int ncases = m_Vector_pEvidences.size();
    const CEvidence * pEv;
    float score;
    float pred = 0;
    EDistributionType NodeType;
    switch (m_ScoreMethod)
    {
    case MaxLh :
        // Maximum-likelihood score: collect sufficient statistics over all
        // cases at once, then evaluate.
        if ( !((iCPD->GetDistribFun()->GetDistributionType() == dtSoftMax)
                || (iCPD->GetDistribFun()->GetDistributionType() == dtCondSoftMax)))
        {
            iCPD->UpdateStatisticsML( &m_Vector_pEvidences.front(), ncases );
            score = iCPD->ProcessingStatisticalData(ncases);
        }
        else
        {
            // SoftMax / conditional SoftMax: fit parameters by gradient-based
            // maximum likelihood on a dense evidence matrix, then evaluate
            // the likelihood directly on that matrix.
            float **evid = NULL;
            float **full_evid = NULL;
            BuildFullEvidenceMatrix(&full_evid);
            CSoftMaxCPD* SoftMaxFactor = static_cast<CSoftMaxCPD*>(iCPD);
            SoftMaxFactor->BuildCurrentEvidenceMatrix(&full_evid, &evid,
                    vFamily,m_Vector_pEvidences.size());
            SoftMaxFactor->InitLearnData();
            SoftMaxFactor->SetMaximizingMethod(mmGradient);
            SoftMaxFactor->MaximumLikelihood(evid, m_Vector_pEvidences.size(),
                                             0.00001f, 0.01f);
            SoftMaxFactor->CopyLearnDataToDistrib();
            if (SoftMaxFactor->GetDistribFun()->GetDistributionType() == dtSoftMax)

            {
                score = ((CSoftMaxDistribFun*)SoftMaxFactor->GetDistribFun())->CalculateLikelihood(evid,ncases);
            }
            else
            {
                score = ((CCondSoftMaxDistribFun*)SoftMaxFactor->GetDistribFun())->CalculateLikelihood(evid,ncases);
            };
            // Release the per-family evidence matrix (one row per domain node)...
            for (int k = 0; k < SoftMaxFactor->GetDomainSize(); k++)
            {
                delete [] evid[k];
            }
            delete [] evid;
            int i;
            intVector obsNodes;
            (m_Vector_pEvidences[0])->GetAllObsNodes(&obsNodes);
            // ...and the full evidence matrix (one row per observed node of
            // the first evidence; presumably all evidences observe the same
            // nodes - TODO confirm).
            for (i=0; i<obsNodes.size(); i++)
            {
                delete [] full_evid[i];
            }
            delete [] full_evid;
        };
        break;
    case PreAs :
        // Prequential assessment: feed the cases one at a time and accumulate
        // the log-probability of each case under the statistics gathered from
        // the cases processed so far.
        int i;
        NodeType = iCPD->GetDistributionType();
        switch (NodeType)
        {
        case dtTabular :
            for(i = 0; i < ncases; i++)
            {

                pConstEvidenceVector tempEv(0);
                tempEv.push_back(m_Vector_pEvidences[i]);
                iCPD->UpdateStatisticsML(&tempEv.front(), tempEv.size());
                iCPD->ProcessingStatisticalData(tempEv.size());
                pred += log(((CTabularCPD*)iCPD)->GetMatrixValue(m_Vector_pEvidences[i]));
            }
            break;
        case dtGaussian :
            for(i = 0; i < ncases; i += 1 )
            {

                pConstEvidenceVector tempEv(0);
                tempEv.push_back(m_Vector_pEvidences[i]);

                iCPD->UpdateStatisticsML(&tempEv.front(), tempEv.size());
                float tmp = 0;
                // The first case only initializes the statistics; scoring
                // starts from the second case.
                if (i != 0)
                {
                    tmp =iCPD->ProcessingStatisticalData(1);
                    pred +=tmp;

                }

            }
            break;
        case dtSoftMax:
            PNL_THROW(CNotImplemented,
                      "This type score method has not been implemented yet");
            break;
        default:
            PNL_THROW(CNotImplemented,
                      "This type score method has not been implemented yet");
            break;
        };

        score = pred;
        break;
    case MarLh :
    {
        // check that the potential is discrete
        if (iCPD->GetDistributionType() != dtTabular)
        {
            PNL_THROW(CNotImplemented,
                      "This type of score method has been implemented only for discrete nets");
        }

        // Marginal likelihood (Bayesian score): initialize pseudo-counts
        // according to the prior, absorb each evidence, then evaluate.
        int DomainSize;
        const int * domain;
        switch(m_priorType)
        {
        case Dirichlet:
            iCPD->GetDomain(&DomainSize, &domain);

            pDistribFun = static_cast<CTabularDistribFun *>(iCPD->GetDistribFun());

            pDistribFun->InitPseudoCounts();

            for (i=0; i<ncases; i++)
            {
                pEv = m_Vector_pEvidences[i];
                const CEvidence *pEvidences[] = { pEv };
                pDistribFun->BayesUpdateFactor(pEvidences, 1, domain);
            }
            score = pDistribFun->CalculateBayesianScore();
            break;
        case K2:
            // K2 prior: uniform pseudo-counts of m_K2alfa.
            iCPD->GetDomain(&DomainSize, &domain);

            pDistribFun = static_cast<CTabularDistribFun *>(iCPD->GetDistribFun());

            pDistribFun->InitPseudoCounts(m_K2alfa);

            for (i=0; i<ncases; i++)
            {
                pEv = m_Vector_pEvidences[i];
                const CEvidence *pEvidences[] = { pEv };
                pDistribFun->BayesUpdateFactor(pEvidences, 1, domain);
            }
            score = pDistribFun->CalculateBayesianScore();
            break;
        case BDeu:
            // BDeu: like Dirichlet, but the score is normalized by the
            // number of free parameters.
            iCPD->GetDomain(&DomainSize, &domain);

            pDistribFun = static_cast<CTabularDistribFun *>(iCPD->GetDistribFun());

            pDistribFun->InitPseudoCounts();

            for (i=0; i<ncases; i++)
            {
                pEv = m_Vector_pEvidences[i];
                const CEvidence *pEvidences[] = { pEv };
                pDistribFun->BayesUpdateFactor(pEvidences, 1, domain);
            }
            score = pDistribFun->CalculateBayesianScore() / iCPD->GetNumberOfFreeParameters();
            break;
        default:
            PNL_THROW(CNotImplemented,
                      "This type of prior has not been implemented yet");
            break;
        }


        break;
    }
    default :
        PNL_THROW(CNotImplemented,
                  "This type score method has not been implemented yet");
        break;
    }


    // Apply the complexity penalty configured for this learner.
    int dim = iCPD->GetNumberOfFreeParameters();
    switch (m_ScoreType)
    {
    case BIC :
        // BIC: subtract (dim / 2) * log(ncases).
        score -= 0.5f * float(dim) * float(log(float(ncases)));
        break;
    case AIC :
        // AIC: subtract dim / 2.
        score -= 0.5f * float(dim);
        break;
    case WithoutFine:
        break;
    case VAR :
        PNL_THROW(CNotImplemented,
                  "This type score function has not been implemented yet");
        break;
    default:
        PNL_THROW(CNotImplemented,
                  "This type score function has not been implemented yet");
        break;
    }

    delete iCPD;
    return score;
}