pFactorVector *params) const
{
    return (m_pGrModel->GetFactors( numberOfNodes, nodes, params ));
}

bool CDynamicGraphicalModel::IsValid( std::string* description ) const
{
    if( !m_pGrModel )
    {
        std::stringstream st;
        st<<"Static Graphical Model (base for DBN) hase null pointer"<<std::endl;
        std::string s = st.str();
	description->insert( description->begin(), s.begin(), s.end() );
	return false;
    }
    else if( !m_pGrModel->IsValidAsBaseForDynamicModel(description) )
    {
        return false;
    }
    return true;
}

#ifdef PNL_RTTI
const CPNLType CDynamicGraphicalModel::m_TypeInfo = CPNLType("CDynamicGraphicalModel", &(CGraphicalModel::m_TypeInfo));

#endif
    
    const CEvidence *pEv = m_pEvidence;
    // every node starts as a sampling candidate; observed nodes are switched off below
    flags->assign(nnds, true);
    int i;
    for( i = 0; i < nnds; i++ )
    {
	if( pEv->IsNodeObserved(ndsForSampling[i]) )
	{
	    (*flags)[i] = false;
	}
    }
}

void CSamplingInfEngine::Continue( int dt )
{
    PNL_CHECK_LEFT_BORDER( dt, 0);
    
    // resume sampling for dt more time steps past the current horizon
    int startTime = GetMaxTime();
    PNL_CHECK_LEFT_BORDER( startTime, 0);
    
    int endTime = startTime + dt;
    SetMaxTime(endTime);
    Sampling( startTime, endTime );
}



#ifdef PNL_RTTI
const CPNLType CSamplingInfEngine::m_TypeInfo = CPNLType("CSamplingInfEngine", &(CInfEngine::m_TypeInfo));

#endif
		case DAG_ADD :
			if(iDAG->DoMove(start, end, DAG_ADD))
			{
				init_score += max_score;
				FamilyScore[end] += max_score;
			}
			break;

		case DAG_REV :
			if(iDAG->DoMove(start, end, DAG_REV))
			{
				init_score += max_score;
				FamilyScore[start] += max_start;
				FamilyScore[end] += max_end;
			}
			break;
		}
		vValidMoves.clear();
		step++;
	}

	*LearnedScore = this->ScoreDAG(iDAG, &FamilyScore);
	*LearnedDag = iDAG->Clone();
	delete iDAG;
}

#ifdef PNL_RTTI
const CPNLType CMlStaticStructLearnHC::m_TypeInfo = CPNLType("CMlStaticStructLearnHC", &(CMlStaticStructLearn::m_TypeInfo));

#endif
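// The switch above applies the move picked for the current hill-climbing step and
// patches the cached per-family scores instead of rescoring the whole DAG. Below is
// a self-contained sketch of that incremental bookkeeping for a generic greedy
// search; the Move struct and ApplyBestMove are illustrative, not PNL API.

#include <vector>

// one candidate move: add an edge start->end with a known score gain
struct Move
{
    int    start;
    int    end;
    double gain;        // improvement of the family score of 'end'
};

// apply the best move and keep the per-family scores consistent with it
double ApplyBestMove( std::vector<double> &familyScore,
                      double totalScore,
                      const std::vector<Move> &candidates )
{
    const Move *best = 0;
    for( size_t i = 0; i < candidates.size(); ++i )
    {
        if( !best || candidates[i].gain > best->gain )
        {
            best = &candidates[i];
        }
    }
    if( best && best->gain > 0.0 )
    {
        // only the family of the child node changes when an edge is added,
        // so the cached score is patched locally
        familyScore[best->end] += best->gain;
        totalScore             += best->gain;
    }
    return totalScore;
}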
    for ( node = 0; node < numberOfInterfaceNodes; node++ )
    {
	newIntNodes[node] = interfaceNodes[node] - numberOfNonIntNodes;
    }

    for( i = nnodesInDBN/2; i < nnodesInDBN; i++ )
    {
	graph->GetNeighbors(i, &numberOfNeighbors, &neighbors, &orientation);

	FinalNeighbors.resize(numberOfNeighbors);

	for ( j = 0; j < numberOfNeighbors; j++ )
	{
	    newNumber = neighbors[j] - numberOfNonIntNodes;
	    FinalNeighbors[j] = ( newNumber < numberOfInterfaceNodes ) ?
		( std::find( newIntNodes.begin(), newIntNodes.end(),
		newNumber) - newIntNodes.begin() ) : newNumber;
	}

	pFinalGraph->SetNeighbors( i - numberOfNonIntNodes, numberOfNeighbors,
	    &(FinalNeighbors.front()), orientation );
    }

    return pFinalGraph;
}

#ifdef PNL_RTTI
const CPNLType C1_5SliceInfEngine::m_TypeInfo = CPNLType("C1_5SliceInfEngine", &(C2TBNInfEngine::m_TypeInfo));

#endif
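// The loop above renumbers second-slice nodes for the 1.5-slice graph: a neighbour
// that falls inside the interface is replaced by its position in the shifted
// interface list (located with std::find), while any other neighbour simply keeps
// its shifted index. A standalone sketch of that remapping; the function and
// argument names are illustrative.

#include <algorithm>
#include <vector>

// remap old node numbers to the 1.5-slice numbering: interface nodes are packed
// first, in the order given by 'shiftedInterface'
std::vector<int> RemapNeighbors( const std::vector<int> &neighbors,
                                 const std::vector<int> &shiftedInterface,
                                 int numberOfNonIntNodes )
{
    std::vector<int> result( neighbors.size() );
    for( size_t j = 0; j < neighbors.size(); ++j )
    {
        int newNumber = neighbors[j] - numberOfNonIntNodes;
        result[j] = ( newNumber < (int)shiftedInterface.size() )
            ? (int)( std::find( shiftedInterface.begin(), shiftedInterface.end(),
                                newNumber ) - shiftedInterface.begin() )
            : newNumber;
    }
    return result;
}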
            *domIt ) != actuallyObsNodes.end() )
            {
                obsNodesInDomain.push_back(*domIt);
            }
        }

        if( !obsNodesInDomain.empty() )
        {
            // this is the line that keeps it from working for a conditional gaussian
            const CNodeType *pObsNodeNT =
                m_pJTree->GetNodeType(i)->IsDiscrete()
                ? &m_ObsNodeType : &m_ObsGaussType;

            const CPotential *pShrPot = (*factIt)->ShrinkObservedNodes(
                obsNodesInDomain.size(), obsNodesInDomain.begin(),
                allOffsets.begin(), pObsNodeNT );

            // this should be changed to perform a safe operation
            m_pJTree->GetNodePotential(i)->SetDistribFun(
                pShrPot->GetDistribFun());

            delete pShrPot;
        }
}*/
}

#ifdef PNL_RTTI
const CPNLType CFactors::m_TypeInfo = CPNLType("CFactors", &(CPNLBase::m_TypeInfo));

#endif
// Example #6
    if( GetModelDomain() != pEv->GetModelDomain() )
    {
        PNL_THROW(CBadArg, "different model domain")
    }
    int nnodes = GetGraph()->GetNumberOfNodes();
    int nObsNodes = pEv->GetNumberObsNodes();
    if( nObsNodes != nnodes )
    {
        PNL_THROW(CNotImplemented, "all nodes must be observed")
    }

    const int* flags = pEv->GetObsNodesFlags();
    if( std::find( flags, flags + nnodes, 0 ) != flags + nnodes )
    {
        PNL_THROW( CNotImplemented, "all nodes must be observed" )
    }

    float ll = 0.0f;
    int i;
    for( i = 0; i < GetNumberOfCliques(); i++ )
    {
        ll += GetFactor( i )->GetLogLik( pEv );
    }
    return ll;
}

#ifdef PNL_RTTI
const CPNLType CMNet::m_TypeInfo = CPNLType("CMNet", &(CStaticGraphicalModel::m_TypeInfo));

#endif
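// With fully observed evidence the log-likelihood of a Markov net decomposes into a
// sum of per-clique terms, which is what the loop above accumulates. A minimal
// standalone illustration of that decomposition; ToyFactor and its GetLogLik are
// invented for the sketch and are not the PNL factor interface.

#include <vector>

// toy clique factor: it only has to report log phi_c(evidence)
struct ToyFactor
{
    double logLik;                        // precomputed log phi_c(e)
    double GetLogLik() const { return logLik; }
};

// log P(e) = sum_c log phi_c(e) - log Z; the partition-function term is a constant
// of the model, so comparing evidences only needs the clique sum below
double CliqueLogLikSum( const std::vector<ToyFactor> &factors )
{
    double ll = 0.0;
    for( size_t i = 0; i < factors.size(); ++i )
    {
        ll += factors[i].GetLogLik();
    }
    return ll;
}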
// Example #7
CGraphicalModel::CGraphicalModel(CModelDomain* pMD)
{
    m_pMD = pMD;
    void* pObj = this;
    m_pMD->AddRef(pObj);
}

CGraphicalModel::CGraphicalModel(int numberOfNodes, 
                                 int numberOfNodeTypes,
                                 const CNodeType *nodeTypes,
                                 const int *nodeAssociation )
{
    CGraphicalModel* pObj = this;
    nodeTypeVector nt = nodeTypeVector( nodeTypes, nodeTypes + numberOfNodeTypes );
    intVector nAssociation = intVector( nodeAssociation, 
        nodeAssociation + numberOfNodes );
    m_pMD = CModelDomain::Create( nt, nAssociation, pObj );
}


CGraphicalModel::~CGraphicalModel()
{
    void *pObj = this;
    m_pMD->Release(pObj);
}

#ifdef PNL_RTTI
const CPNLType CGraphicalModel::m_TypeInfo = CPNLType("CGraphicalModel", &(CPNLBase::m_TypeInfo));

#endif
//                                                                         //
//                INTEL CORPORATION PROPRIETARY INFORMATION                //
//   This software is supplied under the terms of a license agreement or   //
//  nondisclosure agreement with Intel Corporation and may not be copied   //
//   or disclosed except in accordance with the terms of that agreement.   //
//       Copyright (c) 2003 Intel Corporation. All Rights Reserved.        //
//                                                                         //
//  File:      pnlNumericDenseMatrix.cpp                                  //
//                                                                         //
//  Purpose:   CNumericDenseMatrix class member functions implementation   //
//                                                                         //
//  Author(s):                                                             //
//                                                                         //
/////////////////////////////////////////////////////////////////////////////

#include "pnlConfig.hpp"
#include "pnlNumericDenseMatrix.hpp"

PNL_USING

#ifdef PNL_RTTI
template<>
const CPNLType &CNumericDenseMatrix< int >::GetStaticTypeInfo()
{
  return CNumericDenseMatrix< int >::m_TypeInfo;
}

template <>
const CPNLType CNumericDenseMatrix< float >::m_TypeInfo = CPNLType("CNumericDenseMatrix", &(iCNumericDenseMatrix< float >::m_TypeInfo));

#endif
			tPot = bigPot->Marginalize(&tmpV.front(), tmpV.size());
			delete bigPot;
			bigPot = tPot;
		    }
		    delete vPots[*nbr];
		    vPots[*nbr] = bigPot;
		    bigPot->GetDomain(&bigDomain);
		    if( pnlIsSubset(domSize, Domain, bigDomain.size(), &bigDomain.front()) )
		    {
			CPotential* retPot = static_cast<CPotential*>(bigPot->Clone());
			for(i=0; i<numNodes; i++)
			{
			    delete vPots[i];
			}
			vPots.clear();
			m_NodesAfterShrink.clear();
			return retPot;
		    }
		}
		nodesSentMessages[*sourceIt] = true;
	    }
	}
    }
    PNL_THROW(CInternalError, "internal error");
}

#ifdef PNL_RTTI
const CPNLType CJtreeInfEngine::m_TypeInfo = CPNLType("CJtreeInfEngine", &(CInfEngine::m_TypeInfo));

#endif
    {
	m_pMatrix[nDestinationRow][i] = (unsigned char)(m_pMatrix[nDestinationRow][i] | m_pMatrix[nSourceRow][i]);
    }
    return true;

}

void C2DBitwiseMatrix::operator =(const C2DBitwiseMatrix& SrcMatrix)
{
    if( this == &SrcMatrix )
        return;
    CreateMatrix(SrcMatrix.GetWidth(), SrcMatrix.GetHeight());
    // bytes per row, rounded up so that all m_nWidth bits are covered
    int nLength = m_nWidth / BITWISEMATRIX_BYTESIZE;
    if(nLength*BITWISEMATRIX_BYTESIZE < m_nWidth)
	nLength ++;
    for(int i=0; i<m_nHeight; i++)
	for(int j=0; j<nLength; j++)
	    m_pMatrix[i][j] = SrcMatrix.m_pMatrix[i][j];
}

void C2DBitwiseMatrix::ZeroOneRow(int nRow)
{
    int nLength = m_nWidth / BITWISEMATRIX_BYTESIZE;
    if(nLength*BITWISEMATRIX_BYTESIZE < m_nWidth)
	nLength ++;
    for(int i=0; i<nLength; i++)
	m_pMatrix[nRow][i] =0;
}

#ifdef PNL_RTTI
const CPNLType C2DBitwiseMatrix::m_TypeInfo = CPNLType("C2DBitwiseMatrix", &(CPNLBase::m_TypeInfo));

#endif
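// C2DBitwiseMatrix keeps each row packed into bytes, so row-wide operations (the OR
// in the fragment above, the zeroing in ZeroOneRow) touch
// ceil(m_nWidth / BITWISEMATRIX_BYTESIZE) bytes rather than m_nWidth booleans.
// A standalone sketch of the same packed-row arithmetic; the helper names below are
// illustrative.

const int kBitsPerByte = 8;

// number of bytes needed to hold 'width' bits, i.e. ceil(width / 8)
inline int PackedRowLength( int width )
{
    return ( width + kBitsPerByte - 1 ) / kBitsPerByte;
}

// dst |= src over one packed row
inline void OrRow( unsigned char *dst, const unsigned char *src, int width )
{
    int nLength = PackedRowLength( width );
    for( int i = 0; i < nLength; ++i )
    {
        dst[i] = (unsigned char)( dst[i] | src[i] );
    }
}

// set every bit of one packed row to zero
inline void ZeroRow( unsigned char *row, int width )
{
    int nLength = PackedRowLength( width );
    for( int i = 0; i < nLength; ++i )
    {
        row[i] = 0;
    }
}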
		pCPD->CreateAllNecessaryMatrices(1);
		return pCPD;
	}
	else
	{
		if( dt == dtMixGaussian )
		{
			floatVector data;
			static_cast<CMixtureGaussianCPD*>(factor)->GetProbabilities(&data);
			pCPD = CMixtureGaussianCPD::Create(family, nfamily, pMD, &data.front());
			static_cast<CCondGaussianDistribFun*>(pCPD->GetDistribFun()) -> CreateDefaultMatrices(1);
			return pCPD;
		}
		else
		{
			if( (dt == dtGaussian) || (dt == dtCondGaussian) )
			{
				pCPD = CGaussianCPD::Create(family, nfamily, pMD);
				pCPD->CreateAllNecessaryMatrices(1);
				return pCPD;
			}
			else
				PNL_THROW(CNotImplemented, "this type of distribution is not supported yet");
		}				
	}
}

#ifdef PNL_RTTI
const CPNLType CStaticStructLearnSEM::m_TypeInfo = CPNLType("CStaticStructLearnSEM", &(CStaticLearningEngine::m_TypeInfo));

#endif
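// The method above is essentially a factory: it looks at the distribution type of an
// existing factor and builds a fresh CPD of the matching class, then fills in its
// default matrices. A compact standalone sketch of that dispatch; the enum and the
// Toy* classes are invented for illustration and are not part of PNL.

// the base declares a virtual hook so every concrete CPD can fill in its
// default parameter matrices right after construction
struct ToyCPD
{
    virtual ~ToyCPD() {}
    virtual void CreateDefaultMatrices() = 0;
};
struct ToyTabularCPD  : ToyCPD { void CreateDefaultMatrices() {} };
struct ToyGaussianCPD : ToyCPD { void CreateDefaultMatrices() {} };
struct ToyMixGaussCPD : ToyCPD { void CreateDefaultMatrices() {} };

enum ToyDistributionType { dtToyTabular, dtToyGaussian, dtToyMixGaussian };

// pick the CPD class that matches the distribution type; the caller owns the
// returned object and should delete it when done
ToyCPD* CreateToyCPD( ToyDistributionType dt )
{
    ToyCPD *pCPD = 0;
    switch( dt )
    {
        case dtToyTabular:     pCPD = new ToyTabularCPD;  break;
        case dtToyGaussian:    pCPD = new ToyGaussianCPD; break;
        case dtToyMixGaussian: pCPD = new ToyMixGaussCPD; break;
    }
    pCPD->CreateDefaultMatrices();
    return pCPD;
}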
        m_nextState = 1 - m_curState;
        //compute beliefs
        changed = 0;
        for( i = 0; i < nNodes; i++ )
        {
            if( !m_areReallyObserved[m_connNodes[i]])
            {
                ComputeBelief( m_connNodes[i] );
                changed += !m_beliefs[m_curState][m_connNodes[i]]->IsFactorsDistribFunEqual(
                    m_beliefs[m_nextState][m_connNodes[i]], m_tolerance);
            }
        }
        converged = !(changed);
        iter++;
    }//while ((!converged)&&(iter<m_numberOfIterations))
    m_IterationCounter = iter;
    //need to set both states to valid state
    m_curState = m_nextState;
}
/////////////////////////////////////////////////////////////////////////////
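// The loop that ends above keeps two belief buffers (m_curState / m_nextState) and
// declares convergence once no node's belief moves by more than m_tolerance between
// them. A standalone sketch of such a double-buffered fixed-point iteration; the
// update rule and names below are illustrative only.

#include <cmath>
#include <vector>

// both buffers must have the same size; beliefs[cur] holds the previous sweep,
// beliefs[1 - cur] receives the new one, and the roles swap every pass
int IterateToFixedPoint( std::vector<double> beliefs[2], double tolerance,
                         int maxIterations )
{
    int cur = 0, iter = 0;
    bool converged = false;
    while( !converged && iter < maxIterations )
    {
        int next = 1 - cur;
        int changed = 0;
        for( size_t i = 0; i < beliefs[cur].size(); ++i )
        {
            // toy update rule standing in for ComputeBelief()
            beliefs[next][i] = 0.5 * ( beliefs[cur][i] + 0.5 );
            changed += std::fabs( beliefs[next][i] - beliefs[cur][i] ) > tolerance;
        }
        converged = ( changed == 0 );
        cur = next;          // flip the buffers for the next sweep
        ++iter;
    }
    return iter;             // number of sweeps actually performed
}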

void CSpecPearlInfEngine::TreeProtocol()
{
}
/////////////////////////////////////////////////////////////////////////////

#ifdef PNL_RTTI
const CPNLType CSpecPearlInfEngine::m_TypeInfo = CPNLType("CSpecPearlInfEngine", &(CInfEngine::m_TypeInfo));

#endif
/////////////////////////////////////////////////////////////////////////////
    }
}

const int *CNodeValues::GetOffset()const
{
    return &m_offset.front();
}

void CNodeValues::GetRawData(valueVector* values) const
{
    PNL_CHECK_IS_NULL_POINTER(values);
    values->assign( m_rawValues.begin(), m_rawValues.end() );
}
const CNodeType *const* CNodeValues::GetNodeTypes()const
{
    return &m_NodeTypes.front();
}

void CNodeValues::GetObsNodesFlags( intVector* obsNodesFlagsOut ) const
{
    
    obsNodesFlagsOut->assign( m_isObsNow.begin(), m_isObsNow.end() );
}


#ifdef PNL_RTTI
const CPNLType CNodeValues::m_TypeInfo = CPNLType("CNodeValues", &(CPNLBase::m_TypeInfo));

#endif

       
    while( location != m_refList.end() )
    {
        location = std::find( m_refList.erase(location), m_refList.end(),
            pObject );
    }
    
    if( m_refList.empty() )
    {
        delete this;
    }
}
//////////////////////////////////////////////////////////////////////////

CReferenceCounter::CReferenceCounter()
{
}

//////////////////////////////////////////////////////////////////////////

CReferenceCounter::~CReferenceCounter()
{
}
#endif // PAR_OMP

#endif // NDEBUG

#ifdef PNL_RTTI
const CPNLType CReferenceCounter::m_TypeInfo = CPNLType("CReferenceCounter", &(CPNLBase::m_TypeInfo));

#endif
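// CReferenceCounter::Release (fragment above) removes every reference held by the
// releasing object and lets the counted object delete itself once its reference
// list becomes empty. A minimal standalone sketch of that "delete this with the
// last reference" pattern; ToyRefCounted is illustrative, not the PNL class.

#include <algorithm>
#include <list>

class ToyRefCounted
{
public:
    void AddRef( void *pObject )
    {
        m_refList.push_back( pObject );
    }

    void Release( void *pObject )
    {
        // erase every reference held by pObject, restarting the search from
        // the position returned by erase()
        std::list<void*>::iterator location =
            std::find( m_refList.begin(), m_refList.end(), pObject );
        while( location != m_refList.end() )
        {
            location = std::find( m_refList.erase( location ), m_refList.end(),
                                  pObject );
        }
        // the counted object destroys itself with the last reference
        if( m_refList.empty() )
        {
            delete this;
        }
    }

private:
    ~ToyRefCounted() {}       // heap-only: callers must go through Release()
    std::list<void*> m_refList;
};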
            "uniform distribution can't have any matrices with data" );
    }
    m_CorrespDistribFun->CreateDefaultMatrices(typeOfMatrices);
}
//-------------------------------------------------------------------------------
 void CSoftMaxCPD::BuildCurrentEvidenceMatrix(float ***full_evid, float ***evid,intVector family,int numEv)
{
  int i, j;
  // allocate a family.size() x numEv matrix for this family's evidence
  *evid = new float* [family.size()];
  for (i = 0; i < family.size(); i++)
  {
    (*evid)[i] = new float [numEv];
  }

  // copy the rows of the full evidence matrix that belong to the family nodes
  for (i = 0; i < numEv; i++)
  {
    for (j = 0; j < family.size(); j++)
    {
      (*evid)[j][i] = (*full_evid)[family[j]][i];
    }
  }
}
//--------------------------------------------------------------------------------
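// BuildCurrentEvidenceMatrix above selects the rows of the full evidence matrix that
// belong to one family, using manually allocated float arrays. The same selection
// written with std::vector (a hypothetical alternative, not the PNL code) avoids the
// matching delete[] loops entirely:

#include <vector>

// fullEvid is indexed as [node][evidence index]
std::vector< std::vector<float> >
SelectFamilyEvidence( const std::vector< std::vector<float> > &fullEvid,
                      const std::vector<int> &family )
{
    std::vector< std::vector<float> > evid( family.size() );
    for( size_t j = 0; j < family.size(); ++j )
    {
        evid[j] = fullEvid[ family[j] ];   // copy the whole row of node family[j]
    }
    return evid;
}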

#ifdef PNL_RTTI
const CPNLType CSoftMaxCPD::m_TypeInfo = CPNLType("CSoftMaxCPD", &(CCPD::m_TypeInfo));

#endif

// end of file ----------------------------------------------------------------
// Example #16
	beta_i = Alpha_i(nNodes, (*startNode) + 1) - 1;

    }while ((linearNumber < alpha_i) || (linearNumber > beta_i));
    (*endNode) = nNodes - beta_i + linearNumber - 1;
}

bool CBICLearningEngine::IsInputModelValid(const CStaticGraphicalModel *pGrModel)
{
    if( pGrModel->GetModelType() != mtBNet )
    {
        return false;
    }
    
    const CModelDomain * pMD = pGrModel->GetModelDomain();
    nodeTypeVector varTypes;
    pMD->GetVariableTypes(&varTypes);
    int i;
    for( i = 0; i < varTypes.size(); i++ )
    {
        if( !varTypes[i].IsDiscrete() )
        {
            return false;
        }
    }
    return true;
}

#ifdef PNL_RTTI
const CPNLType CBICLearningEngine::m_TypeInfo = CPNLType("CBICLearningEngine", &(CStaticLearningEngine::m_TypeInfo));

#endif
	return;
    }
    intVector vA, vD;
    CStaticGraphicalModel* pGrModel0 = m_pGrModel->CreatePriorSliceGrModel();
    CMlStaticStructLearn* pSSL0 = CMlStaticStructLearnHC::Create(pGrModel0, itStructLearnML,
	StructLearnHC, BIC, m_nMaxFanIn,
	vA, vD, m_nRestarts);
    pSSL0->SetData(m_vEvidence0.size(), &m_vEvidence0.front());
    pSSL0->SetMaxIterIPF(m_nMaxIters / 2);
    static_cast<CMlStaticStructLearnHC*>(pSSL0) ->SetMinProgress(m_minProgress);

    pSSL0->Learn();
    const CDAG* p0SDAG = pSSL0->GetResultDAG();
    //	p0SDAG->Dump();
    CDAG* pDAG = const_cast<CDAG*>(p2SDAG)->Clone();
    if(pDAG->SetSubDag(vAncestor, const_cast<CDAG*>(p0SDAG)))
    {
	m_pResultDag = pDAG->Clone();
	delete pDAG;
    }
    else
	PNL_THROW(CInternalError, "InternalError, can not generate a DAG");

    delete pSSL;
    delete pSSL0;
}

#ifdef PNL_RTTI
const CPNLType CMlDynamicStructLearn::m_TypeInfo = CPNLType("CMlDynamicStructLearn", &(CDynamicLearningEngine::m_TypeInfo));

#endif
    int series;
    const CEvidence ***evFinal;
    evFinal = new const CEvidence ** [numOfTimeSeries];
    
    for( series = 0; series < numOfTimeSeries; series++ ) 
    {
        nSlices = evidences[series].size();
        numberOfSlices[series] = nSlices;
        evFinal[series] = new const CEvidence * [nSlices];
        
        int slice;
        
        for( slice = 0; slice < numberOfSlices[series]; slice++ )
        {
            evFinal[series][slice] = evidences[series][slice];
        }
    }
   
    SetData( numOfTimeSeries, &numberOfSlices.front(), evFinal );
    
    for( series = 0; series < numOfTimeSeries; series++ ) 
    {
            delete []evFinal[series];
    }
    delete []evFinal;
}

#ifdef PNL_RTTI
const CPNLType CDynamicLearningEngine::m_TypeInfo = CPNLType("CDynamicLearningEngine", &(CLearningEngine::m_TypeInfo));

#endif
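// The fragment above is pure container adaptation: per-series evidence vectors are
// copied into a temporary jagged C array because SetData expects raw pointers, and
// the temporaries are freed right after the call. A standalone sketch of that
// adapter pattern for a generic element type; since std::vector storage is
// contiguous, the inner arrays do not even need to be copied here. All names are
// illustrative.

#include <vector>

// legacy-style consumer that expects a jagged C array: data[series][slice]
void LegacySetData( int numOfSeries, const int *slicesPerSeries,
                    const int *const *data )
{
    (void)numOfSeries; (void)slicesPerSeries; (void)data;
}

// build the temporary outer array, call the legacy API, then free it
void CallWithJaggedArray( const std::vector< std::vector<int> > &series )
{
    int numOfSeries = (int)series.size();
    std::vector<int> slicesPerSeries( numOfSeries );

    const int **data = new const int* [numOfSeries];
    for( int s = 0; s < numOfSeries; ++s )
    {
        slicesPerSeries[s] = (int)series[s].size();
        data[s] = series[s].empty() ? 0 : &series[s].front();
    }

    LegacySetData( numOfSeries,
                   slicesPerSeries.empty() ? 0 : &slicesPerSeries.front(),
                   data );

    delete [] data;           // only the outer array was allocated here
}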
// Example #19
/////////////////////////////////////////////////////////////////////////////
//                                                                         //
//                INTEL CORPORATION PROPRIETARY INFORMATION                //
//   This software is supplied under the terms of a license agreement or   //
//  nondisclosure agreement with Intel Corporation and may not be copied   //
//   or disclosed except in accordance with the terms of that agreement.   //
//       Copyright (c) 2003 Intel Corporation. All Rights Reserved.        //
//                                                                         //
//  File:      pnlNodeType.cpp                                             //
//                                                                         //
//  Purpose:   CNodeType class member functions implementation             //
//                                                                         //
//  Author(s):                                                             //
//                                                                         //
/////////////////////////////////////////////////////////////////////////////
#include "pnlConfig.hpp"
#include "pnlNodeType.hpp"

PNL_USING

#ifdef PNL_RTTI
const CPNLType CNodeType::m_TypeInfo = CPNLType("CNodeType", &(CPNLBase::m_TypeInfo));
#endif
// Example #20
    intVecVector node_assoc;

    int maximize;
    mutable CPotential *query_JPD;
    mutable CEvidence *MPE_ev;

#ifdef PNL_RTTI
    static const CPNLType m_TypeInfo;
#endif 
};

#ifdef PNL_RTTI
template< class INF_ENGINE, class MODEL ,
          EExInfEngineFlavour FLAV,
          class FALLBACK_ENGINE1 , class FALLBACK_ENGINE2>
const CPNLType CExInfEngine<INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2>::m_TypeInfo = CPNLType("CExInfEngine", &(CInfEngine::m_TypeInfo));

#endif

#ifndef SWIG
template< class INF_ENGINE, class MODEL, EExInfEngineFlavour FLAV, class FALLBACK_ENGINE1, class FALLBACK_ENGINE2 >
inline CExInfEngine< INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2 > *CExInfEngine< INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2 >::Create( CStaticGraphicalModel const *gm )
{
    return new( CExInfEngine< INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2 > )( gm );
}

template< class INF_ENGINE, class MODEL, EExInfEngineFlavour FLAV, class FALLBACK_ENGINE1, class FALLBACK_ENGINE2 >
inline CStaticGraphicalModel const *CExInfEngine< INF_ENGINE, MODEL, FLAV, FALLBACK_ENGINE1, FALLBACK_ENGINE2 >::GetModel() const
{
    return graphical_model;
}
// Example #21
    if ((*NodeTVector)[DomSize - 1]->IsDiscrete())
    {
        for( i = 0; i < m_Domain.size(); ++i)
        {
            if(!((*NodeTVector)[i]->IsDiscrete()) && !(pEvidence->IsNodeObserved(m_Domain[i])))
                obsIndInDomain.push_back(i);
        }
        floatVector prob = static_cast<CTreeDistribFun*>(m_CorrespDistribFun)->GetProbability(pEvidence);
        resPot = CTabularPotential::Create(	&m_Domain.front(), m_Domain.size(), GetModelDomain(), 
            &prob.front(), obsIndInDomain);
    }
    else
    {
        for( i = 0; i < m_Domain.size(); ++i)
        {
            if(((*NodeTVector)[i]->IsDiscrete()) && !(pEvidence->IsNodeObserved(m_Domain[i])))
                obsIndInDomain.push_back(i);
        }
        float exp, var;
        static_cast<CTreeDistribFun*>(m_CorrespDistribFun)->GetAdjectives( pEvidence, exp, var );
        resPot = CGaussianPotential::Create( &m_Domain.front(), m_Domain.size(), GetModelDomain(), 1, 
            &exp, &var, 0.0f, obsIndInDomain);
    }
    return resPot;
}

#ifdef PNL_RTTI
const CPNLType CTreeCPD::m_TypeInfo = CPNLType("CTreeCPD", &(CCPD::m_TypeInfo));

#endif
#endif
                    result = 0;
                    result1 = 0;
                    break;
                }
            }
#if 0
            if (print)
            {
                if (result1)
                    printf("%d\tOK", i);
                else
                    printf("%d\tnot OK", i);
                printf("\n");
            }
#endif
        }
    }    
    
    return result;
}
#endif // PAR_RESULTS_RELIABILITY

#ifdef PNL_RTTI
const CPNLType CParEMLearningEngine::m_TypeInfo = CPNLType("CParEMLearningEngine", &(CEMLearningEngine::m_TypeInfo));

#endif

#endif // PAR_PNL
// end of file ----------------------------------------------------------------
// Example #23
            params->push_back(GetFactor(*clqsIt));
        }
    }
    else
    {
        int       numOfClqsScndNode;
        const int *clqsScndNode;
        
        GetClqsNumsForNode( *(nodes + 1), &numOfClqsScndNode, &clqsScndNode );
        
        const int *pClqNum = std::find_first_of( clqsFrstNode,
            clqsFrstNode + numOfClqsFrstNode, clqsScndNode,
            clqsScndNode + numOfClqsScndNode );
        
        if( pClqNum == clqsFrstNode + numOfClqsFrstNode )
        {
            return 0;
        }
        
        params->push_back(GetFactor(*pClqNum));
    }
    
    assert( params->size() != 0 );
    
    return 1;
}

#ifdef PNL_RTTI
const CPNLType CMRF2::m_TypeInfo = CPNLType("CMRF2", &(CMNet::m_TypeInfo));

#endif
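// In a pairwise MRF the factor of an edge lives in the clique that contains both of
// its end nodes; the fragment above locates that clique with std::find_first_of over
// the two nodes' clique lists and returns 0 if they do not intersect. A standalone
// sketch of the same intersection test; the function below is illustrative.

#include <algorithm>
#include <vector>

// return the number of the first clique shared by both nodes, or -1 if the two
// clique lists do not intersect
int FindCommonClique( const std::vector<int> &clqsFirstNode,
                      const std::vector<int> &clqsSecondNode )
{
    std::vector<int>::const_iterator it = std::find_first_of(
        clqsFirstNode.begin(), clqsFirstNode.end(),
        clqsSecondNode.begin(), clqsSecondNode.end() );

    return ( it == clqsFirstNode.end() ) ? -1 : *it;
}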
    m_DistributionType = dt;
}
CDistribFun::CDistribFun( EDistributionType dt )
:m_NumberOfNodes(0), m_bUnitFunctionDistribution(0), m_bAllMatricesAreValid(0)//, m_bMatricesAreAllocated(0)
{
    m_DistributionType = dt;
}

void CDistribFun::ResetNodeTypes( pConstNodeTypeVector &nodeTypes )
{
    if( int(nodeTypes.size()) != m_NumberOfNodes )
    {
        PNL_THROW( COutOfRange, "number of node types" );
    }
    int i;
    for( i = 0; i < m_NumberOfNodes; i++)
    {
        PNL_CHECK_IS_NULL_POINTER( nodeTypes[i] );
        if( *m_NodeTypes[i] != *nodeTypes[i] )
        {
            PNL_THROW( CInconsistentType, "node types must be equal");
        }
    }
    m_NodeTypes.assign( nodeTypes.begin(), nodeTypes.end() );
}

#ifdef PNL_RTTI
const CPNLType CDistribFun::m_TypeInfo = CPNLType("CDistribFun", &(CPNLBase::m_TypeInfo));

#endif
}

void CMlStaticStructLearn::SetPriorType(EPriorTypes ptype)
{
    m_priorType = ptype;
}

void CMlStaticStructLearn::SetScoreFunction(EScoreFunTypes ftype)
{
    m_ScoreType = ftype;
}

EScoreFunTypes CMlStaticStructLearn::GetScoreFunction()
{
    return m_ScoreType;
}

int CMlStaticStructLearn::GetK2PriorParam()
{
    return m_K2alfa;
}

void CMlStaticStructLearn::SetK2PriorParam(int alfa)
{
    m_K2alfa = alfa;
}

#ifdef PNL_RTTI
const CPNLType CMlStaticStructLearn::m_TypeInfo = CPNLType("CMlStaticStructLearn", &(CStaticLearningEngine::m_TypeInfo));

#endif