/// Initializes the per-edge datastructures of belief propagation:
/// one EdgeProp per (variable i, neighboring factor I) pair. When the
/// update schedule is SEQMAX, a residual lookup table (_lut) is built
/// alongside, with _edge2lut storing an iterator into it for each edge.
void BP::construct() {
    // create edge properties
    _edges.clear();
    _edges.reserve( nrVars() );
    _edge2lut.clear();
    if( props.updates == Properties::UpdateType::SEQMAX )
        _edge2lut.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); ++i ) {
        _edges.push_back( vector<EdgeProp>() );
        _edges[i].reserve( nbV(i).size() );
        if( props.updates == Properties::UpdateType::SEQMAX ) {
            _edge2lut.push_back( vector<LutType::iterator>() );
            _edge2lut[i].reserve( nbV(i).size() );
        }
        foreach( const Neighbor &I, nbV(i) ) {
            EdgeProp newEP;
            // messages are probability vectors over the states of var(i)
            newEP.message = Prob( var(i).states() );
            newEP.newMessage = Prob( var(i).states() );
            if( DAI_BP_FAST ) {
                // precompute one table entry per joint state of factor I;
                // IndexFor enumerates var(i)'s index for each such state
                newEP.index.reserve( factor(I).nrStates() );
                for( IndexFor k( var(i), factor(I).vars() ); k.valid(); ++k )
                    newEP.index.push_back( k );
            }
            newEP.residual = 0.0;
            _edges[i].push_back( newEP );
            if( props.updates == Properties::UpdateType::SEQMAX )
                // keep the iterator returned by insert so this edge's residual
                // entry in the lookup table can be located later
                _edge2lut[i].push_back( _lut.insert( make_pair( newEP.residual, make_pair( i, _edges[i].size() - 1 ))) );
        }
    }
    // NOTE(review): the function appears truncated here -- its closing brace
    // (and any remaining initialization) is not visible in this chunk.
/// Recomputes the "pancake" (cavity-corrected belief) of variable i after
/// updating the phi message associated with its _I-th neighboring factor.
/// Sets hasNaNs to true (and reports on cerr) if the result contains NaNs.
Factor LC::NewPancake (size_t i, size_t _I, bool & hasNaNs) {
    size_t I = nbV(i)[_I];

    // start from the current pancake of i; an updated copy is returned
    Factor result = _pancakes[i];

    VarSet Ivars = factor(I).vars();

    // accumulate, over all other variables of factor I, the marginals of
    // their pancakes with factor I divided out
    Factor A_I;
    for( VarSet::const_iterator k = Ivars.begin(); k != Ivars.end(); k++ ) {
        if( var(i) == *k )
            continue;
        A_I *= (_pancakes[findVar(*k)] * factor(I).inverse()).marginal( Ivars / var(i), false );
    }
    // geometric averaging over the (Ivars.size() - 1) contributions
    if( Ivars.size() > 1 )
        A_I ^= (1.0 / (Ivars.size() - 1));

    Factor A_Ii = (_pancakes[i] * factor(I).inverse() * _phis[i][_I].inverse()).marginal( Ivars / var(i), false );
    Factor quot = A_I / A_Ii;

    // optional damping: geometric interpolation with the previous phi
    if( props.damping != 0.0 )
        quot = (quot^(1.0 - props.damping)) * (_phis[i][_I]^props.damping);

    // replace the old phi contribution in the pancake by the new one
    result *= quot / _phis[i][_I].normalized();
    _phis[i][_I] = quot.normalized();
    result.normalize();

    if( result.hasNaNs() ) {
        cerr << name() << "::NewPancake(" << i << ", " << _I << "): has NaNs!" << endl;
        hasNaNs = true;
    }

    return result;
}
void CBP::construct() { // prepare datastructures for compression for (size_t i=0; i<nrVars(); i++) { _gndVarToSuperVar[i] = i; } for (size_t i=0; i<nrFactors(); i++) { _gndFacToSuperFac[i] = i; } // create edge properties _edges.clear(); _edges.reserve( nrVars() ); for( size_t i = 0; i < nrVars(); ++i ) { _edges.push_back( vector<EdgeProp>() ); _edges[i].reserve( nbV(i).size() ); foreach( const Neighbor &I, nbV(i) ) { EdgeProp newEP; size_t edgeCount = factor(I).counts()[I.dual].size(); newEP.message = vector<Prob>(edgeCount,Prob( var(i).states() )); newEP.newMessage = vector<Prob>(edgeCount,Prob( var(i).states() )); newEP.count = vector<int>(edgeCount); newEP.index = vector<ind_t>(edgeCount); newEP.nrPos = edgeCount; // simulate orginal varSet with possibly more variables, must ensure that the number of variables is equal to number of ground variables VarSet gndVarSet; size_t varCount = 0; foreach(const Neighbor &tmpVar, nbF(I)) { for(map<size_t, int>::const_iterator iter=factor(I).counts()[tmpVar.iter].begin(); iter!=factor(I).counts()[tmpVar.iter].end();iter++) { gndVarSet |= Var(varCount, var(tmpVar).states()); varCount++; } } varCount = 0; foreach(const Neighbor &tmpVar, nbF(I)) { size_t pos=0; for(map<size_t, int>::const_iterator iter=factor(I).counts()[tmpVar.iter].begin(); iter!=factor(I).counts()[tmpVar.iter].end();iter++,pos++) { if (tmpVar == i) { // assumes that counts are iterated in increases order of positions size_t sortedPos = factor(I).sigma()[(*iter).first]; newEP.count[pos] = (*iter).second; newEP.index[pos].reserve( factor(I).states() ); for( IndexFor k( Var(sortedPos, var(i).states()), gndVarSet ); k >= 0; ++k ) { newEP.index[pos].push_back( k ); } } varCount++; } } _edges[i].push_back( newEP ); }
/// Constructor: reads properties from opts and initializes the loop-corrected
/// BP datastructures (pancakes, cavity distributions, phi messages, beliefs)
/// from the factor graph fg.
LC::LC( const FactorGraph & fg, const PropertySet &opts ) : DAIAlgFG(fg), _pancakes(), _cavitydists(), _phis(), _beliefs(), _maxdiff(0.0), _iters(0), props() {
    setProperties( opts );

    // create pancakes (one per variable; contents are computed later)
    _pancakes.resize( nrVars() );

    // create cavitydists: one factor on delta(i), the Markov blanket of i
    _cavitydists.reserve( nrVars() );   // added: consistent with _phis/_beliefs, avoids reallocations
    for( size_t i=0; i < nrVars(); i++ )
        _cavitydists.push_back(Factor( delta(i) ));

    // create phis: one factor per (variable i, neighboring factor I) edge,
    // defined on factor I's variables with var(i) itself removed
    _phis.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ ) {
        _phis.push_back( vector<Factor>() );
        _phis[i].reserve( nbV(i).size() );
        foreach( const Neighbor &I, nbV(i) )
            _phis[i].push_back( Factor( factor(I).vars() / var(i) ) );
    }

    // create beliefs: one single-variable factor per variable
    _beliefs.reserve( nrVars() );
    for( size_t i=0; i < nrVars(); i++ )
        _beliefs.push_back(Factor(var(i)));
}