void BipartiteGraph::eraseEdge( size_t n1, size_t n2 ) {
    DAI_ASSERT( n1 < nrNodes1() );
    DAI_ASSERT( n2 < nrNodes2() );
    size_t iter;
    // Search for edge among neighbors of n1
    for( iter = 0; iter < nb1(n1).size(); iter++ )
        if( nb1(n1, iter).node == n2 ) {
            // Remove it
            nb1(n1).erase( nb1(n1).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    for( ; iter < nb1(n1).size(); iter++ ) {
        Neighbor &m2 = nb1( n1, iter );
        m2.iter = iter;
        nb2( m2.node, m2.dual ).dual = iter;
    }
    // Search for edge among neighbors of n2
    for( iter = 0; iter < nb2(n2).size(); iter++ )
        if( nb2(n2, iter).node == n1 ) {
            // Remove it
            nb2(n2).erase( nb2(n2).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    for( ; iter < nb2(n2).size(); iter++ ) {
        Neighbor &m1 = nb2( n2, iter );
        m1.iter = iter;
        nb1( m1.node, m1.dual ).dual = iter;
    }
}

void GraphAL::eraseEdge( size_t n1, size_t n2 ) {
    DAI_ASSERT( n1 < nrNodes() );
    DAI_ASSERT( n2 < nrNodes() );
    size_t iter;
    // Search for edge among neighbors of n1
    for( iter = 0; iter < nb(n1).size(); iter++ )
        if( nb(n1, iter).node == n2 ) {
            // Remove it
            nb(n1).erase( nb(n1).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    for( ; iter < nb(n1).size(); iter++ ) {
        Neighbor &m = nb( n1, iter );
        m.iter = iter;
        nb( m.node, m.dual ).dual = iter;
    }
    // Search for edge among neighbors of n2
    for( iter = 0; iter < nb(n2).size(); iter++ )
        if( nb(n2, iter).node == n1 ) {
            // Remove it
            nb(n2).erase( nb(n2).begin() + iter );
            break;
        }
    // Change the iter and dual values of the subsequent neighbors
    for( ; iter < nb(n2).size(); iter++ ) {
        Neighbor &m = nb( n2, iter );
        m.iter = iter;
        nb( m.node, m.dual ).dual = iter;
    }
}

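// A minimal usage sketch (not part of the library source): both eraseEdge()
// overloads above keep the Neighbor bookkeeping intact, which
// checkConsistency() can then verify. The calls below follow the public
// GraphAL interface; the header path and function name are assumptions.
#include <dai/graph.h>

void exampleEraseEdge() {
    dai::GraphAL G( 4 );       // 4 nodes, no edges
    G.addEdge( 0, 1 );
    G.addEdge( 1, 2 );
    G.addEdge( 2, 3 );
    G.eraseEdge( 1, 2 );       // removed from the adjacency lists of both 1 and 2
    G.checkConsistency();      // the iter/dual invariants repaired above still hold
}
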
void HAK::setProperties( const PropertySet &opts ) {
    DAI_ASSERT( opts.hasKey("tol") );
    DAI_ASSERT( opts.hasKey("doubleloop") );
    DAI_ASSERT( opts.hasKey("clusters") );
    props.tol = opts.getStringAs<Real>("tol");
    props.doubleloop = opts.getStringAs<bool>("doubleloop");
    props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");
    if( opts.hasKey("maxiter") )
        props.maxiter = opts.getStringAs<size_t>("maxiter");
    else
        props.maxiter = 10000;
    if( opts.hasKey("maxtime") )
        props.maxtime = opts.getStringAs<Real>("maxtime");
    else
        props.maxtime = INFINITY;
    if( opts.hasKey("verbose") )
        props.verbose = opts.getStringAs<size_t>("verbose");
    else
        props.verbose = 0;
    if( opts.hasKey("loopdepth") )
        props.loopdepth = opts.getStringAs<size_t>("loopdepth");
    else
        DAI_ASSERT( props.clusters != Properties::ClustersType::LOOP );
    if( opts.hasKey("damping") )
        props.damping = opts.getStringAs<Real>("damping");
    else
        props.damping = 0.0;
    if( opts.hasKey("init") )
        props.init = opts.getStringAs<Properties::InitType>("init");
    else
        props.init = Properties::InitType::UNIFORM;
}

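// A hedged sketch of supplying the properties parsed by HAK::setProperties()
// above. The key names match the hasKey() checks; the values are illustrative,
// not recommended settings, and "fg" and "makeHAK" are assumptions for this
// example only.
#include <dai/hak.h>

dai::HAK makeHAK( const dai::FactorGraph &fg ) {
    dai::PropertySet opts;
    opts.set( "tol", std::string("1e-9") );      // required
    opts.set( "doubleloop", std::string("1") );  // required
    opts.set( "clusters", std::string("MIN") );  // required; "LOOP" would also need "loopdepth"
    return dai::HAK( fg, opts );                 // remaining keys fall back to the defaults above
}
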
void GraphAL::checkConsistency() const {
    size_t N = nrNodes();
    for( size_t n1 = 0; n1 < N; n1++ ) {
        size_t iter = 0;
        foreach( const Neighbor &n2, nb(n1) ) {
            DAI_ASSERT( n2.iter == iter );
            DAI_ASSERT( n2.node < N );
            DAI_ASSERT( n2.dual < nb(n2).size() );
            DAI_ASSERT( nb(n2, n2.dual) == n1 );
            iter++;
        }
    }
}

void SharedParameters::setPermsAndVarSetsFromVarOrders() {
    if( _varorders.size() == 0 )
        return;
    DAI_ASSERT( _estimation != NULL );

    // Construct the permutation objects and the varsets
    for( FactorOrientations::const_iterator foi = _varorders.begin(); foi != _varorders.end(); ++foi ) {
        VarSet vs;
        _perms[foi->first] = calculatePermutation( foi->second, vs );
        _varsets[foi->first] = vs;
        DAI_ASSERT( _estimation->probSize() == vs.nrStates() );
    }
}

CondProbEstimation::CondProbEstimation( size_t target_dimension, const Prob &pseudocounts )
  : _target_dim(target_dimension), _initial_stats(pseudocounts), _props()
{
    DAI_ASSERT( !(_initial_stats.size() % _target_dim) );
    _props.setAsString<size_t>("target_dim", _target_dim);
    _props.setAsString<size_t>("total_dim", _initial_stats.size());
    _props.setAsString<Real>("pseudo_count", _initial_stats.get(0));
}

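// A minimal construction sketch for the estimator above (illustrative values):
// pseudocounts for a conditional table with target_dim = 2 states of the
// target variable and 3 parent configurations, so total_dim = 6 and the
// divisibility assert (6 % 2 == 0) holds. The header path is an assumption.
#include <dai/emalg.h>   // assumed header providing CondProbEstimation

void exampleCondProbEstimation() {
    dai::Prob pseudo( 6, 1.0 );                // uniform pseudocount of 1 per entry
    dai::CondProbEstimation est( 2, pseudo );  // target_dim = 2 divides total_dim = 6
}
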
TreeEP::TreeEP( const FactorGraph &fg, const PropertySet &opts )
  : JTree(fg, opts("updates",string("HUGIN")), false), _maxdiff(0.0), _iters(0), props(), _Q()
{
    setProperties( opts );

    if( opts.hasKey("tree") ) {
        construct( fg, opts.getAs<RootedTree>("tree") );
    } else {
        if( props.type == Properties::TypeType::ORG || props.type == Properties::TypeType::ALT ) {
            // ORG: construct weighted graph with as weights a crude estimate of the
            // mutual information between the nodes
            // ALT: construct weighted graph with as weights an upper bound on the
            // effective interaction strength between pairs of nodes
            WeightedGraph<Real> wg;
            // in order to get a connected weighted graph, we start
            // by connecting every variable to the zero'th variable with weight 0
            for( size_t i = 1; i < fg.nrVars(); i++ )
                wg[UEdge(i,0)] = 0.0;
            for( size_t i = 0; i < fg.nrVars(); i++ ) {
                SmallSet<size_t> delta_i = fg.bipGraph().delta1( i, false );
                const Var& v_i = fg.var(i);
                foreach( size_t j, delta_i )
                    if( i < j ) {
                        const Var& v_j = fg.var(j);
                        VarSet v_ij( v_i, v_j );
                        SmallSet<size_t> nb_ij = fg.bipGraph().nb1Set( i ) | fg.bipGraph().nb1Set( j );
                        Factor piet;
                        foreach( size_t I, nb_ij ) {
                            const VarSet& Ivars = fg.factor(I).vars();
                            if( props.type == Properties::TypeType::ORG ) {
                                if( (Ivars == v_i) || (Ivars == v_j) )
                                    piet *= fg.factor(I);
                                else if( Ivars >> v_ij )
                                    piet *= fg.factor(I).marginal( v_ij );
                            } else {
                                if( Ivars >> v_ij )
                                    piet *= fg.factor(I);
                            }
                        }
                        if( props.type == Properties::TypeType::ORG ) {
                            if( piet.vars() >> v_ij ) {
                                piet = piet.marginal( v_ij );
                                Factor pietf = piet.marginal(v_i) * piet.marginal(v_j);
                                wg[UEdge(i,j)] = dist( piet, pietf, DISTKL );
                            } else {
                                // this should never happen...
                                DAI_ASSERT( 0 == 1 );
                                wg[UEdge(i,j)] = 0;
                            }
                        } else
                            wg[UEdge(i,j)] = piet.strength(v_i, v_j);
                    }
            }
            // ... (listing truncated here: the weighted graph wg is subsequently
            // used to select the tree that is passed to construct())
        }
    }
}

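// A hedged construction sketch for the TreeEP constructor above. It is assumed
// here that "type" and "tol" are the required properties of this class's
// setProperties(); "fg" and "makeTreeEP" are illustrative names. When no
// "tree" property is given, the constructor derives the tree from the
// weighted graph wg built above.
#include <dai/treeep.h>

dai::TreeEP makeTreeEP( const dai::FactorGraph &fg ) {
    dai::PropertySet opts;
    opts.set( "type", std::string("ORG") );   // or "ALT"
    opts.set( "tol", std::string("1e-9") );
    return dai::TreeEP( fg, opts );
}
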
void CFactorGraph::clamp( size_t i, size_t x, bool backup ) {
    DAI_ASSERT( x < var(i).states() );   // x must be a valid state index of var(i)

    // store evidence of variable
    _evidence[var(i).label()] = x;

    // Multiply each factor that contains the variable with a delta function
    CFactor mask( var(i), (Real)0 );
    mask[x] = (Real)1;
    map<size_t, CFactor> newFacs;
    foreach( const Neighbor &I, nbV(i) ) {
        newFacs[I] = factor(I) * mask;
        newFacs[I].sigma() = factor(I).sigma();
        newFacs[I].counts() = factor(I).counts();
    }
    // ... (listing truncated here: the masked factors in newFacs still have to
    // be written back into the factor graph)
}

void GraphAL::eraseNode( size_t n ) {
    DAI_ASSERT( n < nrNodes() );
    // Erase neighbor entry of node n
    _nb.erase( _nb.begin() + n );
    // Adjust neighbor entries of nodes
    for( size_t n2 = 0; n2 < nrNodes(); n2++ ) {
        for( size_t iter = 0; iter < nb(n2).size(); ) {
            Neighbor &m = nb(n2, iter);
            if( m.node == n ) {
                // delete this entry, because it points to the deleted node
                nb(n2).erase( nb(n2).begin() + iter );
            } else {
                // update this entry and the corresponding dual of the neighboring node
                if( m.node > n )
                    m.node--;
                nb( m.node, m.dual ).dual = iter;
                m.iter = iter++;
            }
        }
    }
}

void BipartiteGraph::eraseNode2( size_t n2 ) {
    DAI_ASSERT( n2 < nrNodes2() );
    // Erase neighbor entry of node n2
    _nb2.erase( _nb2.begin() + n2 );
    // Adjust neighbor entries of nodes of type 1
    for( size_t n1 = 0; n1 < nrNodes1(); n1++ ) {
        for( size_t iter = 0; iter < nb1(n1).size(); ) {
            Neighbor &m2 = nb1(n1, iter);
            if( m2.node == n2 ) {
                // delete this entry, because it points to the deleted node
                nb1(n1).erase( nb1(n1).begin() + iter );
            } else if( m2.node > n2 ) {
                // update this entry and the corresponding dual of the neighboring node of type 2
                m2.iter = iter;
                m2.node--;
                nb2( m2.node, m2.dual ).dual = iter;
                iter++;
            } else {
                // skip
                iter++;
            }
        }
    }
}

void BipartiteGraph::eraseNode1( size_t n1 ) {
    DAI_ASSERT( n1 < nrNodes1() );
    // Erase neighbor entry of node n1
    _nb1.erase( _nb1.begin() + n1 );
    // Adjust neighbor entries of nodes of type 2
    for( size_t n2 = 0; n2 < nrNodes2(); n2++ ) {
        for( size_t iter = 0; iter < nb2(n2).size(); ) {
            Neighbor &m1 = nb2(n2, iter);
            if( m1.node == n1 ) {
                // delete this entry, because it points to the deleted node
                nb2(n2).erase( nb2(n2).begin() + iter );
            } else if( m1.node > n1 ) {
                // update this entry and the corresponding dual of the neighboring node of type 1
                m1.iter = iter;
                m1.node--;
                nb1( m1.node, m1.dual ).dual = iter;
                iter++;
            } else {
                // skip
                iter++;
            }
        }
    }
}

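// An illustrative sketch of the renumbering done by the eraseNode* methods
// above: erasing a node shifts every higher node index down by one, and the
// loops repair the iter/dual bookkeeping of the surviving Neighbor entries.
// The header path and function name are assumptions.
#include <dai/bipgraph.h>

void exampleEraseNode1() {
    dai::BipartiteGraph B( 3, 2 );   // 3 nodes of type 1, 2 nodes of type 2
    B.addEdge( 0, 0 );
    B.addEdge( 1, 0 );
    B.addEdge( 2, 1 );
    B.eraseNode1( 1 );               // the former type-1 node 2 is now node 1
    DAI_ASSERT( B.nb2(1)[0].node == 1 );   // its nb2 entry was decremented accordingly
}
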
Real BBPCostFunction::evaluate( const InfAlg &ia, const vector<size_t> *stateP ) const {
    Real cf = 0.0;
    const FactorGraph &fg = ia.fg();

    switch( (size_t)(*this) ) {
        case CFN_BETHE_ENT: // ignores state
            cf = -ia.logZ();
            break;
        case CFN_VAR_ENT: // ignores state
            for( size_t i = 0; i < fg.nrVars(); i++ )
                cf += -ia.beliefV(i).entropy();
            break;
        case CFN_FACTOR_ENT: // ignores state
            for( size_t I = 0; I < fg.nrFactors(); I++ )
                cf += -ia.beliefF(I).entropy();
            break;
        case CFN_GIBBS_B:
        case CFN_GIBBS_B2:
        case CFN_GIBBS_EXP: {
            DAI_ASSERT( stateP != NULL );
            vector<size_t> state = *stateP;
            DAI_ASSERT( state.size() == fg.nrVars() );
            for( size_t i = 0; i < fg.nrVars(); i++ ) {
                Real b = ia.beliefV(i)[state[i]];
                switch( (size_t)(*this) ) {
                    case CFN_GIBBS_B:
                        cf += b;
                        break;
                    case CFN_GIBBS_B2:
                        cf += b * b / 2.0;
                        break;
                    case CFN_GIBBS_EXP:
                        cf += exp( b );
                        break;
                    default:
                        DAI_THROW(UNKNOWN_ENUM_VALUE);
                }
            }
            break;
        }
        case CFN_GIBBS_B_FACTOR:
        case CFN_GIBBS_B2_FACTOR:
        case CFN_GIBBS_EXP_FACTOR: {
            DAI_ASSERT( stateP != NULL );
            vector<size_t> state = *stateP;
            DAI_ASSERT( state.size() == fg.nrVars() );
            for( size_t I = 0; I < fg.nrFactors(); I++ ) {
                size_t x_I = getFactorEntryForState( fg, I, state );
                Real b = ia.beliefF(I)[x_I];
                switch( (size_t)(*this) ) {
                    case CFN_GIBBS_B_FACTOR:
                        cf += b;
                        break;
                    case CFN_GIBBS_B2_FACTOR:
                        cf += b * b / 2.0;
                        break;
                    case CFN_GIBBS_EXP_FACTOR:
                        cf += exp( b );
                        break;
                    default:
                        DAI_THROW(UNKNOWN_ENUM_VALUE);
                }
            }
            break;
        }
        default:
            DAI_THROWE(UNKNOWN_ENUM_VALUE, "Unknown cost function " + std::string(*this));
    }

    return cf;
}

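// A hedged usage sketch for evaluate() above: the state-dependent cost
// functions need a joint state vector (one entry per variable) alongside a
// finished inference run. "ia" is an assumed InfAlg, and the constructor form
// follows the DAI_ENUM convention; both are illustrative.
#include <dai/bbp.h>

dai::Real exampleCost( const dai::InfAlg &ia ) {
    dai::BBPCostFunction cfn( dai::BBPCostFunction::CFN_GIBBS_B );
    std::vector<size_t> state( ia.fg().nrVars(), 0 );   // e.g. the all-zero state
    return cfn.evaluate( ia, &state );
}
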
CondProbEstimation::CondProbEstimation( size_t target_dimension, const Prob &pseudocounts )
  : _target_dim(target_dimension), _stats(pseudocounts), _initial_stats(pseudocounts)
{
    DAI_ASSERT( !(_stats.size() % _target_dim) );
}

void MR::propagateCavityFields() {
    Real sum_even, sum_odd;
    Real maxdev;
    size_t maxruns = 1000;

    for( size_t i = 0; i < G.nrNodes(); i++ )
        bforeach( const Neighbor &j, G.nb(i) )
            M[i][j.iter] = 0.1;

    size_t run = 0;
    do {
        maxdev = 0.0;
        run++;
        for( size_t i = 0; i < G.nrNodes(); i++ ) {
            bforeach( const Neighbor &j, G.nb(i) ) {
                size_t _j = j.iter;
                size_t _i = G.findNb(j,i);
                DAI_ASSERT( G.nb(j,_i) == i );

                Real newM = 0.0;
                if( props.updates == Properties::UpdateType::FULL ) {
                    // find indices in nb(j) that do not correspond with i
                    sub_nb _nbj_min_i(G.nb(j).size());
                    _nbj_min_i.set();
                    _nbj_min_i.reset(_i);

                    // find indices in nb(i) that do not correspond with j
                    sub_nb _nbi_min_j(G.nb(i).size());
                    _nbi_min_j.set();
                    _nbi_min_j.reset(_j);

                    sum_subs(j, _nbj_min_i, &sum_even, &sum_odd);
                    newM = (tanh(theta[j]) * sum_even + sum_odd) / (sum_even + tanh(theta[j]) * sum_odd);

                    sum_subs(i, _nbi_min_j, &sum_even, &sum_odd);
                    Real denom = sum_even + tanh(theta[i]) * sum_odd;
                    Real numer = 0.0;
                    for( size_t _k = 0; _k < G.nb(i).size(); _k++ )
                        if( _k != _j ) {
                            sub_nb _nbi_min_jk(_nbi_min_j);
                            _nbi_min_jk.reset(_k);
                            sum_subs(i, _nbi_min_jk, &sum_even, &sum_odd);
                            numer += tJ[i][_k] * cors[i][_j][_k] * (tanh(theta[i]) * sum_even + sum_odd);
                        }
                    newM -= numer / denom;
                } else if( props.updates == Properties::UpdateType::LINEAR ) {
                    newM = T(j,_i);
                    for( size_t _l = 0; _l < G.nb(i).size(); _l++ )
                        if( _l != _j )
                            newM -= Omega(i,_j,_l) * tJ[i][_l] * cors[i][_j][_l];
                    for( size_t _l1 = 0; _l1 < G.nb(j).size(); _l1++ )
                        if( _l1 != _i )
                            for( size_t _l2 = _l1 + 1; _l2 < G.nb(j).size(); _l2++ )
                                if( _l2 != _i )
                                    newM += Gamma(j,_i,_l1,_l2) * tJ[j][_l1] * tJ[j][_l2] * cors[j][_l1][_l2];
                }

                Real dev = newM - M[i][_j];
                // dev *= 0.02;
                if( abs(dev) >= maxdev )
                    maxdev = abs(dev);

                newM = M[i][_j] + dev;
                if( abs(newM) > 1.0 )
                    newM = (newM > 0.0) ? 1.0 : -1.0;
                M[i][_j] = newM;
            }
        }
    } while( (maxdev > props.tol) && (run < maxruns) );

    _iters = run;
    if( maxdev > _maxdiff )
        _maxdiff = maxdev;

    if( run == maxruns ) {
        if( props.verbose >= 1 )
            cerr << "MR::propagateCavityFields: Convergence not reached (maxdev=" << maxdev << ")..." << endl;
    }
}