// Add the fact that v1 and v2 are equivalent.
// Returns true if v1 was not already equivalent to v2,
// and false if v1 was already equivalent to v2.
bool SetEquivalent(int v1, int v2)
{
    bool const already_equiv = false;
    if (v1 == v2)
        return already_equiv;

    ensureElementExists(v1);
    ensureElementExists(v2);

    const int r1 = disjoint_sets_.find_set(v1);
    const int r2 = disjoint_sets_.find_set(v2);
    if (r1 == r2)
        return already_equiv;

    root_set_map_t::const_iterator it1 = rootSetMap_.find(r1);
    assert(it1 != rootSetMap_.end());
    std::list<int> s1 = it1->second;

    root_set_map_t::const_iterator it2 = rootSetMap_.find(r2);
    assert(it2 != rootSetMap_.end());
    std::list<int> s2 = it2->second;

    s1.splice(s1.begin(), s2);   // union the related sets
    disjoint_sets_.link(r1, r2); // union the disjoint sets

    // associate the combined related set with the new root
    int const new_root = disjoint_sets_.find_set(v1);
    if (new_root != r1) {
        rootSetMap_.erase(it1);
    } else {
        rootSetMap_.erase(it2);
    }
    rootSetMap_[new_root] = s1;

    return !already_equiv;
}
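// The function above assumes a surrounding class that owns the union-find
// structure and the root-to-members map. The following is a minimal sketch
// of that scaffolding, assuming Boost's disjoint_sets over associative
// property maps. The member names (disjoint_sets_, rootSetMap_,
// ensureElementExists) come from the snippet above; everything else is an
// illustrative guess, not the original implementation.
#include <boost/pending/disjoint_sets.hpp>
#include <boost/property_map/property_map.hpp>
#include <cassert>
#include <list>
#include <map>

class EquivalenceRelation {
public:
    EquivalenceRelation()
        : rank_map_(rank_), parent_map_(parent_),
          disjoint_sets_(rank_map_, parent_map_) {}

    bool SetEquivalent(int v1, int v2); // body as above

private:
    typedef boost::associative_property_map<std::map<int, int>> index_map_t;
    typedef std::map<int, std::list<int>> root_set_map_t;

    // Lazily admit an element: make a singleton set and a singleton
    // related-members list (hypothetical helper named in the snippet).
    void ensureElementExists(int v) {
        if (parent_.find(v) == parent_.end()) {
            disjoint_sets_.make_set(v);
            rootSetMap_[v].push_back(v);
        }
    }

    std::map<int, int> rank_, parent_; // backing stores for the property maps
    index_map_t rank_map_, parent_map_;
    boost::disjoint_sets<index_map_t, index_map_t> disjoint_sets_;
    root_set_map_t rootSetMap_; // root representative -> members of its class
};

// Expected behaviour: the first call links, later calls are no-ops.
//   EquivalenceRelation eq;
//   eq.SetEquivalent(1, 2); // true  (newly linked)
//   eq.SetEquivalent(2, 1); // false (already equivalent)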
// Union the endpoints of every edge so that afterwards ds represents the
// connected components of g.
template <typename EdgeListGraph, typename DisjointSets>
void dynamic_connected_components(EdgeListGraph& g, DisjointSets& ds)
{
    typename boost::graph_traits<EdgeListGraph>::edge_iterator e, end;
    for (boost::tie(e, end) = edges(g); e != end; ++e) {
        // union_set() finds the representatives first; link() would require
        // that both arguments already are representatives.
        ds.union_set(source(*e, g), target(*e, g));
    }
}
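// A minimal usage sketch for the function above, assuming a Boost.Graph
// adjacency_list and a pointer-based disjoint_sets. The concrete graph and
// the element setup are illustrative.
#include <boost/graph/adjacency_list.hpp>
#include <boost/pending/disjoint_sets.hpp>
#include <iostream>
#include <vector>

int main()
{
    typedef boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS> Graph;
    Graph g(6);
    add_edge(0, 1, g);
    add_edge(1, 2, g);
    add_edge(4, 5, g);

    // One rank/parent slot per vertex; vertex descriptors double as elements.
    std::vector<std::size_t> rank(num_vertices(g)), parent(num_vertices(g));
    boost::disjoint_sets<std::size_t*, std::size_t*> ds(&rank[0], &parent[0]);
    for (std::size_t v = 0; v < num_vertices(g); ++v)
        ds.make_set(v);

    dynamic_connected_components(g, ds);

    std::cout << std::boolalpha
              << (ds.find_set(0) == ds.find_set(2)) << '\n'  // true
              << (ds.find_set(0) == ds.find_set(4)) << '\n'; // false
}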
// Compute bag affiliation for all vertices,
// store the result in m_bagindex.
void ClusterAnalysis::computeBags() {
    const Graph &G = m_C->constGraph();

    // Storage structure for the results
    m_bagindex.init(G);
    // We use union-find for chunks and bags
    DisjointSets<> uf;
    NodeArray<int> setid(G); // Index mapping for union-find
#if 0
    node* nn = new node[G.numberOfNodes()]; // ditto
#endif
    // Every cluster gets its index
    ClusterArray<int> cind(*m_C);
    // We store the lists of cluster vertices
    List<node>* clists = new List<node>[m_C->numberOfClusters()];
    int i = 0;

    // Store the index and detect the current leaf clusters
    List<cluster> ccleafs;
    ClusterArray<int> unprocessedChildren(*m_C); // processing below: compute bags
    for (cluster c : m_C->clusters) {
        cind[c] = i++;
        if (c->cCount() == 0) ccleafs.pushBack(c);
        unprocessedChildren[c] = c->cCount();
    }

    // Now we run through all vertices, storing them in the parent lists;
    // at the same time, we initialize m_bagindex.
    for (node v : G.nodes) {
        // setid is constant in the following
        setid[v] = uf.makeSet();
        // Each vertex v gets its own ClusterArray that stores v's bag index per cluster.
        // See the comment on the use of ClusterArrays above.
        m_bagindex[v] = new ClusterArray<int>(*m_C, DefaultIndex, m_C->maxClusterIndex()+1); //m_C->numberOfClusters());

        cluster c = m_C->clusterOf(v);
        // Push the vertex into its parent's list
        clists[cind[c]].pushBack(v);
    }

    // Now each clist contains the direct vertex descendants.
    // We process the clusters bottom-up, computing the chunks
    // of the leaves first. At each level, the vertex lists of all
    // children of a cluster are concatenated
    // (could improve this by having an array of size(#leaves)
    // and concatenating only at child1); then the bags are
    // updated as follows: chunks may be linked by exactly
    // the edges with lca(c), i.e. the ones in m_lcaEdges[c],
    // and bags may be built by direct child clusters that join chunks.
    // While concatenating the vertex lists, we can check
    // for the vertices in each child whether the union-find number is
    // the same as the one of a first initial vertex; otherwise we join.

    // First, the lowest-level clusters are processed: all chunks are bags.

    OGDF_ASSERT(!ccleafs.empty());

    while (!ccleafs.empty()) {
        const cluster c = ccleafs.popFrontRet();
        Skiplist<int*> cbags; // Stores the bag indexes occurring in c

        auto storeResult = [&] {
            for (node v : clists[cind[c]]) {
                int theid = uf.find(setid[v]);
                (*m_bagindex[v])[c] = theid;
                if (!cbags.isElement(&theid)) {
                    cbags.add(new int(theid));
                }
                // push into the list of outer-active vertices
                if (m_storeoalists && isOuterActive(v, c)) {
                    (*m_oalists)[c].pushBack(v);
                }
            }
            (*m_bags)[c] = cbags.size(); // store the number of bags of c
        };

        if (m_storeoalists) {
            // no outer-active vertices detected so far
            (*m_oalists)[c].clear();
        }

        // process leaves separately
        if (c->cCount() == 0) {
            // TODO: could use the lcaEdges list here too, see below
            for (node u : c->nodes) {
                for (adjEntry adj : u->adjEntries) {
                    node w = adj->twinNode();
                    if (m_C->clusterOf(w) == c) {
                        uf.link(uf.find(setid[u]), uf.find(setid[w]));
                    }
                }
            }
            // Now all chunks in the leaf cluster are computed;
            // the update for the parent is done in the else case.
            storeResult();
        }
        else {
            // We construct the vertex list by concatenating
            // the lists of the children to the current list.
            // We need the lists for storing the results efficiently.
            // (Should be slightly faster than calling clusterNodes each time.)
            // Bags are either links of chunks by edges with lca == c
            // or links of chunks by child clusters.
            // Edge links
            for (edge e : (*m_lcaEdges)[c]) {
                uf.link(uf.find(setid[e->source()]), uf.find(setid[e->target()]));
            }

            // Cluster links
            for (cluster cc : c->children) {
                // Initial id per child cluster cc: use the value of the first
                // vertex; each time we encounter a different value in cc,
                // we link the chunks.

                // add cc's vertices to c's list
                ListConstIterator<node> itvc = clists[cind[cc]].begin();
                int inid = -1;
                if (itvc.valid()) inid = uf.find(setid[*itvc]);
                while (itvc.valid()) {
                    int theid = uf.find(setid[*itvc]);

                    if (theid != inid) uf.link(inid, theid);
                    clists[cind[c]].pushBack(*itvc);
                    ++itvc;
                }
            }

            storeResult();
        }
        // Now we update the status of the parent cluster and,
        // in case all its children are processed, add it to
        // the process queue.
        if (c != m_C->rootCluster()) {
            OGDF_ASSERT(unprocessedChildren[c->parent()] > 0);
            unprocessedChildren[c->parent()]--;
            if (unprocessedChildren[c->parent()] == 0) ccleafs.pushBack(c->parent());
        }
    }

    // clean up
    delete[] clists;
}
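// The chunk/bag mechanics above are easiest to see in isolation. The
// following self-contained sketch (plain C++, no OGDF; all names are
// hypothetical) mimics the core idea: intra-cluster edges first link
// vertices into chunks, and the parent cluster then merges chunks across
// its children via the edges whose lca is the parent.
#include <iostream>
#include <numeric>
#include <vector>

// Minimal union-find with path compression, an illustrative stand-in
// for OGDF's DisjointSets<>.
struct UnionFind {
    std::vector<int> parent;
    explicit UnionFind(int n) : parent(n) {
        std::iota(parent.begin(), parent.end(), 0);
    }
    int find(int x) { return parent[x] == x ? x : parent[x] = find(parent[x]); }
    void link(int a, int b) { parent[find(a)] = find(b); }
};

int main() {
    // Two leaf clusters under one parent:
    //   cluster A = {0,1} with edge 0-1 -> one chunk
    //   cluster B = {2,3} with no edge  -> two chunks
    // The parent cluster owns the edge 1-2 (its lca is the parent).
    UnionFind uf(4);

    // Leaf phase: link along intra-cluster edges; every chunk is a bag.
    uf.link(0, 1); // cluster A collapses to a single bag

    // Parent phase: edges with lca == parent merge chunks across children.
    uf.link(1, 2); // merges A's chunk with one of B's chunks

    // The bag index of a vertex (at the parent) is its representative.
    for (int v = 0; v < 4; ++v)
        std::cout << "vertex " << v << " -> bag " << uf.find(v) << '\n';
    // vertices 0, 1, 2 end up in one bag; vertex 3 forms its own bag
}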