// Collapses the leaf cluster c of CG into a single node of G and removes c
// from CG (unless c is the root cluster). Returns the representative node.
// Todo: should this be a member of ClusterGraph? Is not yet recursive!
node collapseCluster(ClusterGraph& CG, cluster c, Graph& G) {
	OGDF_ASSERT(c->cCount() == 0);

	ListIterator<node> its;
	SListPure<node> collaps;

	// We should check here whether the cluster is not empty.
	node robinson = (*(c->nBegin()));

	for (its = c->nBegin(); its.valid(); ++its) {
		collaps.pushBack(*its);
	}

	CG.collaps(collaps, G);

	if (c != CG.rootCluster()) {
		CG.delCluster(c);
	}

	return robinson;
}
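// A minimal usage sketch: collapseAllLeafClusters is a hypothetical helper,
// assumed to live in the same translation unit as collapseCluster above.
// It collapses every current leaf cluster of CG once; since collapseCluster
// is not recursive, repeating this until only the root cluster remains would
// flatten the whole cluster tree level by level.
static void collapseAllLeafClusters(ClusterGraph& CG, Graph& G) {
	// Collect the leaves first: collapseCluster deletes clusters of CG,
	// so we must not modify CG while iterating over its cluster list.
	SListPure<cluster> leaves;
	for (cluster c : CG.clusters) {
		if (c != CG.rootCluster() && c->cCount() == 0 && c->nCount() > 0) {
			leaves.pushBack(c);
		}
	}
	for (cluster c : leaves) {
		node rep = collapseCluster(CG, c, G);
		// rep is the single node that now represents the former cluster in G.
		OGDF_ASSERT(rep != nullptr);
	}
}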
//todo: This is called only once, but could be sped up the same way as the co-conn check.
void MaxCPlanarMaster::clusterConnection(cluster c, GraphCopy &gc, double &upperBoundC) {
	// For better performance, a node array is used to indicate which nodes are contained
	// in the currently considered cluster.
	NodeArray<bool> vInC(gc, false);

	// First check whether the current cluster \a c is a leaf cluster.
	// If so, compute the number of edges that at least have to be added
	// to make the cluster induced graph connected.
	if (c->cCount() == 0) { // cluster \a c is a leaf cluster
		GraphCopy *inducedC = new GraphCopy((const Graph&)gc);
		List<node> clusterNodes;
		c->getClusterNodes(clusterNodes); // \a clusterNodes now contains all (original) nodes of cluster \a c.
		for (node w : clusterNodes) {
			vInC[gc.copy(w)] = true;
		}

		// Delete all nodes from \a inducedC that do not belong to the cluster,
		// in order to obtain the cluster induced graph.
		node v = inducedC->firstNode();
		while (v != nullptr) {
			node w = v->succ();
			if (!vInC[inducedC->original(v)]) {
				inducedC->delNode(v);
			}
			v = w;
		}

		// Determine the number of connected components of the cluster induced graph.
		//Todo: this check could be skipped
		if (!isConnected(*inducedC)) {
			NodeArray<int> conC(*inducedC);
			int nCC = connectedComponents(*inducedC, conC);
			// At least #connected components - 1 edges have to be added.
			upperBoundC -= (nCC - 1) * m_largestConnectionCoeff;
		}
		delete inducedC;

	// Cluster \a c is an "inner" cluster. Process all child clusters first.
	} else { // c->cCount() != 0, process all child clusters first
		for (cluster ci : c->children) {
			clusterConnection(ci, gc, upperBoundC);
		}

		// Create the cluster induced graph.
		GraphCopy *inducedC = new GraphCopy((const Graph&)gc);
		List<node> clusterNodes;
		c->getClusterNodes(clusterNodes); // \a clusterNodes now contains all (original) nodes of cluster \a c.
		for (node w : clusterNodes) {
			vInC[gc.copy(w)] = true;
		}
		node v = inducedC->firstNode();
		while (v != nullptr) {
			node w = v->succ();
			if (!vInC[inducedC->original(v)]) {
				inducedC->delNode(v);
			}
			v = w;
		}

		// Now collapse each child cluster to one node and determine the number of
		// connected components of \a inducedC.
		List<node> oChildClusterNodes;
		List<node> cChildClusterNodes;
		for (cluster ci : c->children) {
			ci->getClusterNodes(oChildClusterNodes);
			// Compute the corresponding nodes of graph \a inducedC.
			for (node u : oChildClusterNodes) {
				node copy = inducedC->copy(gc.copy(u));
				cChildClusterNodes.pushBack(copy);
			}
			inducedC->collapse(cChildClusterNodes);
			oChildClusterNodes.clear();
			cChildClusterNodes.clear();
		}

		// Now check \a inducedC for connectivity.
		if (!isConnected(*inducedC)) {
			NodeArray<int> conC(*inducedC);
			int nCC = connectedComponents(*inducedC, conC);
			// At least #connected components - 1 edges have to be added.
			upperBoundC -= (nCC - 1) * m_largestConnectionCoeff;
		}
		delete inducedC;
	}
}//clusterConnection
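// A minimal, self-contained sketch of the core step above (a hypothetical helper,
// not part of MaxCPlanarMaster): count how many edges are at least needed to
// connect the subgraph induced by a cluster's nodes, i.e. #connected components - 1.
// Unlike clusterConnection, it works directly on the original graph and does not
// collapse child clusters first.
#include <ogdf/basic/GraphCopy.h>
#include <ogdf/basic/simple_graph_alg.h>
#include <ogdf/cluster/ClusterGraph.h>

int minEdgesToConnectCluster(const ogdf::ClusterGraph& CG, ogdf::cluster c) {
	using namespace ogdf;
	const Graph& G = CG.constGraph();

	// Mark all (original) nodes that lie in or below c.
	List<node> clusterNodes;
	c->getClusterNodes(clusterNodes);
	NodeArray<bool> inC(G, false);
	for (node v : clusterNodes) {
		inC[v] = true;
	}

	// Build the cluster induced graph as a GraphCopy and strip foreign nodes.
	GraphCopy induced(G);
	node v = induced.firstNode();
	while (v != nullptr) {
		node next = v->succ();
		if (!inC[induced.original(v)]) {
			induced.delNode(v);
		}
		v = next;
	}

	// At least #connected components - 1 edges are needed for connectivity.
	NodeArray<int> comp(induced);
	int nCC = connectedComponents(induced, comp);
	return nCC - 1;
}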
// Compute the bag affiliation for all vertices.
// The result is stored in m_bagindex.
void ClusterAnalysis::computeBags() {
	const Graph &G = m_C->constGraph();

	// Storage structure for the results
	m_bagindex.init(G);
	// We use union-find for chunks and bags
	DisjointSets<> uf;
	NodeArray<int> setid(G); // Index mapping for union-find
#if 0
	node* nn = new node[G.numberOfNodes()]; // ditto
#endif

	// Every cluster gets its index
	ClusterArray<int> cind(*m_C);
	// We store the lists of cluster vertices
	List<node>* clists = new List<node>[m_C->numberOfClusters()];
	int i = 0;

	// Store the index and detect the current leaf clusters
	List<cluster> ccleafs;
	ClusterArray<int> unprocessedChildren(*m_C);
	// Processing below: compute bags
	for (cluster c : m_C->clusters) {
		cind[c] = i++;
		if (c->cCount() == 0) {
			ccleafs.pushBack(c);
		}
		unprocessedChildren[c] = c->cCount();
	}

	// Now we run through all vertices, storing them in their parent's list;
	// at the same time, we initialize m_bagindex.
	for (node v : G.nodes) {
		// setid is constant in the following
		setid[v] = uf.makeSet();
		// Each vertex v gets its own ClusterArray that stores v's bag index per cluster.
		// See the comment on the use of ClusterArrays above.
		m_bagindex[v] = new ClusterArray<int>(*m_C, DefaultIndex, m_C->maxClusterIndex() + 1); //m_C->numberOfClusters());

		cluster c = m_C->clusterOf(v);
		// Push the vertex into its parent's list
		clists[cind[c]].pushBack(v);
	}

	// Now each clist contains the direct vertex descendants.
	// We process the clusters bottom-up, computing the chunks
	// of the leaves first. At each level, the vertex lists of all
	// children of a cluster are concatenated
	// (this could be improved by having an array of size(#leaves)
	// and concatenating only at child1); then the bags are
	// updated as follows: chunks may be linked by exactly
	// the edges with lca(c), i.e. the ones in m_lcaEdges[c],
	// and bags may be built by direct child clusters that join chunks.
	// While concatenating the vertex lists, we can check
	// for the vertices in each child whether the union-find number is the same
	// as the one of a first initial vertex; otherwise we join.

	// First, the lowest-level clusters are processed: all chunks are bags.
	OGDF_ASSERT(!ccleafs.empty());

	while (!ccleafs.empty()) {
		const cluster c = ccleafs.popFrontRet();
		Skiplist<int*> cbags; // Stores the bag indexes occurring in c

		auto storeResult = [&] {
			for (node v : clists[cind[c]]) {
				int theid = uf.find(setid[v]);
				(*m_bagindex[v])[c] = theid;
				if (!cbags.isElement(&theid)) {
					cbags.add(new int(theid));
				}
				// Push into the list of outer-active vertices
				if (m_storeoalists && isOuterActive(v, c)) {
					(*m_oalists)[c].pushBack(v);
				}
			}
			(*m_bags)[c] = cbags.size(); // store the number of bags of c
		};

		if (m_storeoalists) {
			// No outer-active vertices detected so far
			(*m_oalists)[c].clear();
		}

		// Process leaves separately.
		if (c->cCount() == 0) {
			//Todo: could use the lcaEdges list here too, see below
			for (node u : c->nodes) {
				for (adjEntry adj : u->adjEntries) {
					node w = adj->twinNode();
					if (m_C->clusterOf(w) == c) {
						uf.link(uf.find(setid[u]), uf.find(setid[w]));
					}
				}
			}
			// Now all chunks in the leaf cluster are computed;
			// the update for the parent is done in the else case.
			storeResult();
		} else {
			// We construct the vertex list by concatenating
			// the lists of the children to the current list.
			// We need the lists for storing the results efficiently.
			// (Should be slightly faster than calling clusterNodes each time.)
			// Bags are either links of chunks by edges with lca == c
			// or links of chunks by child clusters.
			// Edge links
			for (edge e : (*m_lcaEdges)[c]) {
				uf.link(uf.find(setid[e->source()]), uf.find(setid[e->target()]));
			}

			// Cluster links
			for (cluster cc : c->children) {
				// Initial id per child cluster cc: use the value of the first
				// vertex; each time we encounter a different value in cc,
				// we link the chunks.

				// Add cc's vertices to c's list
				ListConstIterator<node> itvc = clists[cind[cc]].begin();
				int inid = -1;
				if (itvc.valid()) {
					inid = uf.find(setid[*itvc]);
				}
				while (itvc.valid()) {
					int theid = uf.find(setid[*itvc]);
					if (theid != inid) {
						uf.link(inid, theid);
					}
					clists[cind[c]].pushBack(*itvc);
					++itvc;
				}
			}
			storeResult();
		}

		// Now we update the status of the parent cluster and,
		// in case all its children are processed, add it to
		// the processing queue.
		if (c != m_C->rootCluster()) {
			OGDF_ASSERT(unprocessedChildren[c->parent()] > 0);
			unprocessedChildren[c->parent()]--;
			if (unprocessedChildren[c->parent()] == 0) {
				ccleafs.pushBack(c->parent());
			}
		}
	}

	// Clean up
	delete[] clists;
}
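// A minimal sketch of the union-find "chunk" idea used above, restricted to a
// single leaf cluster (a hypothetical helper, not part of ClusterAnalysis):
// two vertices of c are merged whenever an edge runs between them and both lie
// directly in c; the number of resulting sets is the number of chunks, which
// for a leaf cluster equals its number of bags.
#include <ogdf/basic/DisjointSets.h>
#include <ogdf/cluster/ClusterGraph.h>

int numberOfLeafChunks(const ogdf::ClusterGraph& CG, ogdf::cluster c) {
	using namespace ogdf;
	OGDF_ASSERT(c->cCount() == 0); // leaf clusters only; inner clusters would need the lca-edge handling above
	const Graph& G = CG.constGraph();

	DisjointSets<> uf;
	NodeArray<int> setid(G, -1);

	// One singleton set per vertex assigned directly to c.
	for (node v : c->nodes) {
		setid[v] = uf.makeSet();
	}
	// Merge along edges whose endpoints both lie directly in c.
	for (node v : c->nodes) {
		for (adjEntry adj : v->adjEntries) {
			node w = adj->twinNode();
			if (setid[w] != -1) {
				uf.link(uf.find(setid[v]), uf.find(setid[w]));
			}
		}
	}
	// Count the distinct set representatives.
	int chunks = 0;
	for (node v : c->nodes) {
		if (uf.find(setid[v]) == setid[v]) {
			++chunks;
		}
	}
	return chunks;
}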