/* cleanup1:
 * Undo the transient state left by ranking: reset every node's in/out
 * edge lists and mark flag, free the virtual edges created for ranking,
 * and release the component list of g.
 */
static void cleanup1(graph_t * g)
{
    node_t *n;
    edge_t *e, *f;
    int c;

    /* For each connected component, reset per-node edge lists and marks. */
    for (c = 0; c < GD_comp(g).size; c++) {
	GD_nlist(g) = GD_comp(g).list[c];
	for (n = GD_nlist(g); n; n = ND_next(n)) {
	    renewlist(&ND_in(n));
	    renewlist(&ND_out(n));
	    ND_mark(n) = FALSE;
	}
    }
    /* Free every virtual edge exactly once, via the real edge that owns it. */
    for (n = agfstnode(g); n; n = agnxtnode(g, n)) {
	for (e = agfstout(g, n); e; e = agnxtout(g, e)) {
	    f = ED_to_virt(e);
	    /* Null out any other references to f to make sure we don't
	     * handle it a second time. For example, parallel multiedges
	     * share a virtual edge.
	     */
	    if (f && (e == ED_to_orig(f))) {
		edge_t *e1, *f1;
#ifndef WITH_CGRAPH
		/* Scan the remaining out-edges of n for other references
		 * to f and clear them before freeing f. */
		for (e1 = agfstout(g, n); e1; e1 = agnxtout(g, e1)) {
		    if (e != e1) {
			f1 = ED_to_virt(e1);
			if (f1 && (f == f1)) {
			    ED_to_virt(e1) = NULL;
			}
		    }
		}
#else
		/* cgraph: sharing edges may hang off any node, so scan
		 * every out-edge in the whole graph. */
		node_t *n1;
		for (n1 = agfstnode(g); n1; n1 = agnxtnode(g, n1)) {
		    for (e1 = agfstout(g, n1); e1; e1 = agnxtout(g, e1)) {
			if (e != e1) {
			    f1 = ED_to_virt(e1);
			    if (f1 && (f == f1)) {
				ED_to_virt(e1) = NULL;
			    }
			}
		    }
		}
		/* Release the cgraph data record attached to the virtual
		 * edge before freeing the edge struct itself. */
		free(f->base.data);
#endif
		free(f);
	    }
	    ED_to_virt(e) = NULL;
	}
    }
    /* Release the component list built by decompose(). */
    free(GD_comp(g).list);
    GD_comp(g).list = NULL;
    GD_comp(g).size = 0;
}
/* Run the network simplex algorithm on each component.
 * The "nslimit1" graph attribute, if set, caps the iteration count at
 * attribute-value * number-of-nodes; otherwise the cap is MAXINT.
 */
void rank1(graph_t* g)
{
    char *limit_attr;
    int iter_cap = MAXINT;
    int comp;

    limit_attr = agget(g, "nslimit1");
    if (limit_attr)
	iter_cap = atof(limit_attr) * agnnodes(g);

    for (comp = 0; comp < GD_comp(g).size; comp++) {
	GD_nlist(g) = GD_comp(g).list[comp];
	/* balance top/bottom only when there are no clusters */
	rank(g, (GD_n_cluster(g) == 0 ? 1 : 0), iter_cap);	/* TB balance */
    }
}
/* expand_cluster:
 * delete virtual nodes of a cluster, and install real nodes or sub-clusters
 */
void expand_cluster(graph_t * subg)
{
    /* build internal structure of the cluster */
    class2(subg);
    /* the cluster is treated as a single component whose node list is
     * the cluster's own nlist */
    GD_comp(subg).size = 1;
    GD_comp(subg).list[0] = GD_nlist(subg);
    allocate_ranks(subg);
    build_ranks(subg, 0);
    merge_ranks(subg);
    /* build external structure of the cluster */
    interclexp(subg);
    remove_rankleaders(subg);
}
/* dot_cleanup_graph:
 * Release all layout state attached to g: recursively clean clusters,
 * then free the cluster array, rank leaders, component list and rank
 * array.  Non-root graphs also have their graph info record reset.
 */
static void dot_cleanup_graph(graph_t * g)
{
    int i, c;
    graph_t *clust;

    /* Clean clusters first; GD_clust is 1-indexed. */
    for (c = 1; c <= GD_n_cluster(g); c++) {
	clust = GD_clust(g)[c];
	GD_cluster_was_collapsed(clust) = FALSE;
	dot_cleanup(clust);
    }
    /* free(NULL) is a no-op, so no guards are needed here. */
    free (GD_clust(g));
    free (GD_rankleader(g));
    free_list(GD_comp(g));
    if (GD_rank(g)) {
	for (i = GD_minrank(g); i <= GD_maxrank(g); i++)
	    free(GD_rank(g)[i].av);
	/* When minrank is -1 the rank array was allocated one slot
	 * before the stored pointer; free the true allocation base. */
	if (GD_minrank(g) == -1)
	    free(GD_rank(g)-1);
	else
	    free(GD_rank(g));
    }
    /* The root graph's record is owned elsewhere; only reset subgraphs. */
    if (g != agroot(g))
#ifndef WITH_CGRAPH
	memset(&(g->u), 0, sizeof(Agraphinfo_t));
#else /* WITH_CGRAPH */
	agclean(g,AGRAPH,"Agraphinfo_t");
#endif /* WITH_CGRAPH */
}
/* dot_cleanup_graph:
 * Release layout state for g: clean each cluster recursively, free the
 * component list, and — on the root graph only — free the rank array.
 * Subgraphs get their graph info record zeroed instead.
 */
static void dot_cleanup_graph(graph_t * g)
{
    int cl;

    /* clusters are stored 1-indexed in GD_clust */
    for (cl = 1; cl <= GD_n_cluster(g); cl++) {
	graph_t *sub = GD_clust(g)[cl];
	GD_cluster_was_collapsed(sub) = FALSE;
	dot_cleanup(sub);
    }

    free_list(GD_comp(g));

    if (g == g->root) {
	if (GD_rank(g)) {
	    int r;
	    for (r = GD_minrank(g); r <= GD_maxrank(g); r++)
		free(GD_rank(g)[r].v);
	    free(GD_rank(g));
	}
    } else {
	/* non-root: wipe the per-graph layout record */
	memset(&(g->u), 0, sizeof(Agraphinfo_t));
    }
}
/* dot1_rank:
 * Assign ranks to the nodes of g: collapse sets, break cycles, then run
 * one of the ranking algorithms (rank1, rank3, or level-attribute ranks).
 * asp != NULL => g is root
 */
static void dot1_rank(graph_t * g, aspect_t* asp)
{
    point p;
#ifdef ALLOW_LEVELS
    attrsym_t* N_level;
#endif
    edgelabel_ranks(g);

    if (asp) {
	init_UF_size(g);
	initEdgeTypes(g);
    }

    collapse_sets(g,g);
    /*collapse_leaves(g); */
    class1(g);
    p = minmax_edges(g);
    decompose(g, 0);
    /* Aspect-ratio ranking can't handle multiple components or clusters:
     * flag the graph as bad and fall back to normal ranking below. */
    if (asp && ((GD_comp(g).size > 1)||(GD_n_cluster(g) > 0))) {
	asp->badGraph = 1;
	asp = NULL;
    }
    acyclic(g);
    /* minmax_edges2 may add edges, requiring a fresh decomposition */
    if (minmax_edges2(g, p))
	decompose(g, 0);
#ifdef ALLOW_LEVELS
    /* If nodes carry an explicit "level" attribute, use it directly. */
    if ((N_level = agattr(g,AGNODE,"level",NULL)))
	setRanks(g, N_level);
    else
#endif
    if (asp)
	rank3(g, asp);
    else
	rank1(g);
    expand_ranksets(g, asp);
    cleanup1(g);
}
/* dot_cleanup_graph:
 * Release all layout state attached to g: recursively clean subgraphs,
 * then free the cluster array, rank leaders, component list and rank
 * array.  Non-root graphs also have their Agraphinfo_t record removed.
 */
static void dot_cleanup_graph(graph_t * g)
{
    int i;
    graph_t *subg;
    for (subg = agfstsubg(g); subg; subg = agnxtsubg(subg)) {
	dot_cleanup_graph(subg);
    }
    /* free(NULL) is a no-op, so no guards are needed here. */
    free (GD_clust(g));
    free (GD_rankleader(g));
    free_list(GD_comp(g));
    if (GD_rank(g)) {
	for (i = GD_minrank(g); i <= GD_maxrank(g); i++)
	    free(GD_rank(g)[i].av);
	/* When minrank is -1 the rank array was allocated one slot
	 * before the stored pointer; free the true allocation base. */
	if (GD_minrank(g) == -1)
	    free(GD_rank(g)-1);
	else
	    free(GD_rank(g));
    }
    /* The root graph's record is owned elsewhere; only strip subgraphs. */
    if (g != agroot(g))
	agdelrec(g,"Agraphinfo_t");
}