void cgraph_rebuild_references (void) { basic_block bb; struct cgraph_node *node = cgraph_get_node (current_function_decl); gimple_stmt_iterator gsi; ipa_remove_all_references (&node->ref_list); node->count = ENTRY_BLOCK_PTR->count; FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store, mark_address); } for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi)) walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node, mark_load, mark_store, mark_address); } record_eh_tables (node, cfun); }
/* Pass worker: detach all outgoing call edges and all IPA references
   of the current function's cgraph node.  Returns 0 (no TODO flags).  */

static unsigned int
remove_cgraph_callee_edges (void)
{
  struct cgraph_node *fn_node = cgraph_get_node (current_function_decl);

  cgraph_node_remove_callees (fn_node);
  ipa_remove_all_references (&fn_node->ref_list);
  return 0;
}
/* Remove node from the varpool. */

/* Unlink NODE from every varpool data structure -- the decl hash table,
   the doubly linked list of all variables, and the "needed" queue --
   detach it from its comdat group, drop its IPA references, and
   release its memory.  */
void varpool_remove_node (struct varpool_node *node)
{
  void **slot;

  /* NODE must be present in the hash table; clear its slot.  */
  slot = htab_find_slot (varpool_hash, node, NO_INSERT);
  gcc_assert (*slot == node);
  htab_clear_slot (varpool_hash, slot);
  /* Removal must happen before any node was queued for assembling.  */
  gcc_assert (!varpool_assembled_nodes_queue);

  /* Unlink NODE from the doubly linked list of all varpool nodes.  */
  if (node->next)
    node->next->prev = node->prev;
  if (node->prev)
    node->prev->next = node->next;
  else
    {
      /* No predecessor means NODE is the list head.  */
      gcc_assert (varpool_nodes == node);
      varpool_nodes = node->next;
    }

  /* Unlink NODE from the "needed" queue, fixing up the unanalyzed
     cursor and the queue head/tail pointers as required.  */
  if (varpool_first_unanalyzed_node == node)
    varpool_first_unanalyzed_node = node->next_needed;
  if (node->next_needed)
    node->next_needed->prev_needed = node->prev_needed;
  else if (node->prev_needed)
    {
      /* NODE was the queue tail.  */
      gcc_assert (varpool_last_needed_node);
      varpool_last_needed_node = node->prev_needed;
    }
  if (node->prev_needed)
    node->prev_needed->next_needed = node->next_needed;
  else if (node->next_needed)
    {
      /* NODE was the queue head.  */
      gcc_assert (varpool_nodes_queue == node);
      varpool_nodes_queue = node->next_needed;
    }

  /* Remove NODE from its circular same_comdat_group ring.  */
  if (node->same_comdat_group)
    {
      struct varpool_node *prev;
      /* Walk the ring to find NODE's predecessor.  */
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	/* PREV was the only other member; dissolve the group.  */
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* Drop references both from and to NODE, then free it.
     (NB: "refering" is the historical spelling of this API.)  */
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  ggc_free (node);
}
/* Unregister NODE from the symbol table: drop its IPA references,
   detach it from its comdat group, unlink it from the chain of symtab
   nodes and from the decl hash table, and remove its assembler name
   mapping.  The node itself is not freed here.  */
void symtab_unregister_node (symtab_node *node)
{
  void **slot;

  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_referring (&node->ref_list);

  /* Remove NODE from its circular same_comdat_group ring.  */
  if (node->same_comdat_group)
    {
      symtab_node *prev;
      /* Walk the ring to find NODE's predecessor.  */
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	/* PREV was the only other member; dissolve the group.  */
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* Unlink NODE from the doubly linked chain of symtab nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    symtab_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;

  slot = htab_find_slot (symtab_hash, node, NO_INSERT);

  /* During LTO symtab merging we temporarily corrupt decl to symtab node
     hash.  */
  gcc_assert ((slot && *slot) || in_lto_p);
  if (slot && *slot && *slot == node)
    {
      symtab_node *replacement_node = NULL;
      /* A function node may have a surviving clone; keep the hash slot
	 pointing at it instead of clearing the slot.  */
      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	replacement_node = cgraph_find_replacement_node (cnode);
      if (!replacement_node)
	htab_clear_slot (symtab_hash, slot);
      else
	*slot = replacement_node;
    }

  /* Hard-register variables keep their assembler name mapping; every
     other node loses it.  */
  if (!is_a <varpool_node> (node) || !DECL_HARD_REGISTER (node->decl))
    unlink_from_assembler_name_hash (node, false);
}
unsigned int rebuild_cgraph_edges (void) { basic_block bb; struct cgraph_node *node = cgraph_get_node (current_function_decl); gimple_stmt_iterator gsi; cgraph_node_remove_callees (node); ipa_remove_all_references (&node->ref_list); node->count = ENTRY_BLOCK_PTR->count; FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); tree decl; if (is_gimple_call (stmt)) { int freq = compute_call_stmt_bb_frequency (current_function_decl, bb); decl = gimple_call_fndecl (stmt); if (decl) cgraph_create_edge (node, cgraph_get_create_node (decl), stmt, bb->count, freq); else cgraph_create_indirect_edge (node, stmt, gimple_call_flags (stmt), bb->count, freq); } walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store, mark_address); } for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi)) walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node, mark_load, mark_store, mark_address); } record_eh_tables (node, cfun); gcc_assert (!node->global.inlined_to); return 0; }
/* Unregister NODE from the symbol table: drop its IPA references,
   detach it from its comdat group, unlink it from the chain of symtab
   nodes and from the decl hash table, and remove its assembler name
   mapping.  The node itself is not freed here.  */

void
symtab_unregister_node (symtab_node node)
{
  void **slot;

  ipa_remove_all_references (&node->symbol.ref_list);
  ipa_remove_all_referring (&node->symbol.ref_list);

  /* Remove NODE from its circular same_comdat_group ring.  */
  if (node->symbol.same_comdat_group)
    {
      symtab_node prev;
      /* Walk the ring to find NODE's predecessor.  */
      for (prev = node->symbol.same_comdat_group;
	   prev->symbol.same_comdat_group != node;
	   prev = prev->symbol.same_comdat_group)
	;
      if (node->symbol.same_comdat_group == prev)
	/* PREV was the only other member; dissolve the group.  */
	prev->symbol.same_comdat_group = NULL;
      else
	prev->symbol.same_comdat_group = node->symbol.same_comdat_group;
      node->symbol.same_comdat_group = NULL;
    }

  /* Unlink NODE from the doubly linked chain of symtab nodes.  */
  if (node->symbol.previous)
    node->symbol.previous->symbol.next = node->symbol.next;
  else
    symtab_nodes = node->symbol.next;
  if (node->symbol.next)
    node->symbol.next->symbol.previous = node->symbol.previous;
  node->symbol.next = NULL;
  node->symbol.previous = NULL;

  slot = htab_find_slot (symtab_hash, node, NO_INSERT);
  /* htab_find_slot with NO_INSERT returns NULL when NODE has no entry
     in the hash table; the previous code dereferenced SLOT
     unconditionally, which would crash in that case.  Guard the
     dereference, matching the later variant of this function.  */
  if (slot && *slot == node)
    {
      symtab_node replacement_node = NULL;
      /* A function node may have a surviving clone; keep the hash slot
	 pointing at it instead of clearing the slot.  */
      if (symtab_function_p (node))
	replacement_node
	  = (symtab_node) cgraph_find_replacement_node (cgraph (node));
      if (!replacement_node)
	htab_clear_slot (symtab_hash, slot);
      else
	*slot = replacement_node;
    }

  unlink_from_assembler_name_hash (node);
}
unsigned int inline_transform (struct cgraph_node *node) { unsigned int todo = 0; struct cgraph_edge *e, *next; /* FIXME: Currently the pass manager is adding inline transform more than once to some clones. This needs revisiting after WPA cleanups. */ if (cfun->after_inlining) return 0; /* We might need the body of this function so that we can expand it inline somewhere else. */ if (preserve_function_body_p (node)) save_inline_function_body (node); for (e = node->callees; e; e = next) { next = e->next_callee; cgraph_redirect_edge_call_stmt_to_callee (e); } ipa_remove_all_references (&node->ref_list); timevar_push (TV_INTEGRATION); if (node->callees && optimize) todo = optimize_inline_calls (current_function_decl); timevar_pop (TV_INTEGRATION); cfun->always_inline_functions_inlined = true; cfun->after_inlining = true; todo |= execute_fixup_cfg (); if (!(todo & TODO_update_ssa_any)) /* Redirecting edges might lead to a need for vops to be recomputed. */ todo |= TODO_update_ssa_only_virtuals; return todo; }