static void lto_cgraph_replace_node (struct cgraph_node *node, struct cgraph_node *prevailing_node) { struct cgraph_edge *e, *next; bool compatible_p; if (cgraph_dump_file) { fprintf (cgraph_dump_file, "Replacing cgraph node %s/%i by %s/%i" " for symbol %s\n", xstrdup (cgraph_node_name (node)), node->uid, xstrdup (cgraph_node_name (prevailing_node)), prevailing_node->uid, IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl))))); } /* Merge node flags. */ if (node->needed) cgraph_mark_needed_node (prevailing_node); if (node->reachable) cgraph_mark_reachable_node (prevailing_node); if (node->address_taken) { gcc_assert (!prevailing_node->global.inlined_to); cgraph_mark_address_taken_node (prevailing_node); } /* Redirect all incoming edges. */ compatible_p = types_compatible_p (TREE_TYPE (TREE_TYPE (prevailing_node->decl)), TREE_TYPE (TREE_TYPE (node->decl))); for (e = node->callers; e; e = next) { next = e->next_caller; cgraph_redirect_edge_callee (e, prevailing_node); /* If there is a mismatch between the supposed callee return type and the real one do not attempt to inline this function. ??? We really need a way to match function signatures for ABI compatibility and perform related promotions at inlining time. */ if (!compatible_p) e->call_stmt_cannot_inline_p = 1; } /* Redirect incomming references. */ ipa_clone_refering (prevailing_node, NULL, &node->ref_list); /* Finally remove the replaced node. */ cgraph_remove_node (node); }
/* If FNDECL is a static constructor or destructor and the target lacks
   native ctor/dtor support, queue it onto the static_ctors/static_dtors
   vector (clearing the corresponding DECL flag so it is not emitted
   twice), force it to be inlinable without limits, and mark its cgraph
   node reachable.  Targets with native support need no bookkeeping.  */

static void
record_cdtor_fn (tree fndecl)
{
  bool is_ctor = DECL_STATIC_CONSTRUCTOR (fndecl) != 0;
  bool is_dtor = DECL_STATIC_DESTRUCTOR (fndecl) != 0;
  struct cgraph_node *node;

  /* Nothing to do when the target emits ctors/dtors itself, or when
     FNDECL is neither.  */
  if (targetm.have_ctors_dtors || (!is_ctor && !is_dtor))
    return;

  if (is_ctor)
    {
      VEC_safe_push (tree, gc, static_ctors, fndecl);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
    }

  if (is_dtor)
    {
      VEC_safe_push (tree, gc, static_dtors, fndecl);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
    }

  node = cgraph_node (fndecl);
  node->local.disregard_inline_limits = 1;
  cgraph_mark_reachable_node (node);
}
/* Record that the body of function DECL is complete: mark its cgraph node
   finalized, lower any nested functions, and decide whether the node must
   be kept as needed/reachable.  NESTED is true when DECL is a nested
   function being finalized from within its containing function; in that
   case garbage collection is deferred to the outer finalization.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* If this DECL was finalized before (e.g. redefinition of the body),
     drop the stale callgraph state first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A CFG already being present means the body is already lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  record_cdtor_fn (node->decl);

  /* Lower nested functions now; afterwards none may remain attached.  */
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Nested finalizations leave collection to the outermost caller.  */
  if (!nested)
    ggc_collect ();
}
/* Process the queue of functions created after the callgraph was built
   (cgraph_new_nodes), bringing each one up to date with whatever phase
   the compilation has reached.  Returns true if any function was
   finalized during construction (i.e. new output may be needed).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /*  Note that this queue may grow as its being processed, as the new
      functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      /* Pop the head before processing; handlers below may push more
	 nodes onto cgraph_new_nodes.  */
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  /* Dominance info computed by the passes above is local to this
	     function; drop it before switching functions.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Let registered hooks (e.g. IPA pass summaries) see the node.  */
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}