/* Create a clone of edge E in the callgraph node N, associated with the
   new call statement CALL_STMT (which may be NULL for edges not yet bound
   to a statement; STMT_UID then identifies the statement for LTO streaming).
   The clone's profile count is E's count scaled by COUNT_SCALE and its
   frequency is E's frequency scaled by FREQ_SCALE/CGRAPH_FREQ_BASE.
   When UPDATE_ORIGINAL is true, the cloned count is subtracted from E so
   the combined profile stays consistent.  Returns the new edge.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
                   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  /* Clamp to the representable frequency range.  */
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      /* If the new call statement already has a known callee (e.g. the
         indirect call was devirtualized in the clone), create a direct
         edge to it instead of an indirect one.  */
      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
        {
          struct cgraph_node *callee = cgraph_get_node (decl);
          gcc_checking_assert (callee);
          new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
        }
      else
        {
          /* Still unknown: clone as an indirect edge and copy the
             indirect-call bookkeeping (ECF flags, polymorphic info).  */
          new_edge = cgraph_create_indirect_edge (n, call_stmt,
                                                  e->indirect_info->ecf_flags,
                                                  count, freq);
          *new_edge->indirect_info = *e->indirect_info;
        }
    }
  else
    {
      /* Direct edge: clone to the same callee.  A direct edge may still
         carry indirect_info (e.g. for a formerly-indirect, now-resolved
         call); deep-copy it into freshly allocated GC storage.  */
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
        {
          new_edge->indirect_info
            = ggc_alloc_cleared_cgraph_indirect_call_info ();
          *new_edge->indirect_info = *e->indirect_info;
        }
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;

  /* Keep the profile consistent: what flowed into the clone no longer
     flows through the original edge (never drop below zero).  */
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
        e->count = 0;
    }
  /* Let IPA passes (inliner, propagation) mirror their per-edge data.  */
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
/* Rebuild the callgraph edges and IPA references of the node for
   CURRENT_FUNCTION_DECL from scratch by rescanning every statement of the
   current function body.  Used after transformations that may have added,
   removed or moved call statements.  Always returns 0 (pass todo flags).  */
unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Drop all stale outgoing edges and references before rescanning.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  /* The node's count is the profile count of the function entry.  */
  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              /* Known callee -> direct edge; otherwise an indirect edge
                 carrying the call's ECF flags.  */
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
                                    bb->count, freq);
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          /* Record loads/stores/address-takens as IPA references.  */
          walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
                                         mark_address);
        }
      /* PHI arguments can also reference symbols.  */
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi);
           gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node, mark_load,
                                       mark_store, mark_address);
    }

  /* EH personality/table references live outside the statement stream.  */
  record_eh_tables (node, cfun);

  /* Rebuilding edges of an already-inlined body makes no sense.  */
  gcc_assert (!node->global.inlined_to);
  return 0;
}
/* Insert a call to void pax_track_stack(void) right after the statement at
   *GSI and register the resulting call edge in the callgraph.  */
static void stackleak_add_instrumentation(gimple_stmt_iterator *gsi)
{
	gcall *call;
	cgraph_node_ptr callee;
	basic_block bb;
	int freq;

	// emit the tracking call: void pax_track_stack(void)
	call = as_a_gcall(gimple_build_call(track_function_decl, 0));
	gsi_insert_after(gsi, call, GSI_CONTINUE_LINKING);

	// mirror the new call in the callgraph
	callee = cgraph_get_create_node(track_function_decl);
	gcc_assert(callee);
	bb = gimple_bb(call);
	freq = compute_call_stmt_bb_frequency(current_function_decl, bb);
	// NOTE(review): 6-arg cgraph_create_edge (with bb->loop_depth) is the
	// older GCC API variant — confirm against the targeted GCC version
	cgraph_create_edge(cgraph_get_node(current_function_decl), callee, call, bb->count, freq, bb->loop_depth);
}
/* Insert a call to void pax_check_alloca(unsigned long size) immediately
   before the alloca call at *GSI, forwarding the alloca's size argument,
   and register the resulting call edge in the callgraph.  */
static void stackleak_check_alloca(gimple_stmt_iterator *gsi)
{
	gcall *call;
	tree size_arg;
	cgraph_node_ptr callee;
	basic_block bb;
	int freq;

	// emit the checking call: void pax_check_alloca(unsigned long size)
	size_arg = gimple_call_arg(gsi_stmt(*gsi), 0);
	call = as_a_gcall(gimple_build_call(check_function_decl, 1, size_arg));
	gsi_insert_before(gsi, call, GSI_SAME_STMT);

	// mirror the new call in the callgraph
	callee = cgraph_get_create_node(check_function_decl);
	gcc_assert(callee);
	bb = gimple_bb(call);
	freq = compute_call_stmt_bb_frequency(current_function_decl, bb);
	// NOTE(review): 6-arg cgraph_create_edge (with bb->loop_depth) is the
	// older GCC API variant — confirm against the targeted GCC version
	cgraph_create_edge(cgraph_get_node(current_function_decl), callee, call, bb->count, freq, bb->loop_depth);
}
static unsigned int build_cgraph_edges (void) { basic_block bb; struct cgraph_node *node = cgraph_get_node (current_function_decl); struct pointer_set_t *visited_nodes = pointer_set_create (); gimple_stmt_iterator gsi; tree decl; unsigned ix; /* Create the callgraph edges and record the nodes referenced by the function. body. */ FOR_EACH_BB_FN (bb, cfun) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); tree decl; if (is_gimple_debug (stmt)) continue; if (is_gimple_call (stmt)) { int freq = compute_call_stmt_bb_frequency (current_function_decl, bb); decl = gimple_call_fndecl (stmt); if (decl) cgraph_create_edge (node, cgraph_get_create_node (decl), stmt, bb->count, freq); else if (gimple_call_internal_p (stmt)) ; else cgraph_create_indirect_edge (node, stmt, gimple_call_flags (stmt), bb->count, freq); } ipa_record_stmt_references (node, stmt); if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL && gimple_omp_parallel_child_fn (stmt)) { tree fn = gimple_omp_parallel_child_fn (stmt); ipa_record_reference (node, cgraph_get_create_node (fn), IPA_REF_ADDR, stmt); } if (gimple_code (stmt) == GIMPLE_OMP_TASK) { tree fn = gimple_omp_task_child_fn (stmt); if (fn) ipa_record_reference (node, cgraph_get_create_node (fn), IPA_REF_ADDR, stmt); fn = gimple_omp_task_copy_fn (stmt); if (fn) ipa_record_reference (node, cgraph_get_create_node (fn), IPA_REF_ADDR, stmt); } } for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) ipa_record_stmt_references (node, gsi_stmt (gsi)); } /* Look for initializers of constant variables and private statics. */ FOR_EACH_LOCAL_DECL (cfun, ix, decl) if (TREE_CODE (decl) == VAR_DECL && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) && !DECL_HAS_VALUE_EXPR_P (decl)) varpool_finalize_decl (decl); record_eh_tables (node, cfun); pointer_set_destroy (visited_nodes); return 0; }