/* Construct SELF, a cli_out_data, with STREAM as the sole entry on its
   output-stream stack.  Output suppression starts disabled.  */

void
cli_out_data_ctor (cli_out_data *self, struct ui_file *stream)
{
  gdb_assert (stream != NULL);

  self->suppress_output = 0;

  /* Begin with an empty stack, then seed it with the initial stream.  */
  self->streams = NULL;
  VEC_safe_push (ui_filep, self->streams, stream);
}
/* Queue every statement that uses VAR onto an SSA-edge worklist so the
   propagator will revisit it: the varying worklist when IS_VARYING,
   otherwise the interesting one.  A statement is queued at most once,
   tracked by its STMT_IN_SSA_EDGE_WORKLIST plf bit, and only if the
   propagator wants it simulated again.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      /* Skip statements the propagator does not want re-simulated.  */
      if (!prop_simulate_again_p (use_stmt))
	continue;

      /* Skip statements already sitting on a worklist.  */
      if (gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
      if (is_varying)
	VEC_safe_push (gimple, gc, varying_ssa_edges, use_stmt);
      else
	VEC_safe_push (gimple, gc, interesting_ssa_edges, use_stmt);
    }
}
/* Queue every statement that uses VAR onto an SSA-edge worklist so the
   propagator will revisit it: the varying worklist when IS_VARYING,
   otherwise the interesting one.  Each statement is queued at most
   once, tracked by STMT_IN_SSA_EDGE_WORKLIST, and only if it is not
   marked DONT_SIMULATE_AGAIN.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      tree use_stmt = USE_STMT (use_p);

      /* Skip statements the propagator has finished with.  */
      if (DONT_SIMULATE_AGAIN (use_stmt))
	continue;

      /* Skip statements already sitting on a worklist.  */
      if (STMT_IN_SSA_EDGE_WORKLIST (use_stmt))
	continue;

      STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
      if (is_varying)
	VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
      else
	VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
    }
}
/* Append a new branch-trace instruction with address PC to BFUN's
   instruction vector.  */

static void
ftrace_update_insns (struct btrace_function *bfun, CORE_ADDR pc)
{
  struct btrace_insn *slot;

  /* Reserve a fresh slot at the end of the vector, then fill it in.  */
  slot = VEC_safe_push (btrace_insn_s, bfun->insn, NULL);
  slot->pc = pc;

  if (record_debug > 1)
    ftrace_debug (bfun, "update insn");
}
/* Initialize SPACE, a stack-allocated buffer, as an empty vector with
   room for ALLOC elements, and register it in stack_vecs so the vector
   machinery can recognize stack-based vectors later.  Returns SPACE.  */

void *
vec_stack_p_reserve_exact_1 (int alloc, void *space)
{
  struct vec_prefix *pfx = (struct vec_prefix *) space;

  /* Set up the vector header: empty, with the given capacity.  */
  pfx->num = 0;
  pfx->alloc = alloc;

  /* Remember that this vector lives on the stack.  */
  VEC_safe_push (void_p, heap, stack_vecs, space);

  return space;
}
/* ui_out redirect hook for the CLI: push OUTSTREAM onto the stream
   stack, or pop the current redirection when OUTSTREAM is NULL.
   Always returns 0 (success).  */

static int
cli_redirect (struct ui_out *uiout, struct ui_file *outstream)
{
  cli_out_data *data = ui_out_data (uiout);

  if (outstream == NULL)
    VEC_pop (ui_filep, data->streams);
  else
    VEC_safe_push (ui_filep, data->streams, outstream);

  return 0;
}
/* Store T at slot IX of CACHE's node array.  IX must either address an
   existing slot (replace) or be exactly one past the end (append).  */

static void
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
				       unsigned ix, tree t)
{
  unsigned len = VEC_length (tree, cache->nodes);

  /* Make sure we're either replacing an old element or appending
     consecutively.  */
  gcc_assert (ix <= len);

  if (ix < len)
    VEC_replace (tree, cache->nodes, ix, t);
  else
    VEC_safe_push (tree, heap, cache->nodes, t);
}
/* Emit a zero-argument call to DECL immediately before STMT, and
   record DECL in analyized_fns so later passes leave it alone.  */
static void insert_call(gimple stmt, tree decl)
{
    gimple new_call;
    gimple_stmt_iterator where;

    /* Build the call and splice it in ahead of STMT.  */
    new_call = gimple_build_call(decl, 0);
    where = gsi_for_stmt(stmt);
    gsi_insert_before(&where, new_call, GSI_NEW_STMT);

    /* So we don't process this bad-boy.  */
    VEC_safe_push(tree, gc, analyized_fns, decl);
}
/* Parse the arguments of a Scheme-visible function per FORMAT, storing
   the extracted values through the pointer arguments in the va_list.
   FUNC_NAME and BEGINNING_ARG_POS are used for error reporting, and
   KEYWORDS lists the accepted keyword arguments.
   NOTE(review): this chunk is truncated -- the keyword/rest processing,
   the `fail' label targeted by the goto below, and the function's
   closing brace lie beyond the visible source.  */
void
gdbscm_parse_function_args (const char *func_name,
			    int beginning_arg_pos,
			    const SCM *keywords,
			    const char *format, ...)
{
  va_list args;
  const char *p;
  int i, have_rest, num_keywords, length, position;
  int have_optional = 0;
  SCM status;
  SCM rest = SCM_EOL;
  /* Keep track of malloc'd strings.  We need to free them upon error.  */
  VEC (char_ptr) *allocated_strings = NULL;
  char *ptr;

  have_rest = validate_arg_format (format);
  num_keywords = count_keywords (keywords);

  va_start (args, format);

  p = format;
  position = beginning_arg_pos;

  /* Process required, optional arguments.  */
  while (*p && *p != '#' && *p != '.')
    {
      SCM arg;
      void *arg_ptr;

      /* '|' in FORMAT marks the start of the optional arguments.  */
      if (*p == '|')
	{
	  have_optional = 1;
	  ++p;
	  continue;
	}

      arg = va_arg (args, SCM);
      /* Optional arguments that were not supplied arrive as
	 SCM_UNDEFINED and are simply skipped.  */
      if (!have_optional || !SCM_UNBNDP (arg))
	{
	  arg_ptr = va_arg (args, void *);
	  status = extract_arg (*p, arg, arg_ptr, func_name, position);
	  if (!gdbscm_is_false (status))
	    goto fail;
	  /* 's' extractions malloc a string; remember it so the error
	     path can free it.  */
	  if (*p == 's')
	    VEC_safe_push (char_ptr, allocated_strings, *(char **) arg_ptr);
	}
      ++p;
      ++position;
    }
/* Return a fresh SSA_NAME node for variable VAR, whose defining
   statement is STMT.  Recycles a node from cfun's free list when one
   is available (reusing its version number); otherwise allocates a new
   node with the next version and appends it to SSANAMES (cfun).  The
   node's immediate-use list is initialized to empty.  */
tree
make_ssa_name (tree var, tree stmt)
{
  tree t;
  use_operand_p imm;

  gcc_assert (DECL_P (var)
	      || TREE_CODE (var) == INDIRECT_REF);

  gcc_assert (!stmt || EXPR_P (stmt) || GIMPLE_STMT_P (stmt)
	      || TREE_CODE (stmt) == PHI_NODE);

  /* If our free list has an element, then use it.  */
  if (FREE_SSANAMES (cfun))
    {
      t = FREE_SSANAMES (cfun);
      /* Unlink the recycled node from the free list (chained via
	 TREE_CHAIN).  */
      FREE_SSANAMES (cfun) = TREE_CHAIN (FREE_SSANAMES (cfun));
#ifdef GATHER_STATISTICS
      ssa_name_nodes_reused++;
#endif
      /* The node was cleared out when we put it on the free list, so
	 there is no need to do so again here.  */
      gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
      /* Re-install the node at its old version slot.  */
      VEC_replace (tree, SSANAMES (cfun), SSA_NAME_VERSION (t), t);
    }
  else
    {
      t = make_node (SSA_NAME);
      /* Brand-new name: next unused version, appended to the table.  */
      SSA_NAME_VERSION (t) = num_ssa_names;
      VEC_safe_push (tree, gc, SSANAMES (cfun), t);
#ifdef GATHER_STATISTICS
      ssa_name_nodes_created++;
#endif
    }

  TREE_TYPE (t) = TREE_TYPE (var);
  SSA_NAME_VAR (t) = var;
  SSA_NAME_DEF_STMT (t) = stmt;
  SSA_NAME_PTR_INFO (t) = NULL;
  SSA_NAME_IN_FREE_LIST (t) = 0;
  SSA_NAME_IS_DEFAULT_DEF (t) = 0;

  /* Initialize the immediate-use list to a self-linked (empty) ring.  */
  imm = &(SSA_NAME_IMM_USE_NODE (t));
  imm->use = NULL;
  imm->prev = imm;
  imm->next = imm;
  imm->stmt = t;

  return t;
}
/* Process the defs in DEF_REC, a NULL-terminated array of df_refs,
   keeping only those whose DF_REF_AT_TOP flag matches TOP_FLAG.
   Updates reg_defs (current def per register number), the local_md
   bitmap, and pushes undo information on reg_defs_stack -- presumably
   for a leave_block callback to restore, per the comment below.  */
static void
process_defs (df_ref *def_rec, int top_flag)
{
  df_ref def;
  while ((def = *def_rec++) != NULL)
    {
      /* The def currently recorded for this register, if any.  */
      df_ref curr_def = VEC_index (df_ref, reg_defs, DF_REF_REGNO (def));
      unsigned int dregno;

      /* Only handle defs on the requested side of the block.  */
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) != top_flag)
	continue;

      dregno = DF_REF_REGNO (def);
      if (curr_def)
	/* Save the old def so it can be restored on block exit.  */
	VEC_safe_push (df_ref, heap, reg_defs_stack, curr_def);
      else
	{
	  /* Do not store anything if "transitioning" from NULL to NULL.
	     But otherwise, push a special entry on the stack to tell the
	     leave_block callback that the entry in reg_defs was NULL.  */
	  if (DF_REF_FLAGS (def) & DF_MD_GEN_FLAGS)
	    ;
	  else
	    VEC_safe_push (df_ref, heap, reg_defs_stack, def);
	}

      if (DF_REF_FLAGS (def) & DF_MD_GEN_FLAGS)
	{
	  /* Partial/conditional def: the register's reaching def
	     becomes unknown from here on.  */
	  bitmap_set_bit (local_md, dregno);
	  VEC_replace (df_ref, reg_defs, dregno, NULL);
	}
      else
	{
	  /* Unconditional def: DEF is now the unique reaching def.  */
	  bitmap_clear_bit (local_md, dregno);
	  VEC_replace (df_ref, reg_defs, dregno, def);
	}
    }
}
/* Record FNDECL in static_ctors/static_dtors if it is a static
   constructor or destructor that the target cannot emit natively,
   clearing the corresponding DECL flag, and make sure the cgraph keeps
   it reachable and free of inline limits.  */

static void
record_cdtor_fn (tree fndecl)
{
  struct cgraph_node *cnode;

  /* Nothing to collect when the target supports ctors/dtors directly,
     or when FNDECL is neither kind.  */
  if (targetm.have_ctors_dtors
      || (!DECL_STATIC_CONSTRUCTOR (fndecl)
	  && !DECL_STATIC_DESTRUCTOR (fndecl)))
    return;

  if (DECL_STATIC_DESTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_dtors, fndecl);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
    }

  if (DECL_STATIC_CONSTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_ctors, fndecl);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
    }

  cnode = cgraph_node (fndecl);
  cnode->local.disregard_inline_limits = 1;
  cgraph_mark_reachable_node (cnode);
}
/* Return a freshly allocated GC vector containing the virtual uses of
   STMT, or NULL when STMT is NULL (a statement without VUSEs yields
   NULL as well, since the vector is lazily created by the push).  */

static VEC (tree, gc) *
copy_vuses_from_stmt (tree stmt)
{
  VEC (tree, gc) *result = NULL;
  ssa_op_iter iter;
  tree vuse;

  if (stmt == NULL)
    return NULL;

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
    VEC_safe_push (tree, gc, result, vuse);

  return result;
}
/* Queue OBJ for mudflap registration at mudflap_finish_file () time.
   Objects already marked are skipped, as are compiler-generated extern
   decls: warning about those later would point the user at variables
   that never appear in their source code.  */

void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj)
      || (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj)))
    return;

  VEC_safe_push (tree, gc, deferred_static_decls, obj);
}
/* Record STATE as the output decl state of function FN_DECL and push
   it onto lto_function_decl_states.  The per-stream hash tables are
   freed first to save memory; only the vectors remain.  */

void
lto_record_function_out_decl_state (tree fn_decl,
				    struct lto_out_decl_state *state)
{
  int i;

  /* Strip all hash tables to save some memory.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      if (state->streams[i].tree_hash_table == NULL)
	continue;
      htab_delete (state->streams[i].tree_hash_table);
      state->streams[i].tree_hash_table = NULL;
    }

  state->fn_decl = fn_decl;
  VEC_safe_push (lto_out_decl_state_ptr, heap, lto_function_decl_states,
		 state);
}
/* Return a fresh SSA_NAME node in function FN for variable VAR, whose
   defining statement is STMT.  Recycles a node from FN's free list
   when one is available (reusing its version number); otherwise
   allocates a new node with the next version and appends it to
   SSANAMES (fn).  The node's immediate-use list starts empty.  */
tree
make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
{
  tree t;
  use_operand_p imm;

  gcc_assert (DECL_P (var));

  /* If our free list has an element, then use it.  */
  if (FREE_SSANAMES (fn))
    {
      t = FREE_SSANAMES (fn);
      /* Unlink the recycled node from the free list (chained via
	 TREE_CHAIN).  */
      FREE_SSANAMES (fn) = TREE_CHAIN (FREE_SSANAMES (fn));
#ifdef GATHER_STATISTICS
      ssa_name_nodes_reused++;
#endif
      /* The node was cleared out when we put it on the free list, so
	 there is no need to do so again here.  */
      gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
      /* Re-install the node at its old version slot.  */
      VEC_replace (tree, SSANAMES (fn), SSA_NAME_VERSION (t), t);
    }
  else
    {
      t = make_node (SSA_NAME);
      /* Brand-new name: next unused version, appended to the table.  */
      SSA_NAME_VERSION (t) = VEC_length (tree, SSANAMES (fn));
      VEC_safe_push (tree, gc, SSANAMES (fn), t);
#ifdef GATHER_STATISTICS
      ssa_name_nodes_created++;
#endif
    }

  TREE_TYPE (t) = TREE_TYPE (var);
  SSA_NAME_VAR (t) = var;
  SSA_NAME_DEF_STMT (t) = stmt;
  SSA_NAME_PTR_INFO (t) = NULL;
  SSA_NAME_IN_FREE_LIST (t) = 0;
  SSA_NAME_IS_DEFAULT_DEF (t) = 0;

  /* Initialize the immediate-use list to a self-linked (empty) ring.  */
  imm = &(SSA_NAME_IMM_USE_NODE (t));
  imm->use = NULL;
  imm->prev = imm;
  imm->next = imm;
  imm->loc.ssa_name = t;

  return t;
}
/* Insert call to __slimer_init to initalize things at runtime */ static void insert_slimer_init(void) { int i; gimple stmt; tree decl, proto; proto = build_function_type_list(void_type_node, integer_type_node, NULL_TREE); decl = build_fn_decl("__slimer_init", proto); stmt = gsi_stmt(gsi_start_bb(ENTRY_BLOCK_PTR->next_bb)); insert_call(stmt, decl); for (i=0; i<n_funcs; ++i) insert_add_fn(stmt, i); /* Add this fndecl to our list of things we do not process */ VEC_safe_push(tree, gc, analyized_fns, decl); }
/* Try to merge every symbol-table entry chained after the prevailing
   one in *SLOT into the prevailing declaration, and diagnose the
   entries that fail to merge.  DIAGNOSED_P indicates a diagnostic was
   already emitted for this symbol, in which case only the "previously
   declared here" note is suppressed/controlled by the warning_at
   results accumulated below.  */
static void
lto_symtab_merge_decls_2 (void **slot, bool diagnosed_p)
{
  lto_symtab_entry_t prevailing, e;
  VEC(tree, heap) *mismatches = NULL;
  unsigned i;
  tree decl;

  /* Nothing to do for a single entry.  */
  prevailing = (lto_symtab_entry_t) *slot;
  if (!prevailing->next)
    return;

  /* Try to merge each entry with the prevailing one.  */
  for (e = prevailing->next; e; e = e->next)
    {
      /* Collect the decls that could not be merged; they are only
	 diagnosed when no diagnostic has been emitted yet.  */
      if (!lto_symtab_merge (prevailing, e)
	  && !diagnosed_p)
	VEC_safe_push (tree, heap, mismatches, e->decl);
    }
  if (VEC_empty (tree, mismatches))
    return;

  /* Diagnose all mismatched re-declarations.  */
  FOR_EACH_VEC_ELT (tree, mismatches, i, decl)
    {
      if (!types_compatible_p (TREE_TYPE (prevailing->decl), TREE_TYPE (decl)))
	diagnosed_p |= warning_at (DECL_SOURCE_LOCATION (decl), 0,
				   "type of %qD does not match original "
				   "declaration", decl);

      else if ((DECL_USER_ALIGN (prevailing->decl) && DECL_USER_ALIGN (decl))
	       && DECL_ALIGN (prevailing->decl) < DECL_ALIGN (decl))
	{
	  diagnosed_p |= warning_at (DECL_SOURCE_LOCATION (decl), 0,
				     "alignment of %qD is bigger than "
				     "original declaration", decl);
	}
    }
  /* Point at the prevailing declaration once anything was diagnosed.  */
  if (diagnosed_p)
    inform (DECL_SOURCE_LOCATION (prevailing->decl),
	    "previously declared here");

  VEC_free (tree, heap, mismatches);
}
/* Allocate a new polyhedral data reference describing an access of
   kind TYPE with NB_SUBSCRIPTS subscripts in base-object set
   DR_BASE_OBJECT_SET, with access polyhedron ACCESSES and underlying
   compiler data reference CDR, and attach it to PBB's PDR list.  Ids
   are handed out sequentially across all PDRs.  */

void
new_poly_dr (poly_bb_p pbb, int dr_base_object_set,
	     ppl_Pointset_Powerset_C_Polyhedron_t accesses,
	     enum poly_dr_type type, void *cdr, graphite_dim_t nb_subscripts)
{
  static int id = 0;
  poly_dr_p dr = XNEW (struct poly_dr);

  PDR_ID (dr) = id++;
  PDR_NB_REFS (dr) = 1;
  PDR_BASE_OBJECT_SET (dr) = dr_base_object_set;
  PDR_PBB (dr) = pbb;
  PDR_ACCESSES (dr) = accesses;
  PDR_TYPE (dr) = type;
  PDR_CDR (dr) = cdr;
  PDR_NB_SUBSCRIPTS (dr) = nb_subscripts;

  VEC_safe_push (poly_dr_p, heap, PBB_DRS (pbb), dr);
}
/* Return the virtual uses of STMT in the shared, reused vector
   shared_lookup_vuses (sorted when there is more than one entry), or
   NULL when STMT is NULL.  Callers must not keep the result across a
   subsequent call, since the buffer is recycled.  */

static VEC (tree, gc) *
shared_vuses_from_stmt (tree stmt)
{
  ssa_op_iter iter;
  tree vuse;

  if (stmt == NULL)
    return NULL;

  /* Recycle the shared buffer instead of allocating a new vector.  */
  VEC_truncate (tree, shared_lookup_vuses, 0);

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
    VEC_safe_push (tree, gc, shared_lookup_vuses, vuse);

  if (VEC_length (tree, shared_lookup_vuses) > 1)
    sort_vuses (shared_lookup_vuses);

  return shared_lookup_vuses;
}
/* Return PHI to the free-node pool.  Every argument is first unlinked
   from its immediate-use chain; the node then goes into the free
   bucket chosen by its capacity (buckets are offset by 2, and
   capacities above NUM_BUCKETS - 1 all share the last bucket).  */

void
release_phi_node (gimple phi)
{
  size_t capacity = gimple_phi_capacity (phi);
  size_t nargs = gimple_phi_num_args (phi);
  size_t i, bucket;

  /* Unlink each argument from its immediate-use chain.  */
  for (i = 0; i < nargs; i++)
    delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));

  bucket = (capacity > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : capacity) - 2;
  VEC_safe_push (gimple, gc, free_phinodes[bucket], phi);
  free_phinode_count++;
}
/* Compile a Java resource: build a static RECORD_TYPE variable holding
   the resource name length, the data length, and a byte array of
   strlen (NAME) + LENGTH bytes taken from BUFFER (BUFFER apparently
   holds NAME followed by the resource bytes -- confirm with callers),
   emit the decl, and record it in the `resources' vector.  */
void
compile_resource_data (const char *name, const char *buffer, int length)
{
  tree rtype, field = NULL_TREE, data_type, rinit, data, decl;
  VEC(constructor_elt,gc) *v = NULL;

  /* Record layout: { unsigned name_length; unsigned resource_length;
     unsigned byte data[strlen (name) + length]; }.  */
  data_type = build_prim_array_type (unsigned_byte_type_node,
				     strlen (name) + length);
  rtype = make_node (RECORD_TYPE);
  PUSH_FIELD (input_location,
	      rtype, field, "name_length", unsigned_int_type_node);
  PUSH_FIELD (input_location,
	      rtype, field, "resource_length", unsigned_int_type_node);
  PUSH_FIELD (input_location, rtype, field, "data", data_type);
  FINISH_RECORD (rtype);

  /* Build the constant initializer for the record.  */
  START_RECORD_CONSTRUCTOR (v, rtype);
  PUSH_FIELD_VALUE (v, "name_length",
		    build_int_cst (NULL_TREE, strlen (name)));
  PUSH_FIELD_VALUE (v, "resource_length", build_int_cst (NULL_TREE, length));
  data = build_string (strlen(name) + length, buffer);
  TREE_TYPE (data) = data_type;
  PUSH_FIELD_VALUE (v, "data", data);
  FINISH_RECORD_CONSTRUCTOR (rinit, v, rtype);
  TREE_CONSTANT (rinit) = 1;

  /* Build a static, read-only variable with a mangled resource name
     and hand it to the middle end.  */
  decl = build_decl (input_location,
		     VAR_DECL, java_mangle_resource_name (name), rtype);
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  java_hide_decl (decl);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_THIS_VOLATILE (decl) = 0;
  DECL_INITIAL (decl) = rinit;
  layout_decl (decl, 0);
  pushdecl (decl);
  rest_of_decl_compilation (decl, global_bindings_p (), 0);
  varpool_finalize_decl (decl);

  /* Remember the decl in the resources vector.  */
  VEC_safe_push (tree, gc, resources, decl);
}
/* Pass entry point: walk the current function's statements and, with
   probability 1/2 and while the max_calls budget lasts, insert a call
   to a junk function before each call/assignment statement.  For
   `main', first inject the __slimer_init setup call.  Functions
   already handled (or external) are skipped, and the function is
   recorded in analyized_fns when done.  Always returns 0.  */
static unsigned int slimer_exec(void)
{
    basic_block bb;
    gimple stmt;
    gimple_stmt_iterator gsi;

    /* Skip functions we already junkified and bodies we do not own.  */
    if (has_been_processed(cfun->decl))
      return 0;

    if (DECL_EXTERNAL(cfun->decl))
      return 0;

    /* Only `main' gets the runtime initialization call.  */
    if (get_identifier(get_name(cfun->decl)) == get_identifier("main"))
      insert_slimer_init();

    /* Go through the basic blocks of this function */
    FOR_EACH_BB(bb)
      for (gsi=gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi))
      {
          stmt = gsi_stmt(gsi);
          if (is_gimple_call(stmt) || is_gimple_assign(stmt))
          {
              /* If its a call to a function we added already (junk or some
               * initlization functions), or a function we have previously
               * analyized, avoid inserting junk data.
               *
               * NOTE(review): this guard skips when the callee has NOT
               * been processed, which looks inverted relative to the
               * comment above -- confirm the intended semantics of
               * has_been_processed() before changing anything.
               */
              if (is_gimple_call(stmt) &&
                  !has_been_processed(gimple_call_fn(stmt)))
                continue;
              else if ((max_calls > 0) && ((rand() % 2) == 0))
              {
                  insert_call_to_junk_fn(stmt);
                  --max_calls;
              }
          }
      }

    /* Mark as being analyized so we avoid trying to junkify it again */
    VEC_safe_push(tree, gc, analyized_fns, cfun->decl);
    return 0;
}
/* Rebuild the SSA operand cache of STMT.  Does nothing before the SSA
   operand machinery is active.  Noreturn calls are additionally queued
   so split_bbs_on_noreturn_calls can fix up the CFG during cleanup.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
/* Record BINFO as a possible type of the I-th parameter of
   CALLEE_INFO.  Returns true when the list changed.  Returns false for
   duplicates, for parameters already marked non-devirtualizable, and
   when the list has hit PARAM_DEVIRT_TYPE_LIST_SIZE -- in which case
   the parameter is marked non-devirtualizable from now on.  */

static bool
ipcp_add_param_type (struct ipa_node_params *callee_info, int i, tree binfo)
{
  int j, n;

  if (ipa_param_cannot_devirtualize_p (callee_info, i))
    return false;

  /* Duplicates never change the list.  */
  if (callee_info->params[i].types)
    {
      n = VEC_length (tree, callee_info->params[i].types);
      for (j = 0; j < n; j++)
	if (VEC_index (tree, callee_info->params[i].types, j) == binfo)
	  return false;
    }

  /* List full: give up on devirtualizing this parameter.  */
  if (VEC_length (tree, callee_info->params[i].types)
      == (unsigned) PARAM_VALUE (PARAM_DEVIRT_TYPE_LIST_SIZE))
    return !ipa_set_param_cannot_devirtualize (callee_info, i);

  VEC_safe_push (tree, heap, callee_info->params[i].types, binfo);
  return true;
}
/* If INSN contains a memory reference whose address is worth
   profiling for prefetching, append a HIST_TYPE_CONST_DELTA histogram
   for that address to *VALUES and return true; otherwise return
   false.  Constant addresses and addresses with side effects are
   rejected.  */

static bool
insn_prefetch_values_to_profile (rtx insn, histogram_values *values)
{
  rtx mem, address;
  int is_write;
  histogram_value hist;

  /* It only makes sense to look for memory references in ordinary
     insns.  */
  if (GET_CODE (insn) != INSN)
    return false;

  if (!find_mem_reference (insn, &mem, &is_write))
    return false;

  address = XEXP (mem, 0);
  if (side_effects_p (address))
    return false;

  /* APPLE LOCAL begin should be in FSF, and has been submitted.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;
  /* APPLE LOCAL end should be in FSF, and has been submitted.  */

  if (CONSTANT_P (address))
    return false;

  hist = ggc_alloc (sizeof (*hist));
  hist->value = address;
  hist->mode = GET_MODE (address);
  hist->seq = NULL_RTX;
  hist->insn = insn;
  hist->type = HIST_TYPE_CONST_DELTA;
  VEC_safe_push (histogram_value, *values, hist);

  return true;
}
/* Insert a call to the runtime function "__slimer_add_fn" which will add the
 * "junk" function created at compile-time to an array at runtime.
 * STMT anchors the insertion point; INDEX selects the fake function.
 * NOTE(review): `decl', `proto' and `idx' are static trees without a
 * visible GTY root -- presumably safe because decl is pushed onto the
 * GC-rooted analyized_fns vector, but worth confirming.
 */
static void insert_add_fn(gimple stmt, int index)
{
    tree fn_addr;
    gimple call;
    gimple_stmt_iterator where;
    static tree decl, proto, idx;

    /* Build the `void __slimer_add_fn(void *, int)' decl exactly once
     * and remember it across calls.
     */
    if (decl == NULL_TREE || proto == NULL_TREE)
    {
        proto = build_function_type_list(void_type_node, ptr_type_node,
                                         integer_type_node, NULL_TREE);
        decl = build_fn_decl("__slimer_add_fn", proto);

        /* Add this fndecl to our list of things we do not process */
        VEC_safe_push(tree, gc, analyized_fns, decl);
    }

    /* Create a constant value and pointer to the function we are to add */
    idx = build_int_cst(integer_type_node, index);
    fn_addr = build_addr(VEC_index(tree, fakes, index), NULL_TREE);

    call = gimple_build_call(decl, 2, fn_addr, idx);
    where = gsi_for_stmt(stmt);
    gsi_insert_before(&where, call, GSI_NEW_STMT);
}
/* Return SSA_NAME VAR to cfun's free list so its version number can be
   recycled by make_ssa_name.  No-op for NULL, for default definitions,
   for names registered for SSA update (those are queued instead), and
   for names already on the free list.  */
void
release_ssa_name (tree var)
{
  if (!var)
    return;

  /* Never release the default definition for a symbol.  It's a
     special SSA name that should always exist once it's created.  */
  if (SSA_NAME_IS_DEFAULT_DEF (var))
    return;

  /* If VAR has been registered for SSA updating, don't remove it.
     After update_ssa has run, the name will be released.  */
  if (name_registered_for_update_p (var))
    {
      release_ssa_name_after_update_ssa (var);
      return;
    }

  /* release_ssa_name can be called multiple times on a single SSA_NAME.
     However, it should only end up on our free list one time.  We
     keep a status bit in the SSA_NAME node itself to indicate it has
     been put on the free list.

     Note that once on the freelist you can not reference the SSA_NAME's
     defining statement.  */
  if (! SSA_NAME_IN_FREE_LIST (var))
    {
      tree saved_ssa_name_var = SSA_NAME_VAR (var);
      int saved_ssa_name_version = SSA_NAME_VERSION (var);
      use_operand_p imm = &(SSA_NAME_IMM_USE_NODE (var));

      if (MAY_HAVE_DEBUG_STMTS)
	insert_debug_temp_for_var_def (NULL, var);

#ifdef ENABLE_CHECKING
      verify_imm_links (stderr, var);
#endif
      /* Unlink any remaining immediate uses of VAR.  */
      while (imm->next != imm)
	delink_imm_use (imm->next);

      /* Free the version slot in the name table.  */
      VEC_replace (tree, SSANAMES (cfun),
		   SSA_NAME_VERSION (var), NULL_TREE);
      /* Wipe the node, then rebuild just enough of it (code, version,
	 var, empty imm-use ring) for the free list and tree-checking
	 macros to be happy.  */
      memset (var, 0, tree_size (var));

      imm->prev = imm;
      imm->next = imm;
      imm->loc.ssa_name = var;

      /* First put back the right tree node so that the tree checking
	 macros do not complain.  */
      TREE_SET_CODE (var, SSA_NAME);

      /* Restore the version number.  */
      SSA_NAME_VERSION (var) = saved_ssa_name_version;

      /* Hopefully this can go away once we have the new incremental
         SSA updating code installed.  */
      SSA_NAME_VAR (var) = saved_ssa_name_var;

      /* Note this SSA_NAME is now in the first list.  */
      SSA_NAME_IN_FREE_LIST (var) = 1;

      /* And finally put it on the free list.  */
      VEC_safe_push (tree, gc, FREE_SSANAMES (cfun), var);
    }
}
/* Iteratively walk the dominator tree starting at BB, invoking the
   callbacks in WALK_DATA before and after visiting each block's
   dominator children (and optionally over each statement).  A manual
   worklist replaces recursion: a NULL entry on the worklist marks
   where the block beneath it should receive its "after children"
   callbacks.  Per-block local data is kept on block_data_stack and
   recycled through free_block_data.  */
void
walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
{
  void *bd = NULL;
  basic_block dest;
  block_stmt_iterator bsi;
  bool is_interesting;
  /* Worst case: every block appears once as itself, once as a NULL
     pop marker.  */
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);
  int sp = 0;

  while (true)
    {
      /* Don't worry about unreachable blocks.  */
      if (EDGE_COUNT (bb->preds) > 0
	  || bb == ENTRY_BLOCK_PTR
	  || bb == EXIT_BLOCK_PTR)
	{
	  /* If block BB is not interesting to the caller, then none of the
	     callbacks that walk the statements in BB are going to be
	     executed.  */
	  is_interesting = walk_data->interesting_blocks == NULL
	    || TEST_BIT (walk_data->interesting_blocks, bb->index);

	  /* Callback to initialize the local data structure.  */
	  if (walk_data->initialize_block_local_data)
	    {
	      bool recycled;

	      /* First get some local data, reusing any local data pointer we may
		 have saved.  */
	      if (VEC_length (void_p, walk_data->free_block_data) > 0)
		{
		  bd = VEC_pop (void_p, walk_data->free_block_data);
		  recycled = 1;
		}
	      else
		{
		  bd = xcalloc (1, walk_data->block_local_data_size);
		  recycled = 0;
		}

	      /* Push the local data into the local data stack.  */
	      VEC_safe_push (void_p, heap, walk_data->block_data_stack, bd);

	      /* Call the initializer.  */
	      walk_data->initialize_block_local_data (walk_data, bb, recycled);
	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->before_dom_children_before_stmts)
	    (*walk_data->before_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk before walking dominator children.  */
	  if (is_interesting && walk_data->before_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
								bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
								bsi);
	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, and after we walk statements.  */
	  if (walk_data->before_dom_children_after_stmts)
	    (*walk_data->before_dom_children_after_stmts) (walk_data, bb);

	  /* Mark the current BB to be popped out of the recursion stack
	     once childs are processed.  */
	  worklist[sp++] = bb;
	  worklist[sp++] = NULL;

	  /* Schedule all dominator children for a "before" visit.  */
	  for (dest = first_dom_son (walk_data->dom_direction, bb);
	       dest; dest = next_dom_son (walk_data->dom_direction, dest))
	    worklist[sp++] = dest;
	}

      /* NULL is used to signalize pop operation in recursion stack.  */
      while (sp > 0 && !worklist[sp - 1])
	{
	  /* Drop the NULL marker and recover the block whose children
	     have all been processed.  */
	  --sp;
	  bb = worklist[--sp];
	  is_interesting = walk_data->interesting_blocks == NULL
	    || TEST_BIT (walk_data->interesting_blocks, bb->index);

	  /* Callback for operations to execute after we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->after_dom_children_before_stmts)
	    (*walk_data->after_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk after walking dominator children.  */
	  if (is_interesting && walk_data->after_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
							       bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
							       bsi);
	    }

	  /* Callback for operations to execute after we have walked the
	     dominator children and after we have walked statements.  */
	  if (walk_data->after_dom_children_after_stmts)
	    (*walk_data->after_dom_children_after_stmts) (walk_data, bb);

	  if (walk_data->initialize_block_local_data)
	    {
	      /* And finally pop the record off the block local data stack.  */
	      bd = VEC_pop (void_p, walk_data->block_data_stack);

	      /* And save the block data so that we can re-use it.  */
	      VEC_safe_push (void_p, heap, walk_data->free_block_data, bd);
	    }
	}

      /* Continue with the next scheduled block, or stop when the
	 worklist is exhausted.  */
      if (sp)
	bb = worklist[--sp];
      else
	break;
    }
  free (worklist);
}
/* Propagate the constant parameters found by ipcp_iterate_stage()
   to the function's code.  Candidate functions are ordered in a
   fibonacci heap by estimated cloning cost and cloned greedily until
   the unit-growth budget (PARAM_IPCP_UNIT_GROWTH over the unit size,
   floored at PARAM_LARGE_UNIT_INSNS) is exhausted.  */
static void
ipcp_insert_stage (void)
{
  struct cgraph_node *node, *node1 = NULL;
  int i;
  VEC (cgraph_edge_p, heap) * redirect_callers;
  VEC (ipa_replace_map_p,gc)* replace_trees;
  int node_callers, count;
  tree parm_tree;
  struct ipa_replace_map *replace_param;
  fibheap_t heap;
  long overall_size = 0, new_size = 0;
  long max_new_size;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  if (dump_file)
    fprintf (dump_file, "\nIPA insert stage:\n\n");

  dead_nodes = BITMAP_ALLOC (NULL);

  /* Gather unit size and the hottest node count to size the budget.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
      {
	if (node->count > max_count)
	  max_count = node->count;
	overall_size += node->local.inline_summary.self_size;
      }

  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size = max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  /* First collect all functions we proved to have constant arguments to
     heap.  */
  heap = fibheap_new ();
  for (node = cgraph_nodes; node; node = node->next)
    {
      struct ipa_node_params *info;
      /* Propagation of the constant is forbidden in certain conditions.  */
      if (!node->analyzed || !ipcp_node_modifiable_p (node))
	continue;
      info = IPA_NODE_REF (node);
      if (ipa_is_called_with_var_arguments (info))
	continue;
      if (ipcp_const_param_count (node))
	node->aux = fibheap_insert (heap, ipcp_estimate_cloning_cost (node),
				    node);
    }

  /* Now clone in priority order until code size growth limits are met or
     heap is emptied.  */
  while (!fibheap_empty (heap))
    {
      struct ipa_node_params *info;
      int growth = 0;
      bitmap args_to_skip;
      struct cgraph_edge *cs;

      node = (struct cgraph_node *)fibheap_extract_min (heap);
      node->aux = NULL;
      if (dump_file)
	fprintf (dump_file, "considering function %s\n",
		 cgraph_node_name (node));

      growth = ipcp_estimate_growth (node);

      /* Remaining heap entries are at least this costly: stop here.  */
      if (new_size + growth > max_new_size)
	break;
      if (growth
	  && optimize_function_for_size_p (DECL_STRUCT_FUNCTION (node->decl)))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, cold code would grow");
	  continue;
	}

      info = IPA_NODE_REF (node);
      count = ipa_get_param_count (info);

      replace_trees = VEC_alloc (ipa_replace_map_p, gc, 1);

      /* Parameters can only be dropped when the signature may change.  */
      if (node->local.can_change_signature)
	args_to_skip = BITMAP_GGC_ALLOC ();
      else
	args_to_skip = NULL;
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  parm_tree = ipa_get_param (info, i);

	  /* We can proactively remove obviously unused arguments.  */
	  if (!ipa_is_param_used (info, i))
	    {
	      if (args_to_skip)
		bitmap_set_bit (args_to_skip, i);
	      continue;
	    }

	  /* Constant parameters get a replacement map and are skipped
	     in the clone's signature.  */
	  if (lat->type == IPA_CONST_VALUE)
	    {
	      replace_param =
		ipcp_create_replace_map (parm_tree, lat);
	      if (replace_param == NULL)
		break;
	      VEC_safe_push (ipa_replace_map_p, gc, replace_trees,
			     replace_param);
	      if (args_to_skip)
		bitmap_set_bit (args_to_skip, i);
	    }
	}
      /* An early break above means some replacement failed: give up on
	 this node.  */
      if (i < count)
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, some parameters couldn't be replaced");
	  continue;
	}

      new_size += growth;

      /* Look if original function becomes dead after cloning.  */
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (cs->caller == node || ipcp_need_redirect_p (cs))
	  break;
      if (!cs && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	bitmap_set_bit (dead_nodes, node->uid);

      /* Compute how many callers node has.  */
      node_callers = 0;
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	node_callers++;
      redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (!cs->indirect_inlining_edge)
	  VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

      /* Redirecting all the callers of the node to the new versioned
	 node.  */
      node1 =
	cgraph_create_virtual_clone (node, redirect_callers, replace_trees,
				     args_to_skip, "constprop");
      args_to_skip = NULL;
      VEC_free (cgraph_edge_p, heap, redirect_callers);
      replace_trees = NULL;

      if (node1 == NULL)
	continue;
      ipcp_process_devirtualization_opportunities (node1);

      if (dump_file)
	fprintf (dump_file, "versioned function %s with growth %i, overall %i\n",
		 cgraph_node_name (node), (int)growth, (int)new_size);
      ipcp_init_cloned_node (node, node1);

      /* Constants discovered for the clone may expose new direct
	 call edges.  */
      info = IPA_NODE_REF (node);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  if (lat->type == IPA_CONST_VALUE)
	    ipcp_discover_new_direct_edges (node1, i, lat->constant);
	}

      if (dump_file)
	dump_function_to_file (node1->decl, dump_file, dump_flags);

      /* Cloning may have changed the cost of cloning our callees:
	 re-key them in the heap.  */
      for (cs = node->callees; cs; cs = cs->next_callee)
	if (cs->callee->aux)
	  {
	    fibheap_delete_node (heap, (fibnode_t) cs->callee->aux);
	    cs->callee->aux = fibheap_insert (heap,
					      ipcp_estimate_cloning_cost (cs->callee),
					      cs->callee);
	  }
    }

  /* Drain the heap, clearing the aux back-pointers of skipped nodes.
     NOTE(review): the dump prints cgraph_node_name (node) BEFORE
     extracting from the heap, so it logs the previously handled
     node's name (or stale data on the first iteration) -- looks like
     the fprintf belongs after the extract; confirm against upstream.  */
  while (!fibheap_empty (heap))
    {
      if (dump_file)
	fprintf (dump_file, "skipping function %s\n",
		 cgraph_node_name (node));
      node = (struct cgraph_node *) fibheap_extract_min (heap);
      node->aux = NULL;
    }
  fibheap_delete (heap);
  BITMAP_FREE (dead_nodes);
  ipcp_update_callgraph ();
  ipcp_update_profiling ();
}