/* Return TRUE if all non-debug immediate uses of the variable defined
   by DEF_P occur in the same statement.  */

static bool
all_immediate_uses_same_place (def_operand_p def_p)
{
  tree var = DEF_FROM_PTR (def_p);
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *firstuse = NULL;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      if (is_gimple_debug (USE_STMT (use_p)))
        continue;
      if (firstuse == NULL)
        firstuse = USE_STMT (use_p);
      else if (firstuse != USE_STMT (use_p))
        return false;
    }

  return true;
}
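/* A minimal illustrative sketch (not GCC source) of the idiom the helper
   above relies on: walking the immediate uses of an SSA name while
   ignoring debug statements.  count_nondebug_uses is a hypothetical
   helper; the iterator macros and predicates are the standard GCC
   internal APIs used throughout this file.  */

static unsigned
count_nondebug_uses (tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  unsigned count = 0;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      /* Debug uses must never influence code generation decisions.  */
      if (is_gimple_debug (USE_STMT (use_p)))
        continue;
      count++;
    }

  return count;
}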
/* Find the nearest common dominator of all of the uses of the variable
   defined by DEF_P.  Debug statements are ignored as uses, but their
   presence is reported through *DEBUG_STMTS.  Return NULL if a use is
   in a block nothing dominates.  */

static basic_block
nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
{
  tree var = DEF_FROM_PTR (def_p);
  bitmap blocks = BITMAP_ALLOC (NULL);
  basic_block commondom;
  unsigned int j;
  bitmap_iterator bi;
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      gimple *usestmt = USE_STMT (use_p);
      basic_block useblock;

      if (gphi *phi = dyn_cast <gphi *> (usestmt))
        {
          int idx = PHI_ARG_INDEX_FROM_USE (use_p);

          useblock = gimple_phi_arg_edge (phi, idx)->src;
        }
      else if (is_gimple_debug (usestmt))
        {
          *debug_stmts = true;
          continue;
        }
      else
        useblock = gimple_bb (usestmt);

      /* Short circuit.  Nothing dominates the entry block.  */
      if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          BITMAP_FREE (blocks);
          return NULL;
        }

      bitmap_set_bit (blocks, useblock->index);
    }

  /* Reduce the recorded use blocks to their nearest common dominator
     (this tail matches the upstream GCC implementation).  */
  commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
    commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
                                          BASIC_BLOCK_FOR_FN (cfun, j));
  BITMAP_FREE (blocks);
  return commondom;
}
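/* Hedged usage sketch (hypothetical caller, not GCC source): a sinking
   heuristic would typically call the helper above for each definition
   of a statement and give up when no common dominator exists.  The
   DEBUG_USES flag is ignored here but lets a real caller decide whether
   debug statements need fixing up after the move.  */

static basic_block
pick_sink_location (gimple *stmt)
{
  def_operand_p def_p;
  ssa_op_iter iter;
  bool debug_uses = false;

  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
    {
      basic_block commondom
        = nearest_common_dominator_of_uses (def_p, &debug_uses);
      if (commondom)
        return commondom;
    }

  return NULL;
}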
/* Instrument the current function with the undefined-behavior checks
   selected in flag_sanitize.  Entry point of the pass.  */

static unsigned int
ubsan_pass (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          if ((flag_sanitize & SANITIZE_SI_OVERFLOW)
              && is_gimple_assign (stmt))
            instrument_si_overflow (gsi);

          if (flag_sanitize & SANITIZE_NULL)
            {
              if (gimple_store_p (stmt))
                instrument_null (gsi, true);
              if (gimple_assign_load_p (stmt))
                instrument_null (gsi, false);
            }

          if (flag_sanitize & (SANITIZE_BOOL | SANITIZE_ENUM)
              && gimple_assign_load_p (stmt))
            instrument_bool_enum_load (&gsi);

          gsi_next (&gsi);
        }
    }
  return 0;
}
/* Set the source location (and lexical block) to be used for the
   instructions emitted for edge E, searching backwards from the edge's
   source block when the edge itself carries no location.  */

static void
set_location_for_edge (edge e)
{
  if (e->goto_locus)
    {
      set_curr_insn_source_location (e->goto_locus);
      set_curr_insn_block (e->goto_block);
    }
  else
    {
      basic_block bb = e->src;
      gimple_stmt_iterator gsi;

      do
        {
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              if (gimple_has_location (stmt) || gimple_block (stmt))
                {
                  set_curr_insn_source_location (gimple_location (stmt));
                  set_curr_insn_block (gimple_block (stmt));
                  return;
                }
            }
          /* Nothing found in this basic block.  Make a best-effort
             attempt to continue with another block.  */
          if (single_pred_p (bb))
            bb = single_pred (bb);
          else
            bb = e->src;
        }
      while (bb != e->src);
    }
}
static void
sese_reset_debug_liveouts_bb (sese region, bitmap liveouts, basic_block bb)
{
  gimple_stmt_iterator bsi;
  ssa_op_iter iter;
  use_operand_p use_p;

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);

      if (!is_gimple_debug (stmt))
        continue;

      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
        if (sese_bad_liveouts_use (region, liveouts, bb,
                                   USE_FROM_PTR (use_p)))
          {
            gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
            break;
          }
    }
}
static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_debug (stmt))
            continue;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl),
                                    stmt, bb->count, freq);
              else if (gimple_call_internal_p (stmt))
                ;
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          ipa_record_stmt_references (node, stmt);
          if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
              && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node, cgraph_get_create_node (fn),
                                    IPA_REF_ADDR, stmt);
            }
          if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node, cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node, cgraph_get_create_node (fn),
                                      IPA_REF_ADDR, stmt);
            }
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        ipa_record_stmt_references (node, gsi_stmt (gsi));
    }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
        && !DECL_HAS_VALUE_EXPR_P (decl))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}
  build_vuse = var;
}


/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
                     gimple *stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
/* Look at the statement referenced by the iterator GSIP and figure out
   what interesting side effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else if (dump_file)
        fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
        }
      if (gimple_asm_volatile_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
      return;
    default:
      break;
    }
}
static bool
stmt_simple_for_scop_p (basic_block scop_entry, loop_p outermost_loop,
                        gimple stmt, basic_block bb)
{
  loop_p loop = bb->loop_father;

  gcc_assert (scop_entry);

  /* GIMPLE_ASM and GIMPLE_CALL may embed arbitrary side effects.
     Calls have side effects, except those to const or pure
     functions.  */
  if (gimple_has_volatile_ops (stmt)
      || (gimple_code (stmt) == GIMPLE_CALL
          && !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
      || (gimple_code (stmt) == GIMPLE_ASM))
    return false;

  if (is_gimple_debug (stmt))
    return true;

  if (!stmt_has_simple_data_refs_p (outermost_loop, stmt))
    return false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
    case GIMPLE_LABEL:
      return true;

    case GIMPLE_COND:
      {
        /* We can handle all binary comparisons.  Inequalities are
           also supported as they can be represented with union of
           polyhedra.  */
        enum tree_code code = gimple_cond_code (stmt);
        if (!(code == LT_EXPR
              || code == GT_EXPR
              || code == LE_EXPR
              || code == GE_EXPR
              || code == EQ_EXPR
              || code == NE_EXPR))
          return false;

        for (unsigned i = 0; i < 2; ++i)
          {
            tree op = gimple_op (stmt, i);
            if (!graphite_can_represent_expr (scop_entry, loop, op)
                /* We cannot handle REAL_TYPE.  Failed for pr39260.  */
                || TREE_CODE (TREE_TYPE (op)) == REAL_TYPE)
              return false;
          }

        return true;
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return true;

    default:
      /* These nodes cut a new scope.  */
      return false;
    }

  return false;
}
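/* An illustrative note (not GCC source): with the restrictions above, a
   condition such as "if (i < n)" on integer operands is representable
   in the polyhedral model, while "if (x < 1.0)" on a double operand is
   rejected by the REAL_TYPE check.  */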
/* Emit statement STMT on the main stream of output block OB.  */

static void
output_gimple_stmt (struct output_block *ob, gimple stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (stmt));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
      break;

    case GIMPLE_ASM:
      streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
      streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
      streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
                             true);
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
        {
          tree op = gimple_op (stmt, i);
          tree *basep = NULL;
          /* Wrap all uses of non-automatic variables inside MEM_REFs
             so that we do not have to deal with type mismatches on
             merged symbols during IL read in.  The first operand
             of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
          if (op && (i || !is_gimple_debug (stmt)))
            {
              basep = &op;
              while (handled_component_p (*basep))
                basep = &TREE_OPERAND (*basep, 0);
              if (TREE_CODE (*basep) == VAR_DECL
                  && !auto_var_in_fn_p (*basep, current_function_decl)
                  && !DECL_REGISTER (*basep))
                {
                  bool volatilep = TREE_THIS_VOLATILE (*basep);
                  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
                                   build_fold_addr_expr (*basep),
                                   build_int_cst (build_pointer_type
                                                  (TREE_TYPE (*basep)), 0));
                  TREE_THIS_VOLATILE (*basep) = volatilep;
                }
              else
                basep = NULL;
            }
          stream_write_tree (ob, op, true);
          /* Restore the original base if we wrapped it inside a MEM_REF.  */
          if (basep)
            *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
        }
      if (is_gimple_call (stmt))
        {
          if (gimple_call_internal_p (stmt))
            streamer_write_enum (ob->main_stream, internal_fn,
                                 IFN_LAST, gimple_call_internal_fn (stmt));
          else
            stream_write_tree (ob, gimple_call_fntype (stmt), true);
        }
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gcc_assert (gimple_transaction_body (stmt) == NULL);
      stream_write_tree (ob, gimple_transaction_label (stmt), true);
      break;

    default:
      gcc_unreachable ();
    }
  if (hist)
    stream_out_histogram_value (ob, hist);
}
/* Find tail calls falling into basic block BB and chain the candidates
   found onto the list *RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case we
     won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The
                 parameter must have a copyable type and the two
                 arguments must have reasonably equivalent types.  The
                 latter requirement could be relaxed if we emitted a
                 suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that phi node
                 created for it at the start of the function has the
                 meaning of copying the value.  This test implies
                 is_gimple_reg_type from the previous condition, however
                 this one could be relaxed by being more careful with
                 copying the new value of the parameter (emitting
                 appropriate GIMPLE_ASSIGN and updating the virtual
                 operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer to
     local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return
     value is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
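/* An illustrative note (not GCC source): the M and A accumulators above
   encode how the value returned by the recursive call is modified before
   being returned, so that

     int fact (int n) { return n == 0 ? 1 : n * fact (n - 1); }

   is recognized with M = n (a multiplicand) and A = NULL, allowing the
   later transformation to turn the recursion into a loop carrying an
   accumulator variable.  */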
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if ((!(flags & opf_non_addressable)
           || (flags & opf_not_non_addressable))
          && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Try to replace the single adjacent zero-store in PARTITION of LOOP
   with a call to memset (via generate_memset_zero), when the number of
   executions of the exit condition is computable.  Returns true if code
   was generated; destroys LOOP when this is the last partition
   (!COPY_P).  */

static bool
generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
{
  bool res = false;
  unsigned i, x = 0;
  basic_block *bbs;
  gimple write = NULL;
  gimple_stmt_iterator bsi;
  tree nb_iter = number_of_exit_cond_executions (loop);

  if (!nb_iter || nb_iter == chrec_dont_know)
    return false;

  bbs = get_loop_body_in_dom_order (loop);

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];

      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        x++;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);

          if (gimple_code (stmt) == GIMPLE_LABEL
              || is_gimple_debug (stmt))
            continue;

          if (!bitmap_bit_p (partition, x++))
            continue;

          /* If the stmt has uses outside of the loop fail.  */
          if (stmt_has_scalar_dependences_outside_loop (stmt))
            goto end;

          if (is_gimple_assign (stmt)
              && !is_gimple_reg (gimple_assign_lhs (stmt)))
            {
              /* Don't generate the builtins when there is more than
                 one memory write.  */
              if (write != NULL)
                goto end;

              write = stmt;
              if (bb == loop->latch)
                nb_iter = number_of_latch_executions (loop);
            }
        }
    }

  if (!stmt_with_adjacent_zero_store_dr_p (write))
    goto end;

  /* The new statements will be placed before LOOP.  */
  bsi = gsi_last_bb (loop_preheader_edge (loop)->src);
  generate_memset_zero (write, gimple_assign_lhs (write), nb_iter, bsi);
  res = true;

  /* If this is the last partition for which we generate code, we have
     to destroy the loop.  */
  if (!copy_p)
    {
      unsigned nbbs = loop->num_nodes;
      edge exit = single_exit (loop);
      basic_block src = loop_preheader_edge (loop)->src, dest = exit->dest;
      redirect_edge_pred (exit, src);
      exit->flags &= ~(EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      exit->flags |= EDGE_FALLTHRU;
      cancel_loop_tree (loop);
      rescan_loop_exit (exit, false, true);

      for (i = 0; i < nbbs; i++)
        delete_basic_block (bbs[i]);

      set_immediate_dominator (CDI_DOMINATORS, dest,
                               recompute_dominator (CDI_DOMINATORS, dest));
    }

 end:
  free (bbs);
  return res;
}
static bool
generate_loops_for_partition (struct loop *loop, bitmap partition,
                              bool copy_p)
{
  unsigned i, x;
  gimple_stmt_iterator bsi;
  basic_block *bbs;

  if (copy_p)
    {
      loop = copy_loop_before (loop);
      create_preheader (loop, CP_SIMPLE_PREHEADERS);
      create_bb_after_loop (loop);
    }

  if (loop == NULL)
    return false;

  /* Remove stmts not in the PARTITION bitmap.  The order in which we
     visit the phi nodes and the statements is exactly as in
     stmts_from_loop.  */
  bbs = get_loop_body_in_dom_order (loop);

  if (MAY_HAVE_DEBUG_STMTS)
    for (x = 0, i = 0; i < loop->num_nodes; i++)
      {
        basic_block bb = bbs[i];

        for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
          if (!bitmap_bit_p (partition, x++))
            reset_debug_uses (gsi_stmt (bsi));

        for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
          {
            gimple stmt = gsi_stmt (bsi);
            if (gimple_code (stmt) != GIMPLE_LABEL
                && !is_gimple_debug (stmt)
                && !bitmap_bit_p (partition, x++))
              reset_debug_uses (stmt);
          }
      }

  for (x = 0, i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];

      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
        if (!bitmap_bit_p (partition, x++))
          {
            gimple phi = gsi_stmt (bsi);
            if (!is_gimple_reg (gimple_phi_result (phi)))
              mark_virtual_phi_result_for_renaming (phi);
            remove_phi_node (&bsi, true);
          }
        else
          gsi_next (&bsi);

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
        {
          gimple stmt = gsi_stmt (bsi);
          if (gimple_code (stmt) != GIMPLE_LABEL
              && !is_gimple_debug (stmt)
              && !bitmap_bit_p (partition, x++))
            {
              unlink_stmt_vdef (stmt);
              gsi_remove (&bsi, true);
              release_defs (stmt);
            }
          else
            gsi_next (&bsi);
        }
    }

  free (bbs);
  return true;
}
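/* A minimal sketch (modeled on the reset_debug_uses helper the pass
   above calls; an assumption, not a verbatim copy) of how debug uses of
   a statement's definitions are detached before the statement is
   removed, so the remaining debug info degrades gracefully instead of
   referencing deleted SSA names.  */

static void
reset_debug_uses_sketch (gimple stmt)
{
  ssa_op_iter iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple use_stmt;

  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree var = DEF_FROM_PTR (def_p);

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          /* Drop the bound value; the debug bind stays but no longer
             references VAR.  */
          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}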
/* Check whether we should duplicate HEADER of LOOP.  At most *LIMIT
   instructions should be duplicated; the limit is decreased by the
   actual amount.  */

static bool
should_duplicate_loop_header_p (basic_block header, struct loop *loop,
                                int *limit)
{
  gimple_stmt_iterator bsi;
  gimple *last;

  gcc_assert (!header->aux);

  /* Loop header copying usually increases size of the code.  This used
     not to be true, since quite often it is possible to verify that the
     condition is satisfied in the first iteration and therefore to
     eliminate it.  Jump threading handles these cases now.  */
  if (optimize_loop_for_size_p (loop))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Not duplicating bb %i: optimizing for size.\n",
                 header->index);
      return false;
    }

  gcc_assert (EDGE_COUNT (header->succs) > 0);
  if (single_succ_p (header))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Not duplicating bb %i: it is single succ.\n",
                 header->index);
      return false;
    }

  if (flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 0)->dest)
      && flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 1)->dest))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Not duplicating bb %i: both successors are in loop.\n",
                 header->index);
      return false;
    }

  /* If this is not the original loop header, we want it to have just
     one predecessor in order to match the && pattern.  */
  if (header != loop->header && !single_pred_p (header))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Not duplicating bb %i: it has multiple predecessors.\n",
                 header->index);
      return false;
    }

  last = last_stmt (header);
  if (gimple_code (last) != GIMPLE_COND)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Not duplicating bb %i: it does not end with a"
                 " conditional.\n",
                 header->index);
      return false;
    }

  /* Count number of instructions and punt on calls.  */
  for (bsi = gsi_start_bb (header); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      last = gsi_stmt (bsi);

      if (gimple_code (last) == GIMPLE_LABEL)
        continue;

      if (is_gimple_debug (last))
        continue;

      if (gimple_code (last) == GIMPLE_CALL
          && !gimple_inexpensive_call_p (as_a <gcall *> (last)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "  Not duplicating bb %i: it contains a call.\n",
                     header->index);
          return false;
        }

      *limit -= estimate_num_insns (last, &eni_size_weights);
      if (*limit < 0)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "  Not duplicating bb %i: it contains too many"
                     " insns.\n",
                     header->index);
          return false;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Will duplicate bb %i\n", header->index);
  return true;
}
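/* An illustrative note (not GCC source): the single-predecessor test
   above targets the block chains produced by short-circuit conditions.
   For

     while (a && b)
       ...

   the loop condition spans two blocks, one testing A and one testing B;
   only the first is loop->header, and the second is considered for
   duplication only when it has a single predecessor.  */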