/* Checks whether the range given by individual case statements of the SWTCH
   switch statement isn't too big and whether the number of branches actually
   satisfies the size of the new array.  */

static bool
check_range (gimple swtch)
{
  tree min_case, max_case;
  unsigned int branch_num = gimple_switch_num_labels (swtch);
  tree range_max;

  /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
     is a default label which is the last in the vector.  */

  min_case = gimple_switch_label (swtch, 1);
  info.range_min = CASE_LOW (min_case);

  gcc_assert (branch_num > 1);
  gcc_assert (CASE_LOW (gimple_switch_label (swtch, 0)) == NULL_TREE);
  max_case = gimple_switch_label (swtch, branch_num - 1);
  if (CASE_HIGH (max_case) != NULL_TREE)
    range_max = CASE_HIGH (max_case);
  else
    range_max = CASE_LOW (max_case);

  gcc_assert (info.range_min);
  gcc_assert (range_max);

  info.range_size = int_const_binop (MINUS_EXPR, range_max, info.range_min, 0);

  gcc_assert (info.range_size);
  if (!host_integerp (info.range_size, 1))
    {
      info.reason = "index range way too large or otherwise unusable.\n";
      return false;
    }

  if ((unsigned HOST_WIDE_INT) tree_low_cst (info.range_size, 1)
      > ((unsigned) branch_num * SWITCH_CONVERSION_BRANCH_RATIO))
    {
      info.reason = "the maximum range-branch ratio exceeded.\n";
      return false;
    }

  return true;
}
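/* Illustrative sketch, not part of the original source: assuming
   SWITCH_CONVERSION_BRANCH_RATIO is 8 (its historical default), a switch
   such as

     switch (x) { case 3: ...; case 10: ...; case 12: ...; default: ...; }

   has branch_num == 4 (three cases plus the default label), range_min == 3
   and range_size == 12 - 3 == 9; since 9 <= 4 * 8, check_range accepts it.
   A switch with only cases 0 and 1000 fails the test (1000 > 3 * 8) and is
   rejected with "the maximum range-branch ratio exceeded".  */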
/* Fold whatever constant operands of the TARGET_MEM_REF REF can be folded
   into its offset.  Return the new reference, or NULL_TREE if nothing
   was changed.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree ret, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this form on the target - the propagation result
     wouldn't be anyway.  */
  ret = create_mem_ref_raw (TREE_TYPE (ref), TREE_TYPE (addr.offset),
			    &addr, false);
  copy_mem_ref_info (ret, ref);
  return ret;
}
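/* Illustrative sketch, not part of the original source: if earlier constant
   propagation turned the index of a TARGET_MEM_REF into a constant, e.g.

     MEM[base: p_1, index: 4, step: 8, offset: 16]

   the index/step pair above is folded into the offset, yielding

     MEM[base: p_1, offset: 48]

   and a new TARGET_MEM_REF is built from it even if that form is not
   directly addressable on the target.  */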
/* Build a series of bit tests for the GIMPLE switch statement SWTCH.
   INDEX_EXPR is its index expression, MINVAL and MAXVAL the smallest and
   largest case values, and RANGE the difference MAXVAL - MINVAL.  */

static void
emit_case_bit_tests (gswitch *swtch, tree index_expr,
		     tree minval, tree range, tree maxval)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  unsigned int i, j, k;
  unsigned int count;

  basic_block switch_bb = gimple_bb (swtch);
  basic_block default_bb, new_default_bb, new_bb;
  edge default_edge;
  bool update_dom = dom_info_available_p (CDI_DOMINATORS);

  vec<basic_block> bbs_to_fix_dom = vNULL;

  tree index_type = TREE_TYPE (index_expr);
  tree unsigned_index_type = unsigned_type_for (index_type);
  unsigned int branch_num = gimple_switch_num_labels (swtch);

  gimple_stmt_iterator gsi;
  gassign *shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);
  int prec = TYPE_PRECISION (word_type_node);
  wide_int wone = wi::one (prec);

  memset (&test, 0, sizeof (test));

  /* Get the edge for the default case.  */
  tmp = gimple_switch_default_label (swtch);
  default_bb = label_to_block (CASE_LABEL (tmp));
  default_edge = find_edge (switch_bb, default_bb);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 1; i < branch_num; i++)
    {
      unsigned int lo, hi;
      tree cs = gimple_switch_label (swtch, i);
      tree label = CASE_LABEL (cs);
      edge e = find_edge (switch_bb, label_to_block (label));
      for (k = 0; k < count; k++)
	if (e == test[k].target_edge)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
	  test[k].mask = wi::zero (prec);
	  test[k].target_edge = e;
	  test[k].label = label;
	  test[k].bits = 1;
	  count++;
	}
      else
	test[k].bits++;

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					  CASE_LOW (cs), minval));
      if (CASE_HIGH (cs) == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					    CASE_HIGH (cs), minval));

      for (j = lo; j <= hi; j++)
	test[k].mask |= wi::lshift (wone, j);
    }

  qsort (test, count, sizeof (*test), case_bit_test_cmp);

  /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
     the minval subtractions, but it might make the mask constants more
     expensive.  So, compare the costs.  */
  if (compare_tree_int (minval, 0) > 0
      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
    {
      int cost_diff;
      HOST_WIDE_INT m = tree_to_uhwi (minval);
      rtx reg = gen_raw_REG (word_mode, 10000);
      bool speed_p = optimize_bb_for_speed_p (gimple_bb (swtch));
      cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
					      GEN_INT (-m)), speed_p);
      for (i = 0; i < count; i++)
	{
	  rtx r = immed_wide_int_const (test[i].mask, word_mode);
	  cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	  r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
	  cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	}
      if (cost_diff > 0)
	{
	  for (i = 0; i < count; i++)
	    test[i].mask = wi::lshift (test[i].mask, m);
	  minval = build_zero_cst (TREE_TYPE (minval));
	  range = maxval;
	}
    }

  /* We generate two jumps to the default case label.
     Split the default edge, so that we don't have to do any PHI node
     updating.  */
  new_default_bb = split_edge (default_edge);

  if (update_dom)
    {
      bbs_to_fix_dom.create (10);
      bbs_to_fix_dom.quick_push (switch_bb);
      bbs_to_fix_dom.quick_push (default_bb);
      bbs_to_fix_dom.quick_push (new_default_bb);
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (switch_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  /* if (idx > range) goto default */
  range = force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
  tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
  new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
  if (update_dom)
    bbs_to_fix_dom.quick_push (new_bb);
  gcc_assert (gimple_bb (swtch) == new_bb);
  gsi = gsi_last_bb (new_bb);

  /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
     of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
  if (update_dom)
    {
      vec<basic_block> dom_bbs;
      basic_block dom_son;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
      FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
	{
	  edge e = find_edge (new_bb, dom_son);
	  if (e && single_pred_p (e->dest))
	    continue;
	  set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
	  bbs_to_fix_dom.safe_push (dom_son);
	}
      dom_bbs.release ();
    }
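/* Illustrative sketch, not part of the original source: for

     switch (x) { case 1: case 4: case 6: foo (); break; default: bar (); }

   with minval == 1, the test sequence built from the data above amounts to

     unsigned idx = (unsigned) x - 1;
     if (idx > 5)                       (range == 6 - 1)
       goto default_label;
     unsigned long csui = 1UL << idx;
     if (csui & 0x29UL)                 (bits 0, 3 and 5 for cases 1, 4, 6)
       goto case_label;
     goto default_label;  */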
/* Build a series of bit tests for the GIMPLE switch statement SWTCH.
   INDEX_EXPR is its index expression, MINVAL the smallest case value,
   and RANGE the difference between the largest and smallest case value.  */

static void
emit_case_bit_tests (gimple swtch, tree index_expr,
		     tree minval, tree range)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  unsigned int i, j, k;
  unsigned int count;

  basic_block switch_bb = gimple_bb (swtch);
  basic_block default_bb, new_default_bb, new_bb;
  edge default_edge;
  bool update_dom = dom_info_available_p (CDI_DOMINATORS);

  vec<basic_block> bbs_to_fix_dom = vNULL;

  tree index_type = TREE_TYPE (index_expr);
  tree unsigned_index_type = unsigned_type_for (index_type);
  unsigned int branch_num = gimple_switch_num_labels (swtch);

  gimple_stmt_iterator gsi;
  gimple shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);

  memset (&test, 0, sizeof (test));

  /* Get the edge for the default case.  */
  tmp = gimple_switch_default_label (swtch);
  default_bb = label_to_block (CASE_LABEL (tmp));
  default_edge = find_edge (switch_bb, default_bb);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 1; i < branch_num; i++)
    {
      unsigned int lo, hi;
      tree cs = gimple_switch_label (swtch, i);
      tree label = CASE_LABEL (cs);
      edge e = find_edge (switch_bb, label_to_block (label));
      for (k = 0; k < count; k++)
	if (e == test[k].target_edge)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
	  test[k].hi = 0;
	  test[k].lo = 0;
	  test[k].target_edge = e;
	  test[k].label = label;
	  test[k].bits = 1;
	  count++;
	}
      else
	test[k].bits++;

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					  CASE_LOW (cs), minval));
      if (CASE_HIGH (cs) == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					    CASE_HIGH (cs), minval));

      for (j = lo; j <= hi; j++)
	if (j >= HOST_BITS_PER_WIDE_INT)
	  test[k].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
	else
	  test[k].lo |= (HOST_WIDE_INT) 1 << j;
    }

  qsort (test, count, sizeof (*test), case_bit_test_cmp);

  /* We generate two jumps to the default case label.
     Split the default edge, so that we don't have to do any PHI node
     updating.  */
  new_default_bb = split_edge (default_edge);

  if (update_dom)
    {
      bbs_to_fix_dom.create (10);
      bbs_to_fix_dom.quick_push (switch_bb);
      bbs_to_fix_dom.quick_push (default_bb);
      bbs_to_fix_dom.quick_push (new_default_bb);
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (switch_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  /* if (idx > range) goto default */
  range = force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
  tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
  new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
  if (update_dom)
    bbs_to_fix_dom.quick_push (new_bb);
  gcc_assert (gimple_bb (swtch) == new_bb);
  gsi = gsi_last_bb (new_bb);

  /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
     of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
  if (update_dom)
    {
      vec<basic_block> dom_bbs;
      basic_block dom_son;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
      FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
	{
	  edge e = find_edge (new_bb, dom_son);
	  if (e && single_pred_p (e->dest))
	    continue;
	  set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
	  bbs_to_fix_dom.safe_push (dom_son);
	}
      dom_bbs.release ();
    }
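/* Illustrative sketch, not part of the original source: this older variant
   keeps each case mask as two HOST_WIDE_INT halves instead of a wide_int,
   so with a 64-bit HOST_WIDE_INT a case value of minval + 70 sets bit 6 of
   test[k].hi (70 - 64), while minval + 6 sets bit 6 of test[k].lo.  */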
/* Instrument loads from (non-bitfield) bool and C++ enum values to check
   if the memory value is outside of the valid range of the type.  */

static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (rhs);
  tree minv = NULL_TREE, maxv = NULL_TREE;

  if (TREE_CODE (type) == BOOLEAN_TYPE && (flag_sanitize & SANITIZE_BOOL))
    {
      minv = boolean_false_node;
      maxv = boolean_true_node;
    }
  else if (TREE_CODE (type) == ENUMERAL_TYPE
	   && (flag_sanitize & SANITIZE_ENUM)
	   && TREE_TYPE (type) != NULL_TREE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	   && (TYPE_PRECISION (TREE_TYPE (type))
	       < GET_MODE_PRECISION (TYPE_MODE (type))))
    {
      minv = TYPE_MIN_VALUE (TREE_TYPE (type));
      maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
    }
  else
    return;

  int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
				   &unsignedp, &volatilep, false);
  tree utype = build_nonstandard_integer_type (modebitsize, 1);

  if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
      || (bitpos % modebitsize) != 0
      || bitsize != modebitsize
      || GET_MODE_BITSIZE (TYPE_MODE (utype)) != modebitsize
      || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return;

  location_t loc = gimple_location (stmt);
  tree ptype = build_pointer_type (TREE_TYPE (rhs));
  tree atype = reference_alias_ptr_type (rhs);
  gimple g = gimple_build_assign (make_ssa_name (ptype, NULL),
				  build_fold_addr_expr (rhs));
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
		     build_int_cst (atype, 0));
  tree urhs = make_ssa_name (utype, NULL);
  g = gimple_build_assign (urhs, mem);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  minv = fold_convert (utype, minv);
  maxv = fold_convert (utype, maxv);
  if (!integer_zerop (minv))
    {
      g = gimple_build_assign_with_ops (MINUS_EXPR,
					make_ssa_name (utype, NULL),
					urhs, minv);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  gimple_stmt_iterator gsi2 = *gsi;
  basic_block then_bb, fallthru_bb;
  *gsi = create_cond_insert_point (gsi, true, false, true,
				   &then_bb, &fallthru_bb);
  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
			 int_const_binop (MINUS_EXPR, maxv, minv),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs, NULL_TREE);
  update_stmt (stmt);

  gsi2 = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_invalid_value_data", &loc, NULL,
				     ubsan_type_descriptor (type), NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= flag_sanitize_recover
	  ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
	  : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
      tree fn = builtin_decl_explicit (bcode);

      tree val = force_gimple_operand_gsi (&gsi2, ubsan_encode_value (urhs),
					   true, NULL_TREE, true,
					   GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
}
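/* Illustrative sketch, not part of the original source: for a load from a
   bool B, the instrumentation above rewrites

     _1 = B;

   into roughly

     _2 = &B;
     _3 = *(unsigned char *) _2;        (load through the full-width type)
     if (_3 > 1)                        (outside [minv, maxv] == [0, 1])
       __ubsan_handle_load_invalid_value (&data, _3);
     _1 = (_Bool) _3;

   with the handler call replaced by __builtin_trap () when
   -fsanitize-undefined-trap-on-error is in effect.  */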
/* Build the constructors for the arrays filled with values gathered from
   the cases of the SWTCH switch statement, one constructor per phi node in
   the final basic block, using the default values to fill any holes in the
   case range.  */

static void
build_constructors (gimple swtch)
{
  unsigned i, branch_num = gimple_switch_num_labels (swtch);
  tree pos = info.range_min;

  for (i = 1; i < branch_num; i++)
    {
      tree cs = gimple_switch_label (swtch, i);
      basic_block bb = label_to_block (CASE_LABEL (cs));
      edge e;
      tree high;
      gimple_stmt_iterator gsi;
      int j;

      if (bb == info.final_bb)
	e = find_edge (info.switch_bb, bb);
      else
	e = single_succ_edge (bb);
      gcc_assert (e);

      while (tree_int_cst_lt (pos, CASE_LOW (cs)))
	{
	  int k;
	  for (k = 0; k < info.phi_count; k++)
	    {
	      constructor_elt *elt;

	      elt = VEC_quick_push (constructor_elt,
				    info.constructors[k], NULL);
	      elt->index = int_const_binop (MINUS_EXPR, pos,
					    info.range_min, 0);
	      elt->value = info.default_values[k];
	    }

	  pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
	}
      gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));

      j = 0;
      if (CASE_HIGH (cs))
	high = CASE_HIGH (cs);
      else
	high = CASE_LOW (cs);
      for (gsi = gsi_start_phis (info.final_bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  tree low = CASE_LOW (cs);
	  pos = CASE_LOW (cs);

	  do
	    {
	      constructor_elt *elt;

	      elt = VEC_quick_push (constructor_elt,
				    info.constructors[j], NULL);
	      elt->index = int_const_binop (MINUS_EXPR, pos,
					    info.range_min, 0);
	      elt->value = val;

	      pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
	    } while (!tree_int_cst_lt (high, pos)
		     && tree_int_cst_lt (low, pos));
	  j++;
	}
    }
}
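/* Illustrative sketch, not part of the original source: for

     switch (x) { case 3: r = 10; break; case 5: r = 20; break;
		  default: r = 0; }

   the loops above build one constructor entry per covered index, filling
   the hole at case 4 with the default value:

     { [0] = 10, [1] = 0, [2] = 20 }    (indices 0..2 == case values 3..5)

   which later becomes a static array indexed by x - 3 in place of the
   switch.  */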
static tree
forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
{
  tree new_cond = NULL_TREE;
  enum tree_code cond_code = TREE_CODE (cond);
  tree test_var = NULL_TREE;
  tree def;
  tree def_rhs;

  /* If the condition is not a lone variable or an equality test of an
     SSA_NAME against an integral constant, then we do not have an
     optimizable case.

     Note these conditions also ensure the COND_EXPR has no
     virtual operands or other side effects.  */
  if (cond_code != SSA_NAME
      && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
	   && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
	   && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
	   && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
    return NULL_TREE;

  /* Extract the single variable used in the test into TEST_VAR.  */
  if (cond_code == SSA_NAME)
    test_var = cond;
  else
    test_var = TREE_OPERAND (cond, 0);

  /* Now get the defining statement for TEST_VAR.  Skip this case if it's
     not defined by some MODIFY_EXPR.  */
  def = SSA_NAME_DEF_STMT (test_var);
  if (TREE_CODE (def) != MODIFY_EXPR)
    return NULL_TREE;

  def_rhs = TREE_OPERAND (def, 1);

  /* If TEST_VAR is set by adding or subtracting a constant
     from an SSA_NAME, then it is interesting to us as we
     can adjust the constant in the conditional and thus
     eliminate the arithmetic operation.  */
  if (TREE_CODE (def_rhs) == PLUS_EXPR
      || TREE_CODE (def_rhs) == MINUS_EXPR)
    {
      tree op0 = TREE_OPERAND (def_rhs, 0);
      tree op1 = TREE_OPERAND (def_rhs, 1);

      /* The first operand must be an SSA_NAME and the second
	 operand must be a constant.  */
      if (TREE_CODE (op0) != SSA_NAME
	  || !CONSTANT_CLASS_P (op1)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	return NULL_TREE;

      /* Don't propagate if the first operand occurs in
	 an abnormal PHI.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	return NULL_TREE;

      if (has_single_use (test_var))
	{
	  enum tree_code new_code;
	  tree t;

	  /* If the variable was defined via X + C, then we must
	     subtract C from the constant in the conditional.
	     Otherwise we add C to the constant in the
	     conditional.  The result must fold into a valid
	     gimple operand to be optimizable.  */
	  new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
		      ? MINUS_EXPR : PLUS_EXPR);
	  t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
	  if (!is_gimple_val (t))
	    return NULL_TREE;

	  new_cond = build (cond_code, boolean_type_node, op0, t);
	}
    }

  /* These cases require comparisons of a naked SSA_NAME or
     comparison of an SSA_NAME against zero or one.  */
  else if (TREE_CODE (cond) == SSA_NAME
	   || integer_zerop (TREE_OPERAND (cond, 1))
	   || integer_onep (TREE_OPERAND (cond, 1)))
    {
      /* If TEST_VAR is set from a relational operation
	 between two SSA_NAMEs or a combination of an SSA_NAME
	 and a constant, then it is interesting.  */
      if (COMPARISON_CLASS_P (def_rhs))
	{
	  tree op0 = TREE_OPERAND (def_rhs, 0);
	  tree op1 = TREE_OPERAND (def_rhs, 1);

	  /* Both operands of DEF_RHS must be SSA_NAMEs or
	     constants.  */
	  if ((TREE_CODE (op0) != SSA_NAME
	       && !is_gimple_min_invariant (op0))
	      || (TREE_CODE (op1) != SSA_NAME
		  && !is_gimple_min_invariant (op1)))
	    return NULL_TREE;

	  /* Don't propagate if the first operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op0) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	    return NULL_TREE;

	  /* Don't propagate if the second operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op1) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      /* TEST_VAR was set from a relational operator.  */
	      new_cond = build (TREE_CODE (def_rhs),
				boolean_type_node, op0, op1);

	      /* Invert the conditional if necessary.  */
	      if ((cond_code == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == NE_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		{
		  new_cond = invert_truthvalue (new_cond);

		  /* If we did not get a simple relational
		     expression or bare SSA_NAME, then we can
		     not optimize this case.  */
		  if (!COMPARISON_CLASS_P (new_cond)
		      && TREE_CODE (new_cond) != SSA_NAME)
		    new_cond = NULL_TREE;
		}
	    }
	}

      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
	 is interesting.  */
      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
	{
	  enum tree_code new_code;

	  def_rhs = TREE_OPERAND (def_rhs, 0);

	  /* DEF_RHS must be an SSA_NAME or constant.  */
	  if (TREE_CODE (def_rhs) != SSA_NAME
	      && !is_gimple_min_invariant (def_rhs))
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (def_rhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
	    return NULL_TREE;

	  if (cond_code == SSA_NAME
	      || (cond_code == NE_EXPR
		  && integer_zerop (TREE_OPERAND (cond, 1)))
	      || (cond_code == EQ_EXPR
		  && integer_onep (TREE_OPERAND (cond, 1))))
	    new_code = EQ_EXPR;
	  else
	    new_code = NE_EXPR;

	  new_cond = build2 (new_code, boolean_type_node, def_rhs,
			     fold_convert (TREE_TYPE (def_rhs),
					   integer_zero_node));
	}

      /* If TEST_VAR was set from a cast of an integer type
	 to a boolean type or a cast of a boolean to an
	 integral, then it is interesting.  */
      else if (TREE_CODE (def_rhs) == NOP_EXPR
	       || TREE_CODE (def_rhs) == CONVERT_EXPR)
	{
	  tree outer_type;
	  tree inner_type;

	  outer_type = TREE_TYPE (def_rhs);
	  inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

	  if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
	       && INTEGRAL_TYPE_P (inner_type))
	      || (TREE_CODE (inner_type) == BOOLEAN_TYPE
		  && INTEGRAL_TYPE_P (outer_type)))
	    ;
	  else if (INTEGRAL_TYPE_P (outer_type)
		   && INTEGRAL_TYPE_P (inner_type)
		   && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
		   && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
								      0)))
	    ;
	  else
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (def_rhs, 0)))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      enum tree_code new_code;
	      tree new_arg;

	      if (cond_code == SSA_NAME
		  || (cond_code == NE_EXPR
		      && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == EQ_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		new_code = NE_EXPR;
	      else
		new_code = EQ_EXPR;

	      new_arg = TREE_OPERAND (def_rhs, 0);
	      new_cond = build2 (new_code, boolean_type_node, new_arg,
				 fold_convert (TREE_TYPE (new_arg),
					       integer_zero_node));
	    }
	}
    }

  *test_var_p = test_var;
  return new_cond;
}
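/* Illustrative sketch, not part of the original source: given

     b_2 = a_1 + 3;
     if (b_2 == 5) ...

   the PLUS_EXPR arm above returns the replacement condition a_1 == 2
   (from 5 - 3), and for

     c_2 = !d_1;
     if (c_2) ...

   the TRUTH_NOT_EXPR arm returns d_1 == 0; once its single use is
   rewritten, the defining statement can die.  */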
static void
substitute_single_use_vars (varray_type *cond_worklist,
			    varray_type vars_worklist)
{
  while (VARRAY_ACTIVE_SIZE (vars_worklist) > 0)
    {
      tree test_var = VARRAY_TOP_TREE (vars_worklist);
      tree def = SSA_NAME_DEF_STMT (test_var);
      dataflow_t df;
      int j, num_uses, propagated_uses;

      VARRAY_POP (vars_worklist);

      /* Now compute the immediate uses of TEST_VAR.  */
      df = get_immediate_uses (def);
      num_uses = num_immediate_uses (df);
      propagated_uses = 0;

      /* If TEST_VAR is used more than once and is not a boolean
	 set via TRUTH_NOT_EXPR with another SSA_NAME as its
	 argument, then we can not optimize.  */
      if (num_uses == 1
	  || (TREE_CODE (TREE_TYPE (test_var)) == BOOLEAN_TYPE
	      && TREE_CODE (TREE_OPERAND (def, 1)) == TRUTH_NOT_EXPR
	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (def, 1), 0))
		  == SSA_NAME)))
	;
      else
	continue;

      /* Walk over each use and try to forward propagate the RHS of
	 DEF into the use.  */
      for (j = 0; j < num_uses; j++)
	{
	  tree cond_stmt;
	  tree cond;
	  enum tree_code cond_code;
	  tree def_rhs;
	  enum tree_code def_rhs_code;
	  tree new_cond;

	  cond_stmt = immediate_use (df, j);

	  /* For now we can only propagate into COND_EXPRs.  */
	  if (TREE_CODE (cond_stmt) != COND_EXPR)
	    continue;

	  cond = COND_EXPR_COND (cond_stmt);
	  cond_code = TREE_CODE (cond);
	  def_rhs = TREE_OPERAND (def, 1);
	  def_rhs_code = TREE_CODE (def_rhs);

	  /* If the definition of the single use variable was from an
	     arithmetic operation, then we just need to adjust the
	     constant in the COND_EXPR_COND and update the variable
	     tested.  */
	  if (def_rhs_code == PLUS_EXPR || def_rhs_code == MINUS_EXPR)
	    {
	      tree op0 = TREE_OPERAND (def_rhs, 0);
	      tree op1 = TREE_OPERAND (def_rhs, 1);
	      enum tree_code new_code;
	      tree t;

	      /* If the variable was defined via X + C, then we must
		 subtract C from the constant in the conditional.
		 Otherwise we add C to the constant in the
		 conditional.  The result must fold into a valid
		 gimple operand to be optimizable.  */
	      new_code = def_rhs_code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
	      t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
	      if (!is_gimple_val (t))
		continue;

	      new_cond = build (cond_code, boolean_type_node, op0, t);
	    }
	  /* If the variable is defined by a conditional expression...  */
	  else if (TREE_CODE_CLASS (def_rhs_code) == tcc_comparison)
	    {
	      /* TEST_VAR was set from a relational operator.  */
	      tree op0 = TREE_OPERAND (def_rhs, 0);
	      tree op1 = TREE_OPERAND (def_rhs, 1);

	      new_cond = build (def_rhs_code, boolean_type_node, op0, op1);

	      /* Invert the conditional if necessary.  */
	      if ((cond_code == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == NE_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		{
		  new_cond = invert_truthvalue (new_cond);

		  /* If we did not get a simple relational
		     expression or bare SSA_NAME, then we can
		     not optimize this case.  */
		  if (!COMPARISON_CLASS_P (new_cond)
		      && TREE_CODE (new_cond) != SSA_NAME)
		    continue;
		}
	    }
	  else
	    {
	      bool invert = false;
	      enum tree_code new_code;
	      tree new_arg;

	      /* TEST_VAR was set from a TRUTH_NOT_EXPR or a
		 NOP_EXPR.  */
	      if (def_rhs_code == TRUTH_NOT_EXPR)
		invert = true;

	      /* If we don't have <NE_EXPR/EQ_EXPR x INT_CST>, then
		 we cannot optimize this case.  */
	      if ((cond_code == NE_EXPR || cond_code == EQ_EXPR)
		  && TREE_CODE (TREE_OPERAND (cond, 1)) != INTEGER_CST)
		continue;

	      if (cond_code == SSA_NAME
		  || (cond_code == NE_EXPR
		      && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == EQ_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		new_code = NE_EXPR;
	      else
		new_code = EQ_EXPR;

	      if (invert)
		new_code = (new_code == EQ_EXPR ? NE_EXPR : EQ_EXPR);

	      new_arg = TREE_OPERAND (def_rhs, 0);
	      new_cond = build2 (new_code, boolean_type_node, new_arg,
				 fold_convert (TREE_TYPE (new_arg),
					       integer_zero_node));
	    }

	  /* Dump details.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Replaced '");
	      print_generic_expr (dump_file, cond, dump_flags);
	      fprintf (dump_file, "' with '");
	      print_generic_expr (dump_file, new_cond, dump_flags);
	      fprintf (dump_file, "'\n");
	    }

	  /* Replace the condition.  */
	  COND_EXPR_COND (cond_stmt) = new_cond;
	  modify_stmt (cond_stmt);
	  propagated_uses++;
	  VARRAY_PUSH_TREE (*cond_worklist, cond_stmt);
	}

      /* If we propagated into all the uses, then we can delete DEF.
	 Unfortunately, we have to find the defining statement in
	 whatever block it might be in.  */
      if (num_uses && num_uses == propagated_uses)
	{
	  block_stmt_iterator bsi = bsi_for_stmt (def);
	  bsi_remove (&bsi);
	}
    }
}
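/* Illustrative sketch, not part of the original source: given

     t_2 = x_1 < y_1;
     if (t_2 != 0) ...

   the comparison arm above replaces the condition with x_1 < y_1 directly;
   and because every use of t_2 was propagated, the defining statement is
   then deleted via bsi_remove.  */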