void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
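/* Compare OP0 with OP1 using comparison code CODE in mode MODE, and jump
   to IF_TRUE_LABEL if the condition is true or to IF_FALSE_LABEL if it is
   false.  Either label may be NULL_RTX, meaning fall through in that case.
   UNSIGNEDP nonzero selects the unsigned form of the comparison; SIZE is
   passed through to emit_cmp_and_jump_insns.  PROB is the probability (on
   the REG_BR_PROB_BASE scale) that the branch to IF_TRUE_LABEL is taken,
   or -1 if unknown.  */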
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (GET_MODE_CLASS (mode) == MODE_FLOAT
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab[code] == NULL))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL_RTX,
                                           prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                         mode, size, NULL_RTX, if_true_label,
                                         prob);
            }
        }

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label);
      if (prob != -1 && profile_status != PROFILE_ABSENT)
        {
          for (last = NEXT_INSN (last);
               last && NEXT_INSN (last);
               last = NEXT_INSN (last))
            if (JUMP_P (last))
              break;
          if (!last
              || !JUMP_P (last)
              || NEXT_INSN (last)
              || !any_condjump_p (last))
            {
              if (dump_file)
                fprintf (dump_file, "Failed to add probability note\n");
            }
          else
            {
              gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
              add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
            }
        }
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
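/* Compare OP0 with OP1 using CODE and jump to IF_TRUE_LABEL or
   IF_FALSE_LABEL accordingly; a NULL label means fall through.  In this
   variant the labels are rtx_code_label * and the branch probability PROB
   is handed directly to emit_cmp_and_jump_insns instead of being attached
   afterwards as a REG_BR_PROB note; the split floating-point path also
   adjusts the probability used for the ORDERED/UNORDERED test it emits
   first.  */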
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size,
                         rtx_code_label *if_false_label,
                         rtx_code_label *if_true_label, int prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          std::swap (if_true_label, if_false_label);
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx_code_label *label
            = (tem == const0_rtx || tem == CONST0_RTX (mode))
              ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          code = swap_condition (code);
          std::swap (op0, op1);
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              int first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = REG_BR_PROB_BASE / 100;
              else if (first_code == ORDERED)
                first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
              if (and_them)
                {
                  rtx_code_label *dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                         mode, size, NULL, if_true_label,
                                         first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}