rtx
rtx_vector_builder::find_cached_value ()
{
  if (encoded_nelts () != 1)
    return NULL_RTX;

  rtx elt = (*this)[0];

  if (GET_MODE_CLASS (m_mode) == MODE_VECTOR_BOOL)
    {
      if (elt == const1_rtx || elt == constm1_rtx)
        return CONST1_RTX (m_mode);
      else if (elt == const0_rtx)
        return CONST0_RTX (m_mode);
      else
        gcc_unreachable ();
    }

  /* We can be called before the global vector constants are set up,
     but in that case we'll just return null.  */
  scalar_mode inner_mode = GET_MODE_INNER (m_mode);
  if (elt == CONST0_RTX (inner_mode))
    return CONST0_RTX (m_mode);
  else if (elt == CONST1_RTX (inner_mode))
    return CONST1_RTX (m_mode);
  else if (elt == CONSTM1_RTX (inner_mode))
    return CONSTM1_RTX (m_mode);

  return NULL_RTX;
}
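/* A minimal standalone sketch (not GCC code) of the caching idea in
   find_cached_value above: when a builder encodes exactly one repeated
   element, the whole vector can be returned from a small table of shared
   constants instead of being rebuilt.  The names below (cached_vector,
   CACHE_NELTS) are hypothetical.  */

#include <stddef.h>

#define CACHE_NELTS 4

static const int cache_zero[CACHE_NELTS] = { 0, 0, 0, 0 };
static const int cache_one[CACHE_NELTS] = { 1, 1, 1, 1 };
static const int cache_m1[CACHE_NELTS] = { -1, -1, -1, -1 };

/* Return a shared constant vector if ELT is one of the cached scalars,
   otherwise NULL (mirroring the NULL_RTX early exits above).  */
static const int *
cached_vector (int elt)
{
  if (elt == 0)
    return cache_zero;
  if (elt == 1)
    return cache_one;
  if (elt == -1)
    return cache_m1;
  return NULL;
}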
static inline int
const0_operand_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
#line 577 "../.././gcc/config/i386/predicates.md"
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
}
static inline int
zero_extended_scalar_load_operand_1 (rtx op,
                                     enum machine_mode mode ATTRIBUTE_UNUSED)
#line 747 "../.././gcc/config/i386/predicates.md"
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);
  if (!op)
    return 0;
  if (GET_CODE (op) != CONST_VECTOR)
    return 0;
  n_elts = (GET_MODE_SIZE (GET_MODE (op))
            / GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return 0;
    }
  return 1;
}
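/* Standalone sketch (not GCC code) of the element test above: a constant
   vector counts as a zero-extended scalar load when every element except
   element 0 is zero; element 0 itself may be anything.  Note the loop
   stops before index 0, exactly like the predicate's loop.  */
#include <stdbool.h>
#include <stddef.h>

static bool
is_zero_extended_scalar (const int *elts, size_t n_elts)
{
  for (size_t i = n_elts; i-- > 1; )
    if (elts[i] != 0)
      return false;
  return true;  /* elts[0] is deliberately not checked.  */
}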
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (GET_MODE_CLASS (mode) == MODE_FLOAT
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab[code] == NULL))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL_RTX,
                                           prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                         mode, size, NULL_RTX, if_true_label,
                                         prob);
            }
        }

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label);
      if (prob != -1 && profile_status != PROFILE_ABSENT)
        {
          for (last = NEXT_INSN (last);
               last && NEXT_INSN (last);
               last = NEXT_INSN (last))
            if (JUMP_P (last))
              break;

          if (!last
              || !JUMP_P (last)
              || NEXT_INSN (last)
              || !any_condjump_p (last))
            {
              if (dump_file)
                fprintf (dump_file, "Failed to add probability note\n");
            }
          else
            {
              gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
              add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
            }
        }
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
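/* A standalone sketch (not GCC's exact split_comparison table) of the
   decomposition used above.  The underlying identities are:
     a <  b   ==   ordered (a, b)   &&  a <  b    ("and them")
     a ?= b   ==   unordered (a, b) ||  a == b    ("or them")
   so the first jump tests (UN)ORDERED and the second jump tests the
   remaining relation, which is what the gcc_assert on first_code above
   relies on when NaNs cannot occur.  */
#include <stdbool.h>
#include <math.h>

static bool
fp_lt_split (double a, double b)
{
  /* ordered && less-than; identical to a < b for all inputs.  */
  return (!isnan (a) && !isnan (b)) && a < b;
}

static bool
fp_uneq_split (double a, double b)
{
  /* unordered || equal; identical to the UNEQ relation.  */
  return (isnan (a) || isnan (b)) || a == b;
}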
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();

        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
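/* Standalone illustration (not GCC code) of the single-bit-test rewrite
   handled above: jumping on (x >> c) & 1 is equivalent to jumping on
   x & (1 << c), so do_jump may restore whichever form the target's
   prefer_and_bit_test heuristic reports as cheaper.  */

static int
bit_test_shift_form (unsigned int x, unsigned int c)
{
  return (x >> c) & 1u;
}

static int
bit_test_and_form (unsigned int x, unsigned int c)
{
  return (x & (1u << c)) != 0;
}

/* For all x and all c in [0, 31]:
   bit_test_shift_form (x, c) == bit_test_and_form (x, c).  */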
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size,
                         rtx if_false_label, rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask
                    = (HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();

        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or
                   ORDERED comparisons, canonicalize on UNORDERED for
                   the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label,
                               if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;

      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;

      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;

      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
    }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
         __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
static void
initialize_uninitialized_regs (void)
{
  basic_block bb;
  bitmap already_genned = BITMAP_ALLOC (NULL);

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  df_analyze ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      bitmap lr = DF_LR_IN (bb);
      bitmap ur = DF_LIVE_IN (bb);
      bitmap_clear (already_genned);

      FOR_BB_INSNS (bb, insn)
        {
          df_ref use;
          if (!NONDEBUG_INSN_P (insn))
            continue;

          FOR_EACH_INSN_USE (use, insn)
            {
              unsigned int regno = DF_REF_REGNO (use);

              /* Only do this for the pseudos.  */
              if (regno < FIRST_PSEUDO_REGISTER)
                continue;

              /* Do not generate multiple moves for the same regno.
                 This is common for sequences of subreg operations.
                 They would be deleted during combine but there is no
                 reason to churn the system.  */
              if (bitmap_bit_p (already_genned, regno))
                continue;

              /* A use is MUST uninitialized if it reaches the top of
                 the block from the inside of the block (the lr test)
                 and no def for it reaches the top of the block from
                 outside of the block (the ur test).  */
              if (bitmap_bit_p (lr, regno)
                  && (!bitmap_bit_p (ur, regno)))
                {
                  rtx_insn *move_insn;
                  rtx reg = DF_REF_REAL_REG (use);

                  bitmap_set_bit (already_genned, regno);

                  start_sequence ();
                  emit_move_insn (reg, CONST0_RTX (GET_MODE (reg)));
                  move_insn = get_insns ();
                  end_sequence ();
                  emit_insn_before (move_insn, insn);
                  if (dump_file)
                    fprintf (dump_file,
                             "adding initialization in %s of reg %d in block %d for insn %d.\n",
                             current_function_name (), regno, bb->index,
                             INSN_UID (insn));
                }
            }
        }
    }

  /* Cleanup, as in upstream init-regs.c: drop the live problem added
     above at -O1 and free the scratch bitmap.  */
  if (optimize == 1)
    df_remove_problem (df_live);

  BITMAP_FREE (already_genned);
}
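/* Standalone sketch (not GCC code) of the "MUST uninitialized" test in
   the pass above: a pseudo is surely uninitialized at the top of a block
   when the LR problem says it is live into the block, but the LIVE (UR)
   problem says no definition of it reaches the block from outside.  */
#include <stdbool.h>

static bool
must_be_uninitialized_p (bool live_in /* lr bit */,
                         bool def_reaches /* ur bit */)
{
  return live_in && !def_reaches;
}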
bool
legitimate_constant_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
        {
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            return false;
          x = XEXP (x, 0);
        }

      /* Only some unspecs are valid as "constants".  */
      if (GET_CODE (x) == UNSPEC)
        switch (XINT (x, 1))
          {
          case UNSPEC_GOTOFF:
            return false;
          case UNSPEC_TPOFF:
          case UNSPEC_NTPOFF:
            x = XVECEXP (x, 0, 0);
            return false;
          case UNSPEC_DTPOFF:
            x = XVECEXP (x, 0, 0);
            return false;
          default:
            return false;
          }

      /* We must have drilled down to a symbol.  */
      if (GET_CODE (x) == LABEL_REF)
        return true;
      if (GET_CODE (x) != SYMBOL_REF)
        return false;
      /* FALLTHRU */

    case SYMBOL_REF:
      break;

    case PLUS:
      {
        rtx left = XEXP (x, 0);
        rtx right = XEXP (x, 1);
        bool left_is_constant = legitimate_constant_p (left);
        bool right_is_constant = legitimate_constant_p (right);
        return left_is_constant && right_is_constant;
      }
      break;
    /* APPLE LOCAL end dynamic-no-pic */

    case CONST_DOUBLE:
      if (GET_MODE (x) == TImode
          && x != CONST0_RTX (TImode))
        return false;
      break;

    case CONST_VECTOR:
      return false;

    default:
      break;
    }

  /* Otherwise we handle everything else in the move patterns.  */
  return true;
}
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
        = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR,
                                 TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask
                    = (HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  tree t = build_int_2 (mask, 0);
                  TREE_TYPE (t) = argtype;
                  do_jump (build (BIT_AND_EXPR, argtype, arg, t),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or
                   ORDERED comparisons, canonicalize on UNORDERED for
                   the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label,
                               if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;

      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;

      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;

      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
    }
      break;

      /* Special case:
         __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        {
          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && GET_CODE (XEXP (temp, 0)) == REG)
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size,
                         rtx_code_label *if_false_label,
                         rtx_code_label *if_true_label, int prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          std::swap (if_true_label, if_false_label);
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx_code_label *label = (tem == const0_rtx
                                   || tem == CONST0_RTX (mode))
                                  ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          code = swap_condition (code);
          std::swap (op0, op1);
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              int first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = REG_BR_PROB_BASE / 100;
              else if (first_code == ORDERED)
                first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
              if (and_them)
                {
                  rtx_code_label *dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                         mode, size, NULL, if_true_label,
                                         first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
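/* Standalone sketch (not GCC code) of the probability heuristic above:
   branch probabilities are fixed-point numbers scaled by
   REG_BR_PROB_BASE (10000 in GCC), and when a comparison is split the
   leading UNORDERED test is assumed taken about 1% of the time, a
   leading ORDERED test about 99%.  */
#define PROB_BASE 10000

static int
split_first_prob (int first_is_unordered)
{
  return first_is_unordered
         ? PROB_BASE / 100               /* ~1% taken */
         : PROB_BASE - PROB_BASE / 100;  /* ~99% taken */
}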
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size,
                         rtx if_false_label, rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}