Example #1
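/* Return true if expanding the switch as a series of bit tests is
   likely profitable.  RANGE is the span between the lowest and
   highest case value, UNIQ is the number of distinct case targets
   (the default not counted), COUNT is the number of comparisons
   needed, and SPEED_P selects speed rather than size costs.  */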
static bool
expand_switch_using_bit_tests_p (tree range,
				 unsigned int uniq,
				 unsigned int count, bool speed_p)
{
  return (((uniq == 1 && count >= 3)
	   || (uniq == 2 && count >= 5)
	   || (uniq == 3 && count >= 6))
	  && lshift_cheap_p (speed_p)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0);
}
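As a rough illustration of what this heuristic is guarding, here is a hand-written sketch (not GCC output; the case values and mask are made up for the example): cases that share a single target collapse into one shift-and-mask test.

/* Hand-written sketch of the bit-test idea, not generated code.  */
int
classify (unsigned int x)
{
  /* Original form: switch (x) { case 0: case 2: case 5: return 1; }
     Here uniq == 1 and count == 3, so the heuristic above says yes.  */
  if (x < 32 && ((1u << x) & ((1u << 0) | (1u << 2) | (1u << 5))))
    return 1;
  return 0;
}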
Example #2
static void
emit_case_bit_tests (gswitch *swtch, tree index_expr,
		     tree minval, tree range, tree maxval)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  unsigned int i, j, k;
  unsigned int count;

  basic_block switch_bb = gimple_bb (swtch);
  basic_block default_bb, new_default_bb, new_bb;
  edge default_edge;
  bool update_dom = dom_info_available_p (CDI_DOMINATORS);

  vec<basic_block> bbs_to_fix_dom = vNULL;

  tree index_type = TREE_TYPE (index_expr);
  tree unsigned_index_type = unsigned_type_for (index_type);
  unsigned int branch_num = gimple_switch_num_labels (swtch);

  gimple_stmt_iterator gsi;
  gassign *shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);
  int prec = TYPE_PRECISION (word_type_node);
  wide_int wone = wi::one (prec);

  memset (&test, 0, sizeof (test));

  /* Get the edge for the default case.  */
  tmp = gimple_switch_default_label (swtch);
  default_bb = label_to_block (CASE_LABEL (tmp));
  default_edge = find_edge (switch_bb, default_bb);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 1; i < branch_num; i++)
    {
      unsigned int lo, hi;
      tree cs = gimple_switch_label (swtch, i);
      tree label = CASE_LABEL (cs);
      edge e = find_edge (switch_bb, label_to_block (label));
      for (k = 0; k < count; k++)
	if (e == test[k].target_edge)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
	  test[k].mask = wi::zero (prec);
	  test[k].target_edge = e;
	  test[k].label = label;
	  test[k].bits = 1;
	  count++;
	}
      else
        test[k].bits++;

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					  CASE_LOW (cs), minval));
      if (CASE_HIGH (cs) == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					    CASE_HIGH (cs), minval));

      for (j = lo; j <= hi; j++)
	test[k].mask |= wi::lshift (wone, j);
    }

  qsort (test, count, sizeof (*test), case_bit_test_cmp);

  /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
     the minval subtractions, but it might make the mask constants more
     expensive.  So, compare the costs.  */
  if (compare_tree_int (minval, 0) > 0
      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
    {
      int cost_diff;
      HOST_WIDE_INT m = tree_to_uhwi (minval);
      rtx reg = gen_raw_REG (word_mode, 10000);
      bool speed_p = optimize_bb_for_speed_p (gimple_bb (swtch));
      cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
					      GEN_INT (-m)), speed_p);
      for (i = 0; i < count; i++)
	{
	  rtx r = immed_wide_int_const (test[i].mask, word_mode);
	  cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	  r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
	  cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	}
      if (cost_diff > 0)
	{
	  for (i = 0; i < count; i++)
	    test[i].mask = wi::lshift (test[i].mask, m);
	  minval = build_zero_cst (TREE_TYPE (minval));
	  range = maxval;
	}
    }

  /* We generate two jumps to the default case label.
     Split the default edge, so that we don't have to do any PHI node
     updating.  */
  new_default_bb = split_edge (default_edge);

  if (update_dom)
    {
      bbs_to_fix_dom.create (10);
      bbs_to_fix_dom.quick_push (switch_bb);
      bbs_to_fix_dom.quick_push (default_bb);
      bbs_to_fix_dom.quick_push (new_default_bb);
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (switch_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  /* if (idx > range) goto default */
  range = force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
  tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
  new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
  if (update_dom)
    bbs_to_fix_dom.quick_push (new_bb);
  gcc_assert (gimple_bb (swtch) == new_bb);
  gsi = gsi_last_bb (new_bb);

  /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
     of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
  if (update_dom)
    {
      vec<basic_block> dom_bbs;
      basic_block dom_son;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
      FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
	{
	  edge e = find_edge (new_bb, dom_son);
	  if (e && single_pred_p (e->dest))
	    continue;
	  set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
	  bbs_to_fix_dom.safe_push (dom_son);
	}
      dom_bbs.release ();
    }
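Sketched below in plain C is the shape of the dispatch sequence emit_case_bit_tests builds from the collected test[] entries, assuming the usual one-shift-per-switch, one-AND-per-cluster lowering; the mask values, the range bound 31, and the return codes are illustrative only.

/* Hypothetical C rendering of the emitted branch sequence.  */
static int
bit_test_dispatch (unsigned long x, unsigned long minval)
{
  unsigned long idx = x - minval;   /* idx = (unsigned) x - minval    */
  unsigned long csui;
  if (idx > 31)                     /* if (idx > range) goto default  */
    goto dflt;
  csui = 1ul << idx;                /* csui = 1 << idx                */
  if (csui & 0x25ul)                /* test[0].mask: cases 0, 2, 5    */
    return 1;
  if (csui & 0x18ul)                /* test[1].mask: cases 3, 4       */
    return 2;
 dflt:
  return 0;
}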
Example #3
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
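The BIT_AND_EXPR case above exists because fold_single_bit_test() canonicalizes mask tests into shifts; both forms test the same bit, as this hand-written sketch shows (assuming 0 <= c < 32):

/* The two equivalent single-bit tests the BIT_AND_EXPR case chooses
   between; illustration only, not GCC output.  */
int
bit_is_set (unsigned int x, unsigned int c)
{
  int shifted = (x >> c) & 1;          /* canonical folded form       */
  int masked = (x & (1u << c)) != 0;   /* form restored when
                                          prefer_and_bit_test() says
                                          the mask compare is cheaper */
  return shifted == masked;            /* always 1 for c < 32         */
}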
Example #4
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();
              
            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)        and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
 
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
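Both versions of do_jump expand TRUTH_AND_EXPR and TRUTH_OR_EXPR as bitwise operations once branches get expensive; the threshold 4 comes from the BRANCH_COST checks above. In source terms the trade-off looks like this hand-written sketch:

/* With cheap branches, a && b is two conditional jumps; past the
   BRANCH_COST >= 4 threshold it is evaluated as one bitwise AND and
   a single jump.  Illustration only, not GCC output.  */
int
both_positive (int a, int b)
{
  /* Branchy form:     if (a <= 0) goto no; if (b <= 0) goto no;    */
  /* Branch-poor form: t = (a > 0) & (b > 0); if (t == 0) goto no;  */
  return (a > 0) & (b > 0);
}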
Example #5
void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;

  timevar_push (TV_EXPAND);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->dont_save_pending_sizes_p = 1;

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_eq, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_eq, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
	}
    }

  gimple_set_body (fndecl, NULL);
  if (DECL_STRUCT_FUNCTION (fndecl) == 0
      && !cgraph_node (fndecl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (fndecl) != 0)
	DECL_INITIAL (fndecl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}
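An illustrative definition that would trip the warn_larger_than check at the end of tree_rest_of_compilation; the 4096-byte size and the -Wlarger-than=64 option are made-up values for the example.

/* Compiled with -Wlarger-than=64, the return type below is 4096
   bytes, so the "size of return value of %q+D is %u bytes" warning
   above would fire.  Illustration only.  */
struct blob { char bytes[4096]; };

struct blob
make_blob (void)
{
  struct blob b = { { 0 } };
  return b;
}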
Example #6
static tree
avm2_handle_cconv_attribute (tree *node, tree name,
                             tree args,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
    if (TREE_CODE (*node) != FUNCTION_TYPE
            && TREE_CODE (*node) != METHOD_TYPE
            && TREE_CODE (*node) != FIELD_DECL
            && TREE_CODE (*node) != TYPE_DECL)
    {
        warning (OPT_Wattributes, "%qs attribute only applies to functions",
                 IDENTIFIER_POINTER (name));
        *no_add_attrs = true;
        return NULL_TREE;
    }

    /* Can combine regparm with all attributes but fastcall.  */
    if (is_attribute_p ("regparm", name))
    {
        tree cst;

        if (lookup_attribute ("fastcall", TYPE_ATTRIBUTES (*node)))
        {
            error ("fastcall and regparm attributes are not compatible");
        }

        cst = TREE_VALUE (args);
        if (TREE_CODE (cst) != INTEGER_CST)
        {
            warning (OPT_Wattributes,
                     "%qs attribute requires an integer constant argument",
                     IDENTIFIER_POINTER (name));
            *no_add_attrs = true;
        }
        else if (compare_tree_int (cst, REGPARM_MAX) > 0)
        {
            warning (OPT_Wattributes, "argument to %qs attribute larger than %d",
                     IDENTIFIER_POINTER (name), REGPARM_MAX);
            *no_add_attrs = true;
        }
        return NULL_TREE;
    }

    /* Can combine fastcall with stdcall (redundant) and sseregparm.  */
    /* APPLE LOCAL begin regparmandstackparm */
    if (is_attribute_p ("fastcall", name)
            || is_attribute_p ("regparmandstackparm", name))
        /* APPLE LOCAL end regparmandstackparm */
    {
        if (lookup_attribute ("cdecl", TYPE_ATTRIBUTES (*node)))
        {
            error ("fastcall and cdecl attributes are not compatible");
        }
        if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (*node)))
        {
            error ("fastcall and stdcall attributes are not compatible");
        }
        if (lookup_attribute ("regparm", TYPE_ATTRIBUTES (*node)))
        {
            error ("fastcall and regparm attributes are not compatible");
        }
    }

    /* Can combine stdcall with fastcall (redundant), regparm and
       sseregparm.  */
    else if (is_attribute_p ("stdcall", name))
    {
        if (lookup_attribute ("cdecl", TYPE_ATTRIBUTES (*node)))
        {
            error ("stdcall and cdecl attributes are not compatible");
        }
        /* APPLE LOCAL begin regparmandstackparm */
        if (lookup_attribute ("fastcall", TYPE_ATTRIBUTES (*node))
                || lookup_attribute ("regparmandstackparm", TYPE_ATTRIBUTES (*node)))
            /* APPLE LOCAL end regparmandstackparm */
        {
            error ("stdcall and fastcall attributes are not compatible");
        }
    }

    /* Can combine cdecl with regparm and sseregparm.  */
    else if (is_attribute_p ("cdecl", name))
    {
        if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (*node)))
        {
            error ("stdcall and cdecl attributes are not compatible");
        }
        if (lookup_attribute ("fastcall", TYPE_ATTRIBUTES (*node)))
        {
            error ("fastcall and cdecl attributes are not compatible");
        }
    }

    /* Can combine sseregparm with all attributes.  */

    return NULL_TREE;
}
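A few hypothetical declarations exercising the handler above (illustrative user code, not from the port itself):

/* With this handler, the second declaration reports "fastcall and
   cdecl attributes are not compatible", and the third warns that
   the regparm argument is larger than REGPARM_MAX.  */
extern void ok_fn (int) __attribute__ ((fastcall));
extern void bad_fn (int) __attribute__ ((fastcall, cdecl));
extern void bad_regparm (int) __attribute__ ((regparm (99)));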
Example #7
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;

	  /* Strip narrowing integral type conversions.  */
	  while ((TREE_CODE (exp0) == NOP_EXPR
		  || TREE_CODE (exp0) == CONVERT_EXPR
		  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
				       << TREE_INT_CST_LOW (shift);
		  tree t = build_int_2 (mask, 0);
		  TREE_TYPE (t) = argtype;
		  do_jump (build (BIT_AND_EXPR, argtype, arg, t),
			   clr_label, set_label);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
          != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
      {
        rtx label1 = gen_label_rtx ();
        drop_through_label = gen_label_rtx ();

        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

        start_cleanup_deferral ();
        /* Now the THEN-expression.  */
        do_jump (TREE_OPERAND (exp, 1),
                 if_false_label ? if_false_label : drop_through_label,
                 if_true_label ? if_true_label : drop_through_label);
        /* In case the do_jump just above never jumps.  */
        do_pending_stack_adjust ();
        emit_label (label1);

        /* Now the ELSE-expression.  */
        do_jump (TREE_OPERAND (exp, 2),
           if_false_label ? if_false_label : drop_through_label,
           if_true_label ? if_true_label : drop_through_label);
        end_cleanup_deferral ();
      }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                 fold (build (EQ_EXPR, TREE_TYPE (exp),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))),
                 fold (build (EQ_EXPR, TREE_TYPE (exp),
                  fold (build1 (IMAGPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (IMAGPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                 fold (build (NE_EXPR, TREE_TYPE (exp),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))),
                 fold (build (NE_EXPR, TREE_TYPE (exp),
                    fold (build1 (IMAGPART_EXPR,
                      TREE_TYPE (inner_type),
                      exp0)),
                    fold (build1 (IMAGPART_EXPR,
                      TREE_TYPE (inner_type),
                      exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
      }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)	and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
	tree fndecl = get_callee_fndecl (exp);
	tree arglist = TREE_OPERAND (exp, 1);

	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	    && arglist != NULL_TREE
	    && TREE_CHAIN (arglist) != NULL_TREE)
	  {
	    rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						  if_true_label);

	    if (seq != NULL_RTX)
	      {
		emit_insn (seq);
		return;
	      }
	  }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	{
	  /* The RTL optimizers prefer comparisons against pseudos.  */
	  if (GET_CODE (temp) == SUBREG)
	    {
	      /* Compare promoted variables in their promoted mode.  */
	      if (SUBREG_PROMOTED_VAR_P (temp)
		  && GET_CODE (XEXP (temp, 0)) == REG)
		temp = XEXP (temp, 0);
	      else
		temp = copy_to_reg (temp);
	    }
	  do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				   NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				   GET_MODE (temp), NULL_RTX,
				   if_false_label, if_true_label);
	}
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
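The CALL_EXPR special case above is what keeps a condition wrapped in __builtin_expect from being materialized as a 0/1 value first. Typical usage looks like this (illustrative):

/* expand_builtin_expect_jump lets the comparison inside
   __builtin_expect expand directly into the conditional jump.  */
#define unlikely(x) __builtin_expect ((x), 0)

int
checked_div (int a, int b)
{
  if (unlikely (b == 0))
    return 0;
  return a / b;
}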
Example #8
static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, SAVE_EXPRs and
     anything else not counted as an expression cannot usefully be
     folded further at this point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement
      || code == SAVE_EXPR)
    return expr;

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
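      /* Descriptive note (added): operands 2 and 3 of an ARRAY_REF hold
	 the lower bound and element size; only the base and the index
	 are folded below.  */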
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != MODIFY_EXPR
	  && code != PREDECREMENT_EXPR
	  && code != PREINCREMENT_EXPR
	  && code != POSTDECREMENT_EXPR
	  && code != POSTINCREMENT_EXPR)
	op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret);
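      /* Descriptive note (added): the shift diagnostics below fire only
	 when an operand becomes an INTEGER_CST through this folding;
	 operands that were literal constants to begin with are excluded
	 by the ORIG_OP0 and ORIG_OP1 checks.  */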
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if (compare_tree_int (op1,
				     TYPE_PRECISION (TREE_TYPE (orig_op0)))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
	op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
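      /* Descriptive note (added): a constant OP0 that short-circuits
	 shields the unevaluated arm, so under C99 rules "0 && e" can
	 remain a constant expression even when E is not; OP1's operand
	 flag is merged only when OP0 does not decide the result, and
	 the *maybe_const_itself shielding applies regardless of the
	 selected standard.  */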
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
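      /* Descriptive note (added): as for && and ||, a constant
	 condition shields the flags of whichever arm it makes
	 unevaluated.  */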
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
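A brief illustration (not part of the GCC sources above): with the C
front end's delayed folding, operands like the ones below reach
c_fully_fold_internal as unfolded arithmetic, become INTEGER_CSTs only
during this pass, and therefore take the diagnostic paths in the
binary-operation case.  The function name f and the 32-bit int
assumption are hypothetical.

/* Hypothetical inputs; names and values are for illustration only.  */
void
f (int x)
{
  int a = (0 - 1) << 2;	 /* op0 folds to a negative constant:
			    "left shift of negative value".  */
  int b = x << (31 + 1); /* op1 folds to 32 >= TYPE_PRECISION (int)
			    on a 32-bit int target:
			    "left shift count >= width of type".  */
  int c = x >> (0 - 1);	 /* op1 folds to a negative count:
			    "right shift count is negative".  */
  int d = x / (1 - 1);	 /* op1 folds to zero: warn_for_div_by_zero.  */
  (void) a; (void) b; (void) c; (void) d;
}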