Example #1
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate the ADDR_EXPR into the loop as that is likely
	 adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
	{
	  all = false;
	  continue;
	}

      push_stmt_changes (&use_stmt);

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	use_stmt = gsi_stmt (gsi);
      }
      all &= result;

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && (TREE_CODE (use_rhs) == SSA_NAME
	      || (CONVERT_EXPR_P (use_rhs)
		  && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all;
}
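A hand-written illustration of the effect this routine has at the source level (plain C for exposition, not GCC's internal API; the function names are invented for the example): once the ADDR_EXPR `&a` defining `p` is propagated into a dereferencing use, the dereference collapses, and the leftover copy/conversion chain becomes dead, which is exactly what the cleanup at the bottom of the loop removes.

/* Before: name = p, rhs = &a (the ADDR_EXPR being propagated).  */
int before (void)
{
  int a = 42;
  int *p = &a;   /* defining statement of p */
  return *p;     /* use_stmt: a dereference of p */
}

/* After: *p has been replaced by a; the dead assignment to p is gone.  */
int after (void)
{
  int a = 42;
  return a;
}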
Example #2
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
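      /* FALLTHRU */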
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
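      /* FALLTHRU */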
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
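The BIT_AND_EXPR case above undoes fold_single_bit_test () when the AND-with-mask form is better for a jump test. A minimal, self-contained check of the identity being relied on (hand-written; the real decision in prefer_and_bit_test () also weighs target RTX costs, which this sketch does not model):

#include <assert.h>

/* Testing bit C of X via shift-and-mask ...  */
static int test_bit_shift (unsigned x, unsigned c) { return (x >> c) & 1u; }
/* ... is equivalent to AND'ing X with the single-bit mask (1 << C).  */
static int test_bit_mask (unsigned x, unsigned c) { return (x & (1u << c)) != 0; }

int main (void)
{
  for (unsigned c = 0; c < 32; c++)
    for (unsigned x = 0; x < 1024; x++)
      assert (test_bit_shift (x, c) == test_bit_mask (x, c));
  return 0;
}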
Example #3
tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt;
  tree type = TREE_TYPE (op0);

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (op1)));

  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
		     op1, build_int_cst (type, 0));
  else if (TREE_CODE (type) == REAL_TYPE
	   && (flag_sanitize & SANITIZE_FLOAT_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
		     op1, build_real (type, dconst0));
  else
    return NULL_TREE;

  /* We check INT_MIN / -1 only for signed types.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE)
      && !TYPE_UNSIGNED (type))
    {
      tree x;
      tt = fold_build2 (EQ_EXPR, boolean_type_node, op1,
			build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
		       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  If the OP0 is
     an instrumented array reference, mark it as having side effects so
     it's not folded away.  */
  if (flag_sanitize & SANITIZE_BOUNDS)
    {
      tree xop0 = op0;
      while (CONVERT_EXPR_P (xop0))
	xop0 = TREE_OPERAND (xop0, 0);
      if (TREE_CODE (xop0) == ARRAY_REF)
	{
	  TREE_SIDE_EFFECTS (xop0) = 1;
	  TREE_SIDE_EFFECTS (op0) = 1;
	}
    }
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);
  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
				     ubsan_type_descriptor (type), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= (flag_sanitize_recover & SANITIZE_DIVIDE)
	  ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
	  : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
				ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}
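A plain-C restatement of the predicate the function builds for a signed integer division op0 / op1 (illustrative only; the real code wraps a ubsan runtime call in a COND_EXPR rather than calling an ordinary function):

#include <limits.h>
#include <stdio.h>

/* Mirrors t || (op0 == TYPE_MIN_VALUE && op1 == -1) from above:
   fires on division by zero and on the lone signed-overflow case.  */
static int div_would_trap (int op0, int op1)
{
  return op1 == 0 || (op0 == INT_MIN && op1 == -1);
}

int main (void)
{
  printf ("%d\n", div_would_trap (7, 0));        /* 1: division by zero */
  printf ("%d\n", div_would_trap (INT_MIN, -1)); /* 1: INT_MIN / -1 overflows */
  printf ("%d\n", div_would_trap (7, -1));       /* 0: well defined */
  return 0;
}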
Example #4
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
			tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (type0)
      && flag_isoc99)
    {
      tree x = fold_build2 (MINUS_EXPR, unsigned_type_node, uprecm1,
			    fold_convert (op1_utype, op1));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 and later, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (TREE_TYPE (op0))
      && (cxx_dialect >= cxx11))
    {
      tree x = fold_build2 (MINUS_EXPR, unsigned_type_node, uprecm1,
			    fold_convert (op1_utype, op1));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, op0,
		       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  If the OP0 is
     an instrumented array reference, mark it as having side effects so
     it's not folded away.  */
  if (flag_sanitize & SANITIZE_BOUNDS)
    {
      tree xop0 = op0;
      while (CONVERT_EXPR_P (xop0))
	xop0 = TREE_OPERAND (xop0, 0);
      if (TREE_CODE (xop0) == ARRAY_REF)
	{
	  TREE_SIDE_EFFECTS (xop0) = 1;
	  TREE_SIDE_EFFECTS (op0) = 1;
	}
    }
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);
  t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t,
		   tt ? tt : integer_zero_node);

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc,
				     ubsan_type_descriptor (type0),
				     ubsan_type_descriptor (type1), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
	= (flag_sanitize_recover & SANITIZE_SHIFT)
	  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
	  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
				ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}
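A plain-C restatement of the two shift predicates built above, assuming a 32-bit int (illustrative only; the real code keeps the out-of-range test t separate from the per-dialect test tt and ORs them together):

#include <stdio.h>

#define PREC 32  /* TYPE_PRECISION of int on common targets */

/* C99/C11 rule for signed x << y: undefined if the count is out of
   range or any bit would be shifted out of or into the sign bit.  */
static int shift_ub_c99 (int x, int y)
{
  if ((unsigned) y > PREC - 1)
    return 1;
  return ((unsigned) x >> (PREC - 1 - y)) != 0;
}

/* C++11 rule: shifting into the sign bit is additionally allowed as
   long as the value is nonnegative and fits in the unsigned type.  */
static int shift_ub_cxx11 (int x, int y)
{
  if ((unsigned) y > PREC - 1)
    return 1;
  return x < 0 || ((unsigned) x >> (PREC - 1 - y)) > 1;
}

int main (void)
{
  printf ("%d\n", shift_ub_c99 (1, 31));   /* 1: 1 << 31 is UB in C99 */
  printf ("%d\n", shift_ub_cxx11 (1, 31)); /* 0: allowed since C++11 */
  printf ("%d\n", shift_ub_cxx11 (-1, 0)); /* 1: negative left operand */
  return 0;
}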