Example #1
File: ubsan.c Project: didemoto/gcc
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);
  tree_code code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhstype = TREE_TYPE (lhs);
  tree a, b;
  gimple g;

  /* If this is not a signed operation, don't instrument anything here.
     Also punt on bit-fields.  */
  if (!INTEGRAL_TYPE_P (lhstype)
      || TYPE_OVERFLOW_WRAPS (lhstype)
      || GET_MODE_BITSIZE (TYPE_MODE (lhstype)) != TYPE_PRECISION (lhstype))
    return;

  switch (code)
    {
    case MINUS_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
      /* Transform
	 i = u {+,-,*} 5;
	 into
	 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5);  */
      a = gimple_assign_rhs1 (stmt);
      b = gimple_assign_rhs2 (stmt);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UBSAN_CHECK_ADD
				      : code == MINUS_EXPR
				      ? IFN_UBSAN_CHECK_SUB
				      : IFN_UBSAN_CHECK_MUL, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    case NEGATE_EXPR:
      /* Represent i = -u;
	 as
	 i = UBSAN_CHECK_SUB (0, u);  */
      a = build_int_cst (lhstype, 0);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    default:
      break;
    }
}
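
A minimal caller-side sketch of the transformation (assumptions: a GCC build with UBSan support; the file and function names below are illustrative, while -fsanitize=signed-integer-overflow is the documented flag that enables this instrumentation):

/* overflow.c -- compile with:
     gcc -fsanitize=signed-integer-overflow overflow.c
   The signed PLUS_EXPR below is rewritten by the pass above into the
   UBSAN_CHECK_ADD internal call, so the overflow is reported at run
   time instead of being silent undefined behavior.  */
#include <limits.h>

int
add5 (int u)
{
  return u + 5;			/* instrumented as UBSAN_CHECK_ADD (u, 5) */
}

int
main (void)
{
  return add5 (INT_MAX) > 0;	/* overflows; ubsan diagnoses it */
}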
Example #2
File: c-ubsan.c Project: WojciechMigda/gcc
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
			tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  if (!INTEGRAL_TYPE_P (type0))
    return NULL_TREE;

  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* If this is not a signed operation, don't perform overflow checks.
     Also punt on bit-fields.  */
  if (TYPE_OVERFLOW_WRAPS (type0)
      || GET_MODE_BITSIZE (TYPE_MODE (type0)) != TYPE_PRECISION (type0)
      || (flag_sanitize & SANITIZE_SHIFT_BASE) == 0)
    ;

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  else if (code == LSHIFT_EXPR && flag_isoc99 && cxx_dialect < cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
			    fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 and later, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  else if (code == LSHIFT_EXPR && cxx_dialect >= cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
			    fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0),
			     unshare_expr (op0));
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, unshare_expr (op0),
		       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);

  enum sanitize_code recover_kind = SANITIZE_SHIFT_EXPONENT;
  tree else_t = void_node;
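  /* Choose the handler condition and recovery mode.  With only
     SANITIZE_SHIFT_BASE enabled, report TT only when the exponent is
     valid (!T && TT).  With both checks enabled, merge them into one
     condition when they recover the same way (or when trapping);
     otherwise keep TT as a separate else branch with its own call.  */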
  if (tt)
    {
      if ((flag_sanitize & SANITIZE_SHIFT_EXPONENT) == 0)
	{
	  t = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, t);
	  t = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, t, tt);
	  recover_kind = SANITIZE_SHIFT_BASE;
	}
      else
	{
	  if (flag_sanitize_undefined_trap_on_error
	      || ((!(flag_sanitize_recover & SANITIZE_SHIFT_EXPONENT))
		  == (!(flag_sanitize_recover & SANITIZE_SHIFT_BASE))))
	    t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
	  else
	    else_t = tt;
	}
    }

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc,
				     ubsan_type_descriptor (type0),
				     ubsan_type_descriptor (type1), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
	= (flag_sanitize_recover & recover_kind)
	  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
	  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
      tt = builtin_decl_explicit (bcode);
      op0 = unshare_expr (op0);
      op1 = unshare_expr (op1);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
				ubsan_encode_value (op1));
      if (else_t != void_node)
	{
	  bcode = (flag_sanitize_recover & SANITIZE_SHIFT_BASE)
		  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
		  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
	  tree else_tt = builtin_decl_explicit (bcode);
	  op0 = unshare_expr (op0);
	  op1 = unshare_expr (op1);
	  else_tt = build_call_expr_loc (loc, else_tt, 3, data,
					 ubsan_encode_value (op0),
					 ubsan_encode_value (op1));
	  else_t = fold_build3 (COND_EXPR, void_type_node, else_t,
				else_tt, void_node);
	}
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, else_t);

  return t;
}
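
A source-level sketch of what the two conditions guard (assumptions: -fsanitize=shift, which enables both SANITIZE_SHIFT_EXPONENT and SANITIZE_SHIFT_BASE; the function name is illustrative):

/* shift.c -- compile with: gcc -fsanitize=shift shift.c  */
int
shl (int x, int n)
{
  /* Exponent check (T above):  n >= 32 (or negative n) for a 32-bit
     int is diagnosed.
     Base check (TT above):     shifting set bits out of or into the
     sign bit, e.g. any left shift of a negative x, is diagnosed per
     the C99/C++11 rules quoted in the comments above.  */
  return x << n;
}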
Example #3
void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
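      /* Split MEM[base, off] into a base element plus the offset in
	 TREE_OPERAND (expr, 1), peeling an ADDR_EXPR base if present.  */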
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}
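
To make the scale/add structure above concrete, here is a simplified stand-alone model of an affine combination (assumptions: the struct and helper names below are hypothetical toys, not the GCC API; the real aff_tree stores widest_int coefficients and tree elements):

/* aff_model.c -- a toy model of the two primitives used above.  */
#include <stdio.h>

#define MAX_ELTS 8

struct aff_comb
{
  long cst;			/* constant part */
  int n;			/* number of (coefficient, element) pairs */
  long coef[MAX_ELTS];
  const char *elt[MAX_ELTS];
};

/* C := C * S, the toy analogue of aff_combination_scale.  */
static void
aff_scale (struct aff_comb *c, long s)
{
  c->cst *= s;
  for (int i = 0; i < c->n; i++)
    c->coef[i] *= s;
}

/* A := A + B, the toy analogue of aff_combination_add (no merging
   of equal elements here).  */
static void
aff_add (struct aff_comb *a, const struct aff_comb *b)
{
  a->cst += b->cst;
  for (int i = 0; i < b->n && a->n < MAX_ELTS; i++)
    {
      a->coef[a->n] = b->coef[i];
      a->elt[a->n] = b->elt[i];
      a->n++;
    }
}

int
main (void)
{
  /* Decompose ~x as the BIT_NOT_EXPR case does (~x = -x - 1):
     start from "1*x", scale by -1, then add the constant -1.  */
  struct aff_comb c = { 0, 1, { 1 }, { "x" } };
  struct aff_comb minus_one = { -1, 0, { 0 }, { 0 } };
  aff_scale (&c, -1);
  aff_add (&c, &minus_one);
  printf ("%ld*%s %+ld\n", c.coef[0], c.elt[0], c.cst);	/* -1*x -1 */
  return 0;
}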