tree
ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
                              tree op0, tree op1)
{
  tree data = ubsan_create_data ("__ubsan_overflow_data", loc, NULL,
                                 ubsan_type_descriptor (lhstype, false),
                                 NULL_TREE);
  enum built_in_function fn_code;

  switch (code)
    {
    case PLUS_EXPR:
      fn_code = BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW;
      break;
    case MINUS_EXPR:
      fn_code = BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW;
      break;
    case MULT_EXPR:
      fn_code = BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW;
      break;
    case NEGATE_EXPR:
      fn_code = BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree fn = builtin_decl_explicit (fn_code);
  return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
                              build_fold_addr_expr_loc (loc, data),
                              ubsan_encode_value (op0, true),
                              op1 ? ubsan_encode_value (op1, true)
                                  : NULL_TREE);
}

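/* For illustration only (not part of the sources above): the call built by
   ubsan_build_overflow_builtin reports a signed arithmetic overflow at run
   time, so a user-level test such as the following, compiled with
   "gcc -fsanitize=signed-integer-overflow", exercises the add, mul and
   negate handlers it selects.  Values are made up.  */
#include <limits.h>

int
main (void)
{
  volatile int a = INT_MAX, b = 2, m = INT_MIN;
  volatile int r = a + b;   /* __ubsan_handle_add_overflow  */
  r = a * b;                /* __ubsan_handle_mul_overflow  */
  r = -m;                   /* __ubsan_handle_negate_overflow  */
  return r != 0;
}
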
tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt;
  tree type = TREE_TYPE (op0);

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
              == TYPE_MAIN_VARIANT (TREE_TYPE (op1)));

  /* TODO: REAL_TYPE is not supported yet.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return NULL_TREE;

  t = fold_build2 (EQ_EXPR, boolean_type_node,
                   op1, build_int_cst (type, 0));

  /* We check INT_MIN / -1 only for signed types.  */
  if (!TYPE_UNSIGNED (type))
    {
      tree x;
      tt = fold_build2 (EQ_EXPR, boolean_type_node, op1,
                        build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
                       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);
  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_overflow_data", &loc, NULL,
                                     ubsan_type_descriptor (type, false),
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
        = flag_sanitize_recover
          ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
          : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_zero_node);

  return t;
}

tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt;
  tree type = TREE_TYPE (op0);

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
              == TYPE_MAIN_VARIANT (TREE_TYPE (op1)));

  /* TODO: REAL_TYPE is not supported yet.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return NULL_TREE;

  /* If we *know* that the divisor is not -1 or 0, we don't have to
     instrument this expression.
     ??? We could use decl_constant_value to cover up more cases.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && integer_nonzerop (op1)
      && !integer_minus_onep (op1))
    return NULL_TREE;

  t = fold_build2 (EQ_EXPR, boolean_type_node,
                   op1, build_int_cst (type, 0));

  /* We check INT_MIN / -1 only for signed types.  */
  if (!TYPE_UNSIGNED (type))
    {
      tree x;
      tt = fold_build2 (EQ_EXPR, boolean_type_node, op1,
                        build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
                       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
    }

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);
  tree data = ubsan_create_data ("__ubsan_overflow_data", loc,
                                 ubsan_type_descriptor (type), NULL_TREE);
  data = build_fold_addr_expr_loc (loc, data);
  tt = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW);
  tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                            ubsan_encode_value (op1));
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_zero_node);

  return t;
}

tree
ubsan_instrument_vla (location_t loc, tree size)
{
  tree type = TREE_TYPE (size);
  tree t, tt;

  t = fold_build2 (LE_EXPR, boolean_type_node, size, build_int_cst (type, 0));
  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_vla_data", 1, &loc,
                                     ubsan_type_descriptor (type), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
        = (flag_sanitize_recover & SANITIZE_VLA)
          ? BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE
          : BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 2, data, ubsan_encode_value (size));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}

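/* For illustration only (not part of the sources above): the check built by
   ubsan_instrument_vla fires whenever a variable-length array bound is not
   positive, so a test case like the following, compiled with
   "gcc -fsanitize=vla-bound", reaches the handler.  Values are made up.  */
int
main (void)
{
  volatile int n = 0;
  int a[n];                 /* bound is not positive; diagnosed  */
  return sizeof (a) != 0;
}
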
void
ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 3);

  /* Pick up the arguments of the UBSAN_BOUNDS call.  */
  tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
  tree index = gimple_call_arg (stmt, 1);
  tree orig_index_type = TREE_TYPE (index);
  tree bound = gimple_call_arg (stmt, 2);

  gimple_stmt_iterator gsi_orig = *gsi;

  /* Create condition "if (index > bound)".  */
  basic_block then_bb, fallthru_bb;
  gimple_stmt_iterator cond_insert_point
    = create_cond_insert_point (gsi, 0 /*before_p*/, false, true,
                                &then_bb, &fallthru_bb);
  index = fold_convert (TREE_TYPE (bound), index);
  index = force_gimple_operand_gsi (&cond_insert_point, index,
                                    true /*simple_p*/, NULL_TREE,
                                    false /*before*/, GSI_NEW_STMT);
  gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);

  /* Generate __ubsan_handle_out_of_bounds call.  */
  *gsi = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data
        = ubsan_create_data ("__ubsan_out_of_bounds_data", &loc, NULL,
                             ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
                             ubsan_type_descriptor (orig_index_type),
                             NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
        = flag_sanitize_recover
          ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
          : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
      tree fn = builtin_decl_explicit (bcode);
      tree val = force_gimple_operand_gsi (gsi, ubsan_encode_value (index),
                                           true, NULL_TREE, true,
                                           GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Get rid of the UBSAN_BOUNDS call from the IR.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (&gsi_orig, true);

  /* Point GSI to next logical statement.  */
  *gsi = gsi_start_bb (fallthru_bb);
}

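/* For illustration only (not part of the sources above): the UBSAN_BOUNDS
   internal call expanded here comes from array accesses whose index may be
   out of range, so a test such as the following, compiled with
   "gcc -fsanitize=bounds", ends up calling __ubsan_handle_out_of_bounds.
   Values are made up.  */
int
main (void)
{
  int a[4] = { 0, 1, 2, 3 };
  volatile int i = 4;
  return a[i];              /* index > bound; diagnosed  */
}
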
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
                        tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  if (!INTEGRAL_TYPE_P (type0))
    return NULL_TREE;

  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* If this is not a signed operation, don't perform overflow checks.
     Also punt on bit-fields.  */
  if (TYPE_OVERFLOW_WRAPS (type0)
      || GET_MODE_BITSIZE (TYPE_MODE (type0)) != TYPE_PRECISION (type0)
      || (flag_sanitize & SANITIZE_SHIFT_BASE) == 0)
    ;

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  else if (code == LSHIFT_EXPR && flag_isoc99 && cxx_dialect < cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
                            fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 and later, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  else if (code == LSHIFT_EXPR && cxx_dialect >= cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
                            fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0),
                             unshare_expr (op0));
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, unshare_expr (op0),
                       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);

  enum sanitize_code recover_kind = SANITIZE_SHIFT_EXPONENT;
  tree else_t = void_node;
  if (tt)
    {
      if ((flag_sanitize & SANITIZE_SHIFT_EXPONENT) == 0)
        {
          t = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, t);
          t = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, t, tt);
          recover_kind = SANITIZE_SHIFT_BASE;
        }
      else
        {
          if (flag_sanitize_undefined_trap_on_error
              || ((!(flag_sanitize_recover & SANITIZE_SHIFT_EXPONENT))
                  == (!(flag_sanitize_recover & SANITIZE_SHIFT_BASE))))
            t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
          else
            else_t = tt;
        }
    }

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc,
                                     ubsan_type_descriptor (type0),
                                     ubsan_type_descriptor (type1), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
        = (flag_sanitize_recover & recover_kind)
          ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
          : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
      tt = builtin_decl_explicit (bcode);
      op0 = unshare_expr (op0);
      op1 = unshare_expr (op1);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                ubsan_encode_value (op1));
      if (else_t != void_node)
        {
          bcode = (flag_sanitize_recover & SANITIZE_SHIFT_BASE)
                  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
                  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
          tree else_tt = builtin_decl_explicit (bcode);
          op0 = unshare_expr (op0);
          op1 = unshare_expr (op1);
          else_tt = build_call_expr_loc (loc, else_tt, 3, data,
                                         ubsan_encode_value (op0),
                                         ubsan_encode_value (op1));
          else_t = fold_build3 (COND_EXPR, void_type_node, else_t, else_tt,
                                void_node);
        }
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, else_t);

  return t;
}

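/* For illustration only (not part of the sources above): the checks built by
   this ubsan_instrument_shift variant cover both an out-of-range shift count
   and an invalid (negative or overflowing) shifted value, so a test such as
   the following, compiled with "gcc -fsanitize=shift" (or the finer-grained
   -fsanitize=shift-exponent / -fsanitize=shift-base), reaches the handler.
   Values are made up.  */
int
main (void)
{
  volatile int x = 1, big = 40, neg = -1;
  volatile int r = x << big;    /* shift count >= width of int  */
  r = neg << 1;                 /* negative shift base  */
  return r;
}
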
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (rhs);
  tree minv = NULL_TREE, maxv = NULL_TREE;

  if (TREE_CODE (type) == BOOLEAN_TYPE && (flag_sanitize & SANITIZE_BOOL))
    {
      minv = boolean_false_node;
      maxv = boolean_true_node;
    }
  else if (TREE_CODE (type) == ENUMERAL_TYPE
           && (flag_sanitize & SANITIZE_ENUM)
           && TREE_TYPE (type) != NULL_TREE
           && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
           && (TYPE_PRECISION (TREE_TYPE (type))
               < GET_MODE_PRECISION (TYPE_MODE (type))))
    {
      minv = TYPE_MIN_VALUE (TREE_TYPE (type));
      maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
    }
  else
    return;

  int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
                                   &unsignedp, &volatilep, false);
  tree utype = build_nonstandard_integer_type (modebitsize, 1);

  if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
      || (bitpos % modebitsize) != 0
      || bitsize != modebitsize
      || GET_MODE_BITSIZE (TYPE_MODE (utype)) != modebitsize
      || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return;

  location_t loc = gimple_location (stmt);
  tree ptype = build_pointer_type (TREE_TYPE (rhs));
  tree atype = reference_alias_ptr_type (rhs);
  gimple g = gimple_build_assign (make_ssa_name (ptype, NULL),
                                  build_fold_addr_expr (rhs));
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
                     build_int_cst (atype, 0));
  tree urhs = make_ssa_name (utype, NULL);
  g = gimple_build_assign (urhs, mem);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  minv = fold_convert (utype, minv);
  maxv = fold_convert (utype, maxv);
  if (!integer_zerop (minv))
    {
      g = gimple_build_assign_with_ops (MINUS_EXPR,
                                        make_ssa_name (utype, NULL),
                                        urhs, minv);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  gimple_stmt_iterator gsi2 = *gsi;
  basic_block then_bb, fallthru_bb;
  *gsi = create_cond_insert_point (gsi, true, false, true,
                                   &then_bb, &fallthru_bb);
  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
                         int_const_binop (MINUS_EXPR, maxv, minv),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs, NULL_TREE);
  update_stmt (stmt);

  gsi2 = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_invalid_value_data", &loc, NULL,
                                     ubsan_type_descriptor (type), NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
        = flag_sanitize_recover
          ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
          : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
      tree fn = builtin_decl_explicit (bcode);
      tree val = force_gimple_operand_gsi (&gsi2, ubsan_encode_value (urhs),
                                           true, NULL_TREE, true,
                                           GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
}

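/* For illustration only (not part of the sources above): the load check built
   by instrument_bool_enum_load fires when the bit pattern read for a _Bool
   (or a sufficiently narrow enum) lies outside its valid range, so a test
   such as the following, compiled with "gcc -fsanitize=bool", reaches
   __ubsan_handle_load_invalid_value.  Values are made up.  */
#include <string.h>

int
main (void)
{
  _Bool b;
  unsigned char two = 2;
  memcpy (&b, &two, sizeof b);  /* store a byte that is neither 0 nor 1  */
  return b ? 1 : 0;             /* this load is the instrumented one  */
}
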
tree
ubsan_instrument_float_cast (location_t loc, tree type, tree expr)
{
  tree expr_type = TREE_TYPE (expr);
  tree t, tt, fn, min, max;
  enum machine_mode mode = TYPE_MODE (expr_type);
  int prec = TYPE_PRECISION (type);
  bool uns_p = TYPE_UNSIGNED (type);

  /* Float to integer conversion first truncates toward zero, so
     even signed char c = 127.875f; is not problematic.
     Therefore, we should complain only if EXPR is unordered or smaller
     or equal than TYPE_MIN_VALUE - 1.0 or greater or equal than
     TYPE_MAX_VALUE + 1.0.  */
  if (REAL_MODE_FORMAT (mode)->b == 2)
    {
      /* For maximum, TYPE_MAX_VALUE might not be representable
         in EXPR_TYPE, e.g. if TYPE is 64-bit long long and
         EXPR_TYPE is IEEE single float, but TYPE_MAX_VALUE + 1.0 is
         either representable or infinity.  */
      REAL_VALUE_TYPE maxval = dconst1;
      SET_REAL_EXP (&maxval, REAL_EXP (&maxval) + prec - !uns_p);
      real_convert (&maxval, mode, &maxval);
      max = build_real (expr_type, maxval);

      /* For unsigned, assume -1.0 is always representable.  */
      if (uns_p)
        min = build_minus_one_cst (expr_type);
      else
        {
          /* TYPE_MIN_VALUE is generally representable (or -inf),
             but TYPE_MIN_VALUE - 1.0 might not be.  */
          REAL_VALUE_TYPE minval = dconstm1, minval2;
          SET_REAL_EXP (&minval, REAL_EXP (&minval) + prec - 1);
          real_convert (&minval, mode, &minval);
          real_arithmetic (&minval2, MINUS_EXPR, &minval, &dconst1);
          real_convert (&minval2, mode, &minval2);
          if (real_compare (EQ_EXPR, &minval, &minval2)
              && !real_isinf (&minval))
            {
              /* If TYPE_MIN_VALUE - 1.0 is not representable and
                 rounds to TYPE_MIN_VALUE, we need to subtract more.
                 As REAL_MODE_FORMAT (mode)->p is the number of base digits,
                 we want to subtract a number that will be
                 1 << (REAL_MODE_FORMAT (mode)->p - 1) times smaller than
                 minval.  */
              minval2 = dconst1;
              gcc_assert (prec > REAL_MODE_FORMAT (mode)->p);
              SET_REAL_EXP (&minval2,
                            REAL_EXP (&minval2) + prec - 1
                            - REAL_MODE_FORMAT (mode)->p + 1);
              real_arithmetic (&minval2, MINUS_EXPR, &minval, &minval2);
              real_convert (&minval2, mode, &minval2);
            }
          min = build_real (expr_type, minval2);
        }
    }
  else if (REAL_MODE_FORMAT (mode)->b == 10)
    {
      /* For _Decimal128 up to 34 decimal digits, - sign,
         dot, e, exponent.  */
      char buf[64];
      mpfr_t m;
      int p = REAL_MODE_FORMAT (mode)->p;
      REAL_VALUE_TYPE maxval, minval;

      /* Use mpfr_snprintf rounding to compute the smallest
         representable decimal number greater or equal than
         1 << (prec - !uns_p).  */
      mpfr_init2 (m, prec + 2);
      mpfr_set_ui_2exp (m, 1, prec - !uns_p, GMP_RNDN);
      mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, m);
      decimal_real_from_string (&maxval, buf);
      max = build_real (expr_type, maxval);

      /* For unsigned, assume -1.0 is always representable.  */
      if (uns_p)
        min = build_minus_one_cst (expr_type);
      else
        {
          /* Use mpfr_snprintf rounding to compute the largest
             representable decimal number less or equal than
             (-1 << (prec - 1)) - 1.  */
          mpfr_set_si_2exp (m, -1, prec - 1, GMP_RNDN);
          mpfr_sub_ui (m, m, 1, GMP_RNDN);
          mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, m);
          decimal_real_from_string (&minval, buf);
          min = build_real (expr_type, minval);
        }
      mpfr_clear (m);
    }
  else
    return NULL_TREE;

  if (flag_sanitize_undefined_trap_on_error)
    fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      /* Create the __ubsan_handle_float_cast_overflow fn call.  */
      tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data", NULL,
                                     NULL, ubsan_type_descriptor (expr_type),
                                     ubsan_type_descriptor (type), NULL_TREE);
      enum built_in_function bcode
        = flag_sanitize_recover
          ? BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW
          : BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW_ABORT;
      fn = builtin_decl_explicit (bcode);
      fn = build_call_expr_loc (loc, fn, 2,
                                build_fold_addr_expr_loc (loc, data),
                                ubsan_encode_value (expr, false));
    }

  t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
  tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
  return fold_build3 (COND_EXPR, void_type_node,
                      fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt),
                      fn, integer_zero_node);
}

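/* For illustration only (not part of the sources above): the range check
   built by ubsan_instrument_float_cast fires when a floating-point value is
   converted to an integer type that cannot represent it, so a test such as
   the following, compiled with "gcc -fsanitize=float-cast-overflow", reaches
   the handler.  Values are made up.  */
int
main (void)
{
  volatile double d = 1e30;
  volatile int i = d;       /* value is far outside int's range  */
  return i != 0;
}
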
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
                        tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);
  tree precm1 = build_int_cst (type1, op0_prec - 1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (precm1 - y)
     if non-zero, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (type0)
      && flag_isoc99)
    {
      tree x = fold_build2 (MINUS_EXPR, integer_type_node, precm1, op1);
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11/C++14, the following:
     x < 0 || ((unsigned) x >> (precm1 - y))
     if > 1, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (TREE_TYPE (op0))
      && (cxx_dialect == cxx11 || cxx_dialect == cxx1y))
    {
      tree x = fold_build2 (MINUS_EXPR, integer_type_node, precm1, op1);
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, op0,
                       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);

  tree data = ubsan_create_data ("__ubsan_shift_data", loc,
                                 ubsan_type_descriptor (type0),
                                 ubsan_type_descriptor (type1), NULL_TREE);
  data = build_fold_addr_expr_loc (loc, data);

  t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t,
                   tt ? tt : integer_zero_node);
  tt = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS);
  tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                            ubsan_encode_value (op1));
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_zero_node);

  return t;
}

tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt;
  tree type = TREE_TYPE (op0);

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
              == TYPE_MAIN_VARIANT (TREE_TYPE (op1)));

  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
                     op1, build_int_cst (type, 0));
  else if (TREE_CODE (type) == REAL_TYPE
           && (flag_sanitize & SANITIZE_FLOAT_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
                     op1, build_real (type, dconst0));
  else
    return NULL_TREE;

  /* We check INT_MIN / -1 only for signed types.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE)
      && !TYPE_UNSIGNED (type))
    {
      tree x;
      tt = fold_build2 (EQ_EXPR, boolean_type_node, op1,
                        build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
                       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  If the OP0 is
     an instrumented array reference, mark it as having side effects so
     it's not folded away.  */
  if (flag_sanitize & SANITIZE_BOUNDS)
    {
      tree xop0 = op0;
      while (CONVERT_EXPR_P (xop0))
        xop0 = TREE_OPERAND (xop0, 0);
      if (TREE_CODE (xop0) == ARRAY_REF)
        {
          TREE_SIDE_EFFECTS (xop0) = 1;
          TREE_SIDE_EFFECTS (op0) = 1;
        }
    }
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
                                     ubsan_type_descriptor (type), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
        = (flag_sanitize_recover & SANITIZE_DIVIDE)
          ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
          : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}

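/* For illustration only (not part of the sources above): this variant of
   ubsan_instrument_division checks integer division by zero, INT_MIN / -1
   and floating-point division by zero, so a test such as the following,
   compiled with
   "gcc -fsanitize=integer-divide-by-zero,float-divide-by-zero", reaches
   __ubsan_handle_divrem_overflow.  Values are made up.  */
#include <limits.h>

int
main (void)
{
  volatile int a = INT_MIN, b = -1, z = 0;
  volatile double x = 1.0, y = 0.0;
  volatile int r = a / b;       /* INT_MIN / -1 overflows  */
  r = a / z;                    /* integer division by zero  */
  volatile double f = x / y;    /* floating-point division by zero  */
  return r == 0 && f == f;
}
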
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
                        tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (type0)
      && flag_isoc99)
    {
      tree x = fold_build2 (MINUS_EXPR, unsigned_type_node, uprecm1,
                            fold_convert (op1_utype, op1));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 and later, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  if (code == LSHIFT_EXPR
      && !TYPE_UNSIGNED (TREE_TYPE (op0))
      && (cxx_dialect >= cxx11))
    {
      tree x = fold_build2 (MINUS_EXPR, unsigned_type_node, uprecm1,
                            fold_convert (op1_utype, op1));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, op0,
                       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  If the OP0 is
     an instrumented array reference, mark it as having side effects so
     it's not folded away.  */
  if (flag_sanitize & SANITIZE_BOUNDS)
    {
      tree xop0 = op0;
      while (CONVERT_EXPR_P (xop0))
        xop0 = TREE_OPERAND (xop0, 0);
      if (TREE_CODE (xop0) == ARRAY_REF)
        {
          TREE_SIDE_EFFECTS (xop0) = 1;
          TREE_SIDE_EFFECTS (op0) = 1;
        }
    }
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), op0, t);
  t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t,
                   tt ? tt : integer_zero_node);

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc,
                                     ubsan_type_descriptor (type0),
                                     ubsan_type_descriptor (type1), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
        = (flag_sanitize_recover & SANITIZE_SHIFT)
          ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
          : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}