/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */

int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FN
		     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_fn (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;

  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}
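/* Illustration, not part of the original source: the scaling above in
   plain numbers.  Assuming CGRAPH_FREQ_BASE == 1000 and
   CGRAPH_FREQ_MAX == 100000 (their values in cgraph.h), a call in a
   block with frequency 2500 inside a function whose entry block has
   frequency 10000 is given 2500 * 1000 / 10000 == 250, i.e. an
   estimated 0.25 executions per function invocation.  A minimal
   standalone sketch of the same arithmetic:  */

static int
scale_to_cgraph_freq (int bb_freq, int entry_freq)
{
  if (entry_freq == 0)
    entry_freq = 1, bb_freq++;		/* Avoid dividing by zero.  */
  int freq = bb_freq * 1000 / entry_freq;	/* CGRAPH_FREQ_BASE.  */
  return freq > 100000 ? 100000 : freq;	/* Clamp to CGRAPH_FREQ_MAX.  */
}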
static bool
tail_duplicate (void)
{
  auto_vec<fibonacci_node<long, basic_block_def>*> blocks;
  blocks.safe_grow_cleared (last_basic_block_for_fn (cfun));

  basic_block *trace = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  int *counts = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int ninsns = 0, nduplicated = 0;
  gcov_type weighted_insns = 0, traced_insns = 0;
  fibonacci_heap<long, basic_block_def> heap (LONG_MIN);
  gcov_type cover_insns;
  int max_dup_insns;
  basic_block bb;
  bool changed = false;

  /* Create an oversized sbitmap to reduce the chance that we need to
     resize it.  */
  bb_seen = sbitmap_alloc (last_basic_block_for_fn (cfun) * 2);
  bitmap_clear (bb_seen);
  initialize_original_copy_tables ();

  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
    probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
  else
    probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
  probability_cutoff = REG_BR_PROB_BASE / 100 * probability_cutoff;

  branch_ratio_cutoff
    = (REG_BR_PROB_BASE / 100 * PARAM_VALUE (TRACER_MIN_BRANCH_RATIO));

  FOR_EACH_BB_FN (bb, cfun)
    {
      int n = count_insns (bb);
      if (!ignore_bb_p (bb))
	blocks[bb->index] = heap.insert (-bb->count.to_frequency (cfun), bb);

      counts [bb->index] = n;
      ninsns += n;
      weighted_insns += n * bb->count.to_frequency (cfun);
    }
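/* Illustration, not part of the original source: heap.insert above is
   keyed with the negated block frequency, so the fibonacci min-heap
   hands back the hottest remaining block first when traces are grown.
   A minimal standalone sketch of the same trick using
   std::priority_queue over a hypothetical block record:  */

#include <functional>
#include <queue>
#include <utility>
#include <vector>

struct toy_block { int index; long frequency; };	/* Hypothetical.  */

static int
hottest_block_index (const std::vector<toy_block> &blocks)
{
  /* Min-heap on the negated key: the smallest -frequency is the
     largest frequency, so the hottest block pops first.  */
  std::priority_queue<std::pair<long, int>,
		      std::vector<std::pair<long, int> >,
		      std::greater<std::pair<long, int> > > heap;
  for (const toy_block &b : blocks)
    heap.push (std::make_pair (-b.frequency, b.index));
  return heap.top ().second;
}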
void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      machine_mode hmode = mode_for_size (GET_MODE_PRECISION (mode) / 2,
					  MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART the high
	     half.  There was overflow if HIPART is different from
	     RES < 0 ? -1 : 0.  */
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;

	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;

	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      wide_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op0_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op0_medium_p = true;
		  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = 0;
		  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      wide_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op1_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op1_medium_p = true;
		  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = 0;
		  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode, the
	     multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it
	     together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the high part
	     of larger and lopart0 and lopart1 are the low parts of both
	     operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and hipart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are
	     the same, overflow happened if res is negative, if they are
	     different, overflow happened if res is positive.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode,
				   false, do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode,
				   false, do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into
	     do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
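/* Illustration, not part of the original source: the core check in the
   GET_MODE_2XWIDER_MODE path above, written as plain C for int32_t.
   The widened product overflows the narrow type iff its high half
   differs from the sign extension of its low half, i.e. iff
   hipart != (res < 0 ? -1 : 0).  */

#include <stdint.h>

static int
mul_overflows_int32 (int32_t a, int32_t b, int32_t *res)
{
  int64_t wide = (int64_t) a * b;	/* WIDEN_MULT_EXPR equivalent.  */
  int32_t lo = (int32_t) wide;		/* Low half: the candidate result.  */
  int32_t hi = (int32_t) (wide >> 32);	/* High half.  */
  *res = lo;
  return hi != (lo < 0 ? -1 : 0);	/* Mirror of the signbit/hipart test.  */
}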
void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
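/* Illustration, not part of the original source: the fallback above
   relies on the fact that two's complement negation overflows for
   exactly one input, the most negative value.  A plain C equivalent
   for int32_t:  */

#include <stdint.h>

static int
neg_overflows_int32 (int32_t a, int32_t *res)
{
  *res = (int32_t) (0U - (uint32_t) a);	/* Unsigned negation, as at RTL level.  */
  return a == INT32_MIN;		/* -INT32_MIN is not representable.  */
}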
void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only the
	 second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump instead of 2 at runtime, 3 present in the
	 emitted code.  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 emit_cmp_and_jump_insns will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  wide_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 1;
	      else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 2;
	    }
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  wide_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 1;
	      else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 2;
	    }
	}

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);
	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
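/* Illustration, not part of the original source: the fallback comparison
   scheme above in plain C for int32_t addition.  The sum is computed
   with unsigned (wrapping) arithmetic; for a non-negative addend the
   result must not drop below the other operand, for a negative addend
   it must not exceed it.  */

#include <stdint.h>

static int
add_overflows_int32 (int32_t a, int32_t b, int32_t *res)
{
  uint32_t u = (uint32_t) a + (uint32_t) b;	/* Wraps instead of trapping.  */
  *res = (int32_t) u;
  if (b >= 0)
    return *res < a;	/* The (pos_neg & 1) check: res >= op0 must hold.  */
  else
    return *res > a;	/* The sub_check path: res <= op0 must hold.  */
}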