Example #1
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_SPARC_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 22))
            tcg_abort();
        /* Mask the displacement so a negative value cannot set opcode bits. */
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff)
                                | (value & 0x3fffff);
        break;
    case R_SPARC_WDISP19:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 19))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff)
                                | (value & 0x7ffff);
        break;
    default:
        tcg_abort();
    }
}
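
The two WDISP cases pack a PC-relative word displacement into the low bits of a branch instruction: subtract the patch address, divide by 4 (SPARC instructions are 4 bytes wide), verify the result fits the signed field, then merge it into the insn. Below is a minimal stand-alone sketch of that arithmetic; check_fit models check_fit_tl, and everything else is illustrative rather than QEMU's API.

#include <stdint.h>
#include <stdio.h>

/* Does 'value' fit in a signed field of 'bits' bits? (models check_fit_tl) */
static int check_fit(int64_t value, unsigned bits)
{
    int64_t min = -((int64_t)1 << (bits - 1));
    int64_t max = ((int64_t)1 << (bits - 1)) - 1;
    return value >= min && value <= max;
}

int main(void)
{
    uint32_t insn = 0x10800000;         /* SPARC "ba" with an empty disp22 */
    int64_t byte_disp = -64;            /* branch 64 bytes backwards */
    int64_t word_disp = byte_disp >> 2; /* byte distance -> word distance */

    if (!check_fit(word_disp, 22)) {
        fprintf(stderr, "displacement out of range\n");
        return 1;
    }
    insn = (insn & ~0x3fffff) | (word_disp & 0x3fffff);
    printf("patched insn: 0x%08x\n", (unsigned)insn);
    return 0;
}

The & 0x3fffff mask matters for backward branches: without it, the sign bits of a negative displacement would be OR-ed over the opcode field.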
Example #2
/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond(TCGContext *s, TCGOpcode op, TCGArg x,
                                       TCGArg y, TCGCond c)
{
    struct tcg_temp_info *temps = s->temps2;

    if (temps[x].state == TCG_TEMP_CONST && temps[y].state == TCG_TEMP_CONST) {
        switch (op_bits(s, op)) {
        case 32:
            return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
        case 64:
            return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
        default:
            tcg_abort();
        }
    } else if (temps_are_copies(s, x, y)) {
        return do_constant_folding_cond_eq(c);
    } else if (temps[y].state == TCG_TEMP_CONST && temps[y].val == 0) {
        switch (c) {
        case TCG_COND_LTU:
            return 0;
        case TCG_COND_GEU:
            return 1;
        default:
            return 2;
        }
    } else {
        return 2;
    }
}
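
One branch above folds even when x is entirely unknown: comparing any value against a constant 0 with the unsigned conditions has a fixed answer, since no unsigned value is below zero (LTU is always 0) and every unsigned value is at least zero (GEU is always 1). A tiny stand-alone check of that identity:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* For any unsigned x, x < 0 is always false and x >= 0 always true,
     * which is exactly the TCG_COND_LTU / TCG_COND_GEU fold above. */
    uint64_t x = 0xdeadbeefULL;
    printf("LTU vs 0: %d (always 0)\n", x < (uint64_t)0);
    printf("GEU vs 0: %d (always 1)\n", x >= (uint64_t)0);
    return 0;
}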
Example #3
static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int64_t)x < (int64_t)y;
    case TCG_COND_GE:
        return (int64_t)x >= (int64_t)y;
    case TCG_COND_LE:
        return (int64_t)x <= (int64_t)y;
    case TCG_COND_GT:
        return (int64_t)x > (int64_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}
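
The signed casts are the whole point of the LT/GE/LE/GT cases: the same 64-bit pattern orders differently depending on whether its top bit is read as a sign bit. A minimal sketch showing where TCG_COND_LT and TCG_COND_LTU diverge:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t x = UINT64_MAX;   /* all ones: -1 when viewed as int64_t */
    uint64_t y = 1;

    printf("LT  (signed):   %d\n", (int64_t)x < (int64_t)y); /* 1: -1 < 1 */
    printf("LTU (unsigned): %d\n", x < y);                   /* 0: 2^64-1 > 1 */
    return 0;
}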
Example #4
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_brcond2(TCGContext *s,
                            const TCGArg *args, const int *const_args)
{
    int label_next;
    label_next = gen_new_label();
    switch(args[4]) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2], label_next);
        tcg_out_brcond(s, TCG_COND_EQ, args[1], args[3], const_args[3], args[5]);
        break;
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2], args[5]);
        tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], args[5]);
        break;
    case TCG_COND_LT:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_GT:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2], args[5]);
        break;
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3], args[5]);
        tcg_out_jxx(s, JCC_JNE, label_next);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2], args[5]);
        break;
    default:
        tcg_abort();
    }
    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
}
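
Every case above follows one recipe for comparing a 64-bit pair on a 32-bit host: branch on the high words with the original condition (signed or unsigned as appropriate), and only when the high words are equal fall through to the low words with the unsigned variant, because low words carry no sign. The JCC_JNE jump to label_next is what skips the low-word compare once the high words have already decided. A self-contained C model of the LT case, with illustrative names:

#include <stdint.h>
#include <stdio.h>

/* 64-bit signed less-than built from a signed compare of the high
 * halves and an unsigned compare of the low halves, mirroring the
 * branch sequence emitted for TCG_COND_LT above. */
static int lt64_from_halves(uint32_t alo, int32_t ahi,
                            uint32_t blo, int32_t bhi)
{
    if (ahi != bhi) {
        return ahi < bhi;      /* high words decide: signed compare */
    }
    return alo < blo;          /* tie: low words compare unsigned */
}

int main(void)
{
    int64_t a = -2, b = 1;
    int got = lt64_from_halves((uint32_t)a, (int32_t)(a >> 32),
                               (uint32_t)b, (int32_t)(b >> 32));
    printf("decomposed: %d, direct: %d\n", got, a < b);   /* both print 1 */
    return 0;
}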
Example #5
static TCGOpcode op_to_movi(TCGContext *s, TCGOpcode op)
{
    switch (op_bits(s, op)) {
    case 32:
        return INDEX_op_movi_i32;
    case 64:
        return INDEX_op_movi_i64;
    default:
        fprintf(stderr, "op_to_movi: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}
Example #6
/* Maximum number of registers used for input function arguments. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    flags &= TCG_CALL_TYPE_MASK;
    switch(flags) {
    case TCG_CALL_TYPE_STD:
        return 0;
    case TCG_CALL_TYPE_REGPARM_1:
    case TCG_CALL_TYPE_REGPARM_2:
    case TCG_CALL_TYPE_REGPARM:
        return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
    default:
        tcg_abort();
    }
}
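
The REGPARM arithmetic relies on TCG_CALL_TYPE_REGPARM_1, _2 and REGPARM being consecutive enumerators, so subtracting the first and adding 1 yields the register count 1, 2 or 3. A stand-alone sketch of the same idiom; the enum values here are illustrative:

#include <stdio.h>

enum {
    CALL_TYPE_STD,        /* all arguments on the stack */
    CALL_TYPE_REGPARM_1,  /* first argument in a register */
    CALL_TYPE_REGPARM_2,  /* first two arguments in registers */
    CALL_TYPE_REGPARM,    /* first three arguments in registers */
};

static int iarg_regs_count(int type)
{
    if (type == CALL_TYPE_STD) {
        return 0;
    }
    /* Consecutive enumerators make the count a simple subtraction. */
    return type - CALL_TYPE_REGPARM_1 + 1;
}

int main(void)
{
    for (int t = CALL_TYPE_STD; t <= CALL_TYPE_REGPARM; t++) {
        printf("call type %d -> %d register args\n", t, iarg_regs_count(t));
    }
    return 0;
}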
Example #7
static void patch_reloc(uint8_t *code_ptr, int type, 
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_386_32:
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        *(uint32_t *)code_ptr = value - (long)code_ptr;
        break;
    default:
        tcg_abort();
    }
}
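
R_386_32 stores an absolute address, while R_386_PC32 stores the distance from the patched word to the target, which is the shape a relative jmp/call operand wants. A minimal sketch of the same arithmetic on a plain buffer (illustrative, not QEMU's API). Note that real x86 displacements are relative to the end of the instruction, which is why the emitter in Example #14 passes args[0] - (tcg_target_long)s->code_ptr - 4 at the call sites:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    uint8_t code[16] = { 0 };
    uint8_t *patch_at = code + 4;            /* where the 32-bit field lives */
    uintptr_t target = (uintptr_t)code + 12; /* pretend jump target */

    /* R_386_PC32 flavour: field = target - address_of_field. */
    uint32_t rel = (uint32_t)(target - (uintptr_t)patch_at);
    memcpy(patch_at, &rel, sizeof(rel));     /* memcpy avoids aliasing issues */

    printf("relative displacement: %d\n", (int32_t)rel);   /* prints 8 */
    return 0;
}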
Example #8
void tcg_target_init(TCGContext *s)
{
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_EAX) | 
                     (1 << TCG_REG_EDX) | 
                     (1 << TCG_REG_ECX));
    
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);

    tcg_add_target_add_op_defs(x86_op_defs);
}
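
The tcg_regset_* calls are bit-mask bookkeeping: one bit per register, a 32-bit mask for the available set, and single bits for reserved registers such as ESP, which the allocator must never hand out because it is the stack pointer. A tiny self-contained model of that representation; the helper names are hypothetical stand-ins for the macros:

#include <stdint.h>
#include <stdio.h>

typedef uint32_t regset;

#define REG_EAX 0
#define REG_ECX 1
#define REG_EDX 2
#define REG_ESP 4

static void regset_set_reg(regset *set, int reg) { *set |= 1u << reg; }
static int  regset_test(regset set, int reg)     { return (set >> reg) & 1; }

int main(void)
{
    regset reserved = 0;                       /* like tcg_regset_clear */
    regset call_clobbered = (1u << REG_EAX) | (1u << REG_EDX) | (1u << REG_ECX);

    regset_set_reg(&reserved, REG_ESP);        /* never allocate ESP */
    printf("ESP reserved: %d\n", regset_test(reserved, REG_ESP));
    printf("EAX clobbered by calls: %d\n", regset_test(call_clobbered, REG_EAX));
    return 0;
}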
Example #9
static bool do_constant_folding_cond_eq(TCGCond c)
{
    switch (c) {
    case TCG_COND_GT:
    case TCG_COND_LTU:
    case TCG_COND_LT:
    case TCG_COND_GTU:
    case TCG_COND_NE:
        return 0;
    case TCG_COND_GE:
    case TCG_COND_GEU:
    case TCG_COND_LE:
    case TCG_COND_LEU:
    case TCG_COND_EQ:
        return 1;
    default:
        tcg_abort();
    }
}
Example #10
File: tci.c Project: 8tab/qemu
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}
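
Stripped of the per-opcode work, the loop above is a classic byte-stream interpreter: fetch an opcode byte and a size byte, advance past them, decode operands, then either fall through to the end-of-op size check or take a branch by replacing tb_ptr outright. A minimal skeleton of that shape with invented toy opcodes (nothing here is TCG's actual encoding):

#include <stdint.h>
#include <stdio.h>

enum { OP_ADDI, OP_PRINT, OP_HALT };           /* toy opcodes, not TCG's */

int main(void)
{
    /* Each op: opcode byte, total size byte, then operand bytes. */
    uint8_t prog[] = {
        OP_ADDI,  3, 5,    /* acc += 5 */
        OP_ADDI,  3, 7,    /* acc += 7 */
        OP_PRINT, 2,
        OP_HALT,  2,
    };
    uint8_t *pc = prog;
    long acc = 0;

    for (;;) {
        uint8_t opc = pc[0], size = pc[1];
        uint8_t *next = pc + size;             /* like old_code_ptr + op_size */
        pc += 2;                               /* skip opcode and size entry */
        switch (opc) {
        case OP_ADDI:
            acc += *pc++;
            break;
        case OP_PRINT:
            printf("acc = %ld\n", acc);        /* prints 12 */
            break;
        case OP_HALT:
            return 0;
        }
        pc = next;                             /* the tci_assert invariant */
    }
}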
Example #11
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi.  */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
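
The final line is the encoder-side twin of the interpreter's size byte: the op starts at old_code_ptr, the opcode header goes out first with the length still unknown, and once all operands have been emitted the distance from the op's start is back-patched into byte 1. A stand-alone sketch of that pattern; the buffer layout is illustrative:

#include <stdint.h>
#include <stdio.h>

static uint8_t buf[64];
static uint8_t *code_ptr = buf;

static void out8(uint8_t v) { *code_ptr++ = v; }

static void out32(uint32_t v)
{
    for (int i = 0; i < 4; i++) {
        out8(v >> (8 * i));                 /* little-endian operand */
    }
}

/* Emit one op: opcode, size placeholder, operands, then patch the size. */
static void emit_op(uint8_t opc, uint32_t operand)
{
    uint8_t *old_code_ptr = code_ptr;
    out8(opc);
    out8(0);                                /* length, patched below */
    out32(operand);
    old_code_ptr[1] = code_ptr - old_code_ptr;
}

int main(void)
{
    emit_op(7, 0xdeadbeef);
    printf("opcode %u, size %u\n", buf[0], buf[1]);   /* size is 6 */
    return 0;
}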
Example #12
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
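
Both versions of tcg_out_op lean on tcg_out_ri32/tcg_out_ri64: one helper that, depending on const_args[i], encodes either a register index or a flagged inline constant, leaving the interpreter's tci_read_ri* to pick the right decoding at run time. A hedged sketch of such an encode/decode pair; the one-byte tag scheme below is an illustration, not tci's actual wire format:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

enum { OPERAND_REG = 0, OPERAND_CONST = 1 };   /* illustrative tag values */

static uint8_t buf[16], *wp = buf;

/* Encoder side: mirrors the tcg_out_ri32(s, const_arg, arg) idea. */
static void out_ri32(int is_const, uint32_t arg)
{
    *wp++ = is_const ? OPERAND_CONST : OPERAND_REG;
    if (is_const) {
        memcpy(wp, &arg, 4);                   /* inline 32-bit constant */
        wp += 4;
    } else {
        *wp++ = (uint8_t)arg;                  /* register index */
    }
}

/* Decoder side: mirrors tci_read_ri32. */
static uint32_t read_ri32(const uint8_t **rp, const uint32_t *regs)
{
    uint8_t tag = *(*rp)++;
    if (tag == OPERAND_CONST) {
        uint32_t v;
        memcpy(&v, *rp, 4);
        *rp += 4;
        return v;
    }
    return regs[*(*rp)++];
}

int main(void)
{
    uint32_t regs[4] = { 0, 11, 22, 33 };
    out_ri32(0, 2);                            /* operand: register 2 */
    out_ri32(1, 100);                          /* operand: constant 100 */

    const uint8_t *rp = buf;
    printf("%u %u\n", read_ri32(&rp, regs), read_ri32(&rp, regs)); /* 22 100 */
    return 0;
}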
Example #13
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    case INDEX_op_trunc_shr_i32:
    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return (TCGArg)ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return (TCGArg)rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return 0-x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return (TCGArg)h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return (TCGArg)h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined.  */
        return (int32_t)x / ((int32_t)y ? (int32_t)y : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? (uint32_t)y : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? (int64_t)y : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? (uint64_t)y : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? (int32_t)y : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? (uint32_t)y : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? (int64_t)y : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? (uint64_t)y : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}
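
Two defensive habits recur in this folder and are easy to miss. Shift and rotate counts are masked to the operand width (y & 31, y & 63), because an out-of-range shift is undefined behavior in C even though the TCG op defines it modulo the width; and constant division substitutes 1 for a zero divisor so the host compiler never traps while folding guest code whose own behavior is undefined anyway. A small stand-alone check of both:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t x = 0x80000000u, y = 33;

    /* Masking keeps the shift in range: shifting a 32-bit value by 33
     * is undefined in C, but y & 31 turns it into a shift by 1. */
    printf("x >> (y & 31) = 0x%x\n", x >> (y & 31));

    /* The fold-time guard: never let the host divide by zero. */
    uint32_t divisor = 0;
    printf("guarded div = %u\n", x / (divisor ? divisor : 1));
    return 0;
}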
Example #14
static inline void tcg_out_op(TCGContext *s, int opc, 
                              const TCGArg *args, const int *const_args)
{
    int c;
    
    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1, 
                                 (tcg_target_long)(s->tb_next + args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        /* movb */
        tcg_out_modrm_offset(s, 0x88, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith:
        if (const_args[2]) {
            tgen_arithi(s, c, args[0], args[2], 0);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;
    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_modrm(s, 0xf7, 4, args[3]); /* mull */
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]); /* idivl */
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]); /* divl */
        break;
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
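        /* 0xd1 = shift-by-one, 0xc1 = shift-by-imm8, 0xd3 = shift-by-%cl */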
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_ADD, args[0], args[4], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_ADC, args[1], args[5], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_ADC << 3), args[5], args[1]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_SUB, args[0], args[4], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_SBB, args[1], args[5], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_SBB << 3), args[5], args[1]);
        }
        break;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args);
        break;

    case INDEX_op_bswap16_i32:
        tcg_out8(s, 0x66);
        tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
        tcg_out8(s, 8);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_opc(s, (0xc8 + args[0]) | P_EXT);
        break;

    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]); /* negl */
        break;

    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]); /* notl */
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
        tcg_out_modrm(s, 0xb6 | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
        tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
        
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
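Nearly every case above bottoms out in tcg_out_modrm() or tcg_out_modrm_offset(), i.e. an opcode byte followed by an x86 ModRM byte. The register-to-register form is simple enough to sketch; modrm_rr below is an illustrative name, not the TCG helper, and the real code also handles memory operands.

#include <stdint.h>
#include <stdio.h>

/* Register-direct ModRM byte: mod = 3 (register operand),
   reg = source register or opcode extension, rm = destination. */
static uint8_t modrm_rr(int reg, int rm)
{
    return (uint8_t)(0xc0 | ((reg & 7) << 3) | (rm & 7));
}

int main(void)
{
    /* "xorl %eax, %eax" encodes as opcode 0x31 plus ModRM 0xc0. */
    printf("xorl %%eax, %%eax -> 0x31 0x%02x\n", (unsigned)modrm_rr(0, 0));
    return 0;
}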
Example No. 15
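/* Emit a guest store (qemu_st) for the i386 backend.  The operands are
   the data value (two registers for a 64-bit store), the guest address
   (two registers when TARGET_LONG_BITS == 64), and the softmmu mem_index;
   under CONFIG_SOFTMMU an inline TLB check guards a helper-call slow path. */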
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3) {
        data_reg2 = *args++;
    } else {
        data_reg2 = 0;
    }
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_EAX;
    r1 = TCG_REG_EDX;

#if defined(CONFIG_SOFTMMU)
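    /* Inline TLB lookup: r1 is turned into the TLB entry offset,
       r0 into the address tag compared against addr_write. */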
    tcg_out_mov(s, r1, addr_reg);
    tcg_out_mov(s, r0, addr_reg);

    tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS); 
    
    tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    
    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
    tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
    tcg_out8(s, (5 << 3) | r1);
    tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
    
    tcg_out_mov(s, r0, addr_reg);
    
#if TARGET_LONG_BITS == 32
    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
#else
    /* jne label3 */
    tcg_out8(s, 0x70 + JCC_JNE);
    label3_ptr = s->code_ptr;
    s->code_ptr++;
    
    /* cmp 4(r1), addr_reg2 */
    tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
    
    /* label3: */
    *label3_ptr = s->code_ptr - label3_ptr - 1;
#endif

    /* XXX: move this code to the end of the TB */
#if TARGET_LONG_BITS == 32
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, data_reg);
        tcg_out_mov(s, TCG_REG_ECX, data_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] - 
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    } else {
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_EDX, data_reg);
            break;
        }
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] - 
                  (tcg_target_long)s->code_ptr - 4);
    }
#else
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out_opc(s, 0x50 + data_reg2); /* push */
        tcg_out_opc(s, 0x50 + data_reg); /* push */
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] - 
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 12);
    } else {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_ECX, data_reg);
            break;
        }
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] - 
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    }
#endif
    
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;
    
    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) - 
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88, data_reg, r0, GUEST_BASE);
        break;
    case 1:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            tcg_out8(s, 0x66); /* rolw $8, %edx (r1) */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
        break;
    case 2:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg2);
            /* bswap data_reg2 (copied to r1) */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE);
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE + 4);
        } else {
            tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
            tcg_out_modrm_offset(s, 0x89, data_reg2, r0, GUEST_BASE + 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
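The CONFIG_SOFTMMU fast path emitted above is easier to read as C. The sketch below uses illustrative constants and names (TLBEntrySketch, tlb_store_fast_path, and the macro values are stand-ins, not the real QEMU definitions):

#include <stdint.h>

/* Illustrative stand-ins for the QEMU configuration macros. */
#define TARGET_PAGE_BITS 12
#define TARGET_PAGE_MASK (~((1u << TARGET_PAGE_BITS) - 1))
#define CPU_TLB_SIZE     256

typedef struct {
    uint32_t addr_write;  /* tag: page-aligned guest address */
    uintptr_t addend;     /* guest-to-host delta on a hit */
} TLBEntrySketch;

/* C equivalent of the emitted shr/and/cmp sequence: returns the host
   address on a TLB hit, or 0 to signal the helper-call slow path. */
static uintptr_t tlb_store_fast_path(const TLBEntrySketch *tlb,
                                     uint32_t vaddr, int s_bits)
{
    unsigned idx = (vaddr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1);
    /* Keeping the low (1 << s_bits) - 1 bits in the tag makes an
       unaligned access mismatch the page-aligned addr_write entry,
       so it is forced through the slow path. */
    uint32_t tag = vaddr & (TARGET_PAGE_MASK | ((1u << s_bits) - 1));
    if (tag == tlb[idx].addr_write) {
        return (uintptr_t)vaddr + tlb[idx].addend;
    }
    return 0;
}

int main(void)
{
    TLBEntrySketch tlb[CPU_TLB_SIZE] = { { 0, 0 } };
    /* An empty TLB always misses: the emitted code would fall through
       to the qemu_st_helpers[s_bits] call instead. */
    return tlb_store_fast_path(tlb, 0x1000, 2) == 0 ? 0 : 1;
}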