Example #1
STATIC void asm_x64_lea_disp_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation
    assert(src_r64 < 8);
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W, OPCODE_LEA_MEM_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}
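Every one of these helpers finishes by calling asm_x64_write_r64_disp, which emits the ModRM byte and the displacement that follow the opcode. The snippet below is only a minimal sketch of that step, assuming a hypothetical emit_byte() primitive; it ignores the RSP/R12 (SIB byte) and zero-displacement special cases that a real x86-64 encoder has to handle, and it is not the library's actual implementation.

#include <stdint.h>
#include <stdio.h>

// Hypothetical emitter: prints each byte instead of appending it to a code buffer.
static void emit_byte(uint8_t b) {
    printf("%02x ", b);
}

// Minimal sketch of the ModRM-plus-displacement step performed by
// asm_x64_write_r64_disp; the RSP/R12 (SIB byte) and zero-displacement
// encodings are omitted here.
static void sketch_write_r64_disp(int reg, int base, int disp) {
    reg &= 7;   // the high bit of an extended register travels in REX.R, not here
    base &= 7;  // the high bit of the base register travels in REX.B, not here
    if (-128 <= disp && disp <= 127) {
        emit_byte(0x40 | (reg << 3) | base);        // mod=01: 8-bit displacement follows
        emit_byte((uint8_t)disp);
    } else {
        emit_byte(0x80 | (reg << 3) | base);        // mod=10: 32-bit displacement follows
        for (int i = 0; i < 4; i++) {
            emit_byte((uint8_t)(disp >> (8 * i))); // little-endian
        }
    }
}

int main(void) {
    // ModRM + disp for "[rbp-8]" with rax as the register operand: expect "45 f8"
    sketch_write_r64_disp(0 /* rax */, 5 /* rbp */, -8);
    printf("\n");
    return 0;
}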
Example #2
void asm_x64_mov_r32_to_mem32(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
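    // 32-bit store, so no REX.W; a REX prefix is emitted only when r8-r15 is involved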
    if (src_r64 < 8 && dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_R64_TO_RM64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R64_TO_RM64);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}
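Example #2 only emits a REX prefix when one of the registers is r8-r15, because the extra register bit does not fit in the ModRM byte. The definitions below are an assumption inferred from how the macros are used here, not copied from the project's asm_x64.h; they show one consistent way the REX bits could be derived.

// Assumed definitions, consistent with the usage above (check asm_x64.h for the real ones).
#define REX_PREFIX          (0x40)                 // fixed REX pattern: 0100WRXB
#define REX_W               (0x08)                 // W: 64-bit operand size
#define REX_R               (0x04)                 // R: extends the ModRM reg field
#define REX_B               (0x01)                 // B: extends the ModRM r/m field
#define REX_R_FROM_R64(r64) (((r64) >> 1) & 0x04)  // bit 3 of the register number -> REX.R
#define REX_B_FROM_R64(r64) (((r64) >> 3) & 0x01)  // bit 3 of the register number -> REX.B

For example, with r9 (register number 9) as the source, REX_R_FROM_R64(9) evaluates to 0x04, so the prefix becomes 0x44 and the reg field written into the ModRM byte wraps back to 1.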
Example #3
void asm_x64_mov_mem32_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
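    // 32-bit load; the CPU zero-extends the value into the full 64-bit register, so no REX.W is needed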
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_RM64_TO_R64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R, OPCODE_MOV_RM64_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}
Example #4
void asm_x64_mov_disp_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation; no REX.R/REX.B bits are emitted,
    // so both registers must be in the range 0-7
    assert(src_r64 < 8);
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W, OPCODE_MOV_RM64_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}
Example #5
void asm_x64_push_disp(asm_x64_t *as, int src_r64, int src_offset) {
    // no REX prefix is emitted, so the base register must be in the range 0-7
    assert(src_r64 < 8);
    asm_x64_write_byte_1(as, OPCODE_PUSH_M64);
    // 6 is the /6 opcode extension that selects PUSH in the ModRM reg field
    asm_x64_write_r64_disp(as, 6, src_r64, src_offset);
}
Example #6
void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    // use REX prefix for 64 bit operation
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R64_TO_RM64);
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}
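As a concrete cross-check of Example #6's encoding, storing rax into [rbp-8] should come out as the four bytes 48 89 45 f8. The snippet below hand-assembles that sequence; the register numbers rax=0 and rbp=5 are the standard x86-64 encodings rather than constants from this library, and the opcode value 0x89 for OPCODE_MOV_R64_TO_RM64 is assumed from the instruction set reference.

#include <stdint.h>
#include <stdio.h>

// Hand-encode "mov [rbp-8], rax" to cross-check Example #6 (sketch only).
int main(void) {
    int src = 0;   // rax
    int dest = 5;  // rbp
    int disp = -8;
    uint8_t code[4] = {
        0x48,                                 // REX_PREFIX | REX_W (both registers < 8, so no R/B bits)
        0x89,                                 // MOV r/m64, r64 (presumably OPCODE_MOV_R64_TO_RM64)
        (uint8_t)(0x40 | (src << 3) | dest),  // ModRM: mod=01, reg=rax, rm=rbp
        (uint8_t)disp,                        // 8-bit displacement: 0xf8
    };
    for (int i = 0; i < 4; i++) {
        printf("%02x ", code[i]);             // expected output: 48 89 45 f8
    }
    printf("\n");
    return 0;
}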