/* Emit the x86-32 function epilogue: move the return value into the return
   register, free the local stack frame, pop the saved registers and return.
   Interface unchanged: (compiler, op, src, srcw) -> sljit error code. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
	sljit_si size;
	sljit_ub *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
	SLJIT_ASSERT(compiler->args >= 0);

	compiler->flags_saved = 0;
	/* Move (and optionally convert, depending on op) src into the return register. */
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	SLJIT_ASSERT(compiler->local_size > 0);
	/* Release the local frame: add local_size to SLJIT_SP. */
	FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));

#if !defined(__APPLE__)
	if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) {
		/* The matching prologue stored the original (pre-alignment) esp at [esp];
		   reload it: mov esp, [esp] (mod=00, rm=100 selects the SIB byte). */
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 3);
		FAIL_IF(!inst);

		INC_SIZE(3);
		inst[0] = MOV_r_rm;
		inst[1] = (reg_map[SLJIT_SP] << 3) | 0x4 /* SIB */;
		inst[2] = (4 << 3) | reg_map[SLJIT_SP];
	}
#endif

	/* Byte budget: pop TMP_REG1 (1) + ret (1), plus one byte per saved register
	   pop; scratches above 7 spill into the saved-register slots and are popped
	   as well. */
	size = 2 + (compiler->scratches > 7 ? (compiler->scratches - 7) : 0) + (compiler->saveds <= 3 ? compiler->saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#else
	if (compiler->args > 0)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#endif
	inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

	/* Pop in reverse order of the prologue pushes. */
	if (compiler->saveds > 0 || compiler->scratches > 9)
		POP_REG(reg_map[SLJIT_S0]);
	if (compiler->saveds > 1 || compiler->scratches > 8)
		POP_REG(reg_map[SLJIT_S1]);
	if (compiler->saveds > 2 || compiler->scratches > 7)
		POP_REG(reg_map[SLJIT_S2]);
	POP_REG(reg_map[TMP_REG1]);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		RET_I16(sizeof(sljit_sw)); /* Only the third argument lives on the stack. */
	else
		RET();
#else
	/* BUG FIX: this path previously emitted a bare RET() although the size
	   computation above reserved two extra bytes for args > 0, leaving
	   uninitialized bytes in the generated code and not popping the stack
	   arguments. Pop them with ret imm16, matching the size accounting and
	   the other x86-32 variants of this function. */
	if (compiler->args > 0)
		RET_I16(compiler->args * sizeof(sljit_sw));
	else
		RET();
#endif
	return SLJIT_SUCCESS;
}
/* Emit the x86-32 function epilogue (early SLJIT revision): move the return
   value into SLJIT_RETURN_REG, free the local frame, pop the general
   (callee-saved) registers, and return, popping stack arguments where this
   calling convention requires it. Returns an sljit error code. */
int sljit_emit_return(struct sljit_compiler *compiler, int src, sljit_w srcw)
{
	int size;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, src, srcw);
	SLJIT_ASSERT(compiler->args >= 0);
	compiler->flags_saved = 0;

	CHECK_EXTRA_REGS(src, srcw, (void)0);
	/* Skip the move when src already is the return register (or unused). */
	if (src != SLJIT_UNUSED && src != SLJIT_RETURN_REG)
		FAIL_IF(emit_mov(compiler, SLJIT_RETURN_REG, 0, src, srcw));

	/* Release the local frame: add local_size to the locals (stack) register.
	   The hex constants are the x86 ADD opcode forms (presumably r<-rm, rm<-r,
	   /0 extension, eax<-imm32 — matching the named ADD_* constants used by
	   later revisions of this file). */
	if (compiler->local_size > 0)
		FAIL_IF(emit_cum_binary(compiler, 0x03, 0x01, 0x0 << 3, 0x05, SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, compiler->local_size));

	/* Byte budget: pop TMP_REGISTER (1) + ret (1) + one byte per general pop. */
	size = 2 + (compiler->generals <= 3 ? compiler->generals : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#else
	if (compiler->args > 0)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);

	/* Pop in reverse order of the prologue pushes. */
	if (compiler->generals > 0)
		POP_REG(reg_map[SLJIT_GENERAL_REG1]);
	if (compiler->generals > 1)
		POP_REG(reg_map[SLJIT_GENERAL_REG2]);
	if (compiler->generals > 2)
		POP_REG(reg_map[SLJIT_GENERAL_REG3]);
	POP_REG(reg_map[TMP_REGISTER]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	/* Fastcall: only the third and later arguments are on the stack. */
	if (compiler->args > 2)
		RETN(sizeof(sljit_w));
	else
		RET();
#else
	/* All arguments are on the stack; the callee cleans them up. */
	if (compiler->args > 0)
		RETN(compiler->args * sizeof(sljit_w));
	else
		RET();
#endif
	return SLJIT_SUCCESS;
}
/* Emit the x86-32 function epilogue (SLJIT_SAVED_REGn-era revision): move the
   return value into the return register, free the local frame, pop the saved
   registers, and return, popping stack arguments where the convention
   requires it. Returns an sljit error code. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
	sljit_si size;
	sljit_ub *inst;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, op, src, srcw);
	SLJIT_ASSERT(compiler->args >= 0);

	compiler->flags_saved = 0;
	/* Move (and optionally convert, depending on op) src into the return register. */
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	SLJIT_ASSERT(compiler->local_size > 0);
	/* Release the local frame: add local_size to the locals (stack) register. */
	FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, compiler->local_size));

	/* Byte budget: pop TMP_REGISTER (1) + ret (1) + one byte per saved-reg pop. */
	size = 2 + (compiler->saveds <= 3 ? compiler->saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#else
	if (compiler->args > 0)
		size += 2; /* ret imm16 is two bytes longer than plain ret. */
#endif
	inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

	/* Pop in reverse order of the prologue pushes. */
	if (compiler->saveds > 0)
		POP_REG(reg_map[SLJIT_SAVED_REG1]);
	if (compiler->saveds > 1)
		POP_REG(reg_map[SLJIT_SAVED_REG2]);
	if (compiler->saveds > 2)
		POP_REG(reg_map[SLJIT_SAVED_REG3]);
	POP_REG(reg_map[TMP_REGISTER]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	/* Fastcall: only the third and later arguments are on the stack. */
	if (compiler->args > 2)
		RET_I16(sizeof(sljit_sw));
	else
		RET();
#else
	/* All arguments are on the stack; the callee cleans them up. */
	if (compiler->args > 0)
		RET_I16(compiler->args * sizeof(sljit_sw));
	else
		RET();
#endif
	return SLJIT_SUCCESS;
}
/* Emit the x86-64 function epilogue: move the return value into the return
   register, restore xmm6 on WIN64 when it was saved, free the local frame,
   pop scratch/saved general registers, and return.
   Returns an sljit error code. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 i, tmp, size;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_return(compiler, op, src, srcw));

	/* Move (and optionally convert, depending on op) src into the return register. */
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

#ifdef _WIN64
	/* Restore xmm6 register: movaps xmm6, [rsp + 0x20]
	   (0F 28 74 24 20; the imm32 store writes bytes 28 74 24 20). */
	if (compiler->fscratches >= 6 || compiler->fsaveds >= 1) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
		FAIL_IF(!inst);

		INC_SIZE(5);
		*inst++ = GROUP_0F;
		sljit_unaligned_store_s32(inst, 0x20247428);
	}
#endif

	/* Release the local frame: add rsp, local_size (imm8 form when it fits). */
	if (compiler->local_size > 0) {
		if (compiler->local_size <= 127) {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);

			INC_SIZE(4);
			*inst++ = REX_W;
			*inst++ = GROUP_BINARY_83;
			*inst++ = MOD_REG | ADD | 4; /* /0 (ADD), rm = 4 (rsp). */
			*inst = compiler->local_size;
		}
		else {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 7);
			FAIL_IF(!inst);

			INC_SIZE(7);
			*inst++ = REX_W;
			*inst++ = GROUP_BINARY_81;
			*inst++ = MOD_REG | ADD | 4; /* /0 (ADD), rm = 4 (rsp). */
			sljit_unaligned_store_s32(inst, compiler->local_size);
		}
	}

	/* Pop the callee-saved registers that were used as extra scratches.
	   Registers numbered >= 8 need a REX.B prefix before the pop. */
	tmp = compiler->scratches;
	for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);

		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		POP_REG(reg_lmap[i]);
	}

	/* Pop the saved registers (reverse order of the prologue pushes). */
	tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
	for (i = tmp; i <= SLJIT_S0; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);

		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		POP_REG(reg_lmap[i]);
	}

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!inst);

	INC_SIZE(1);
	RET();
	return SLJIT_SUCCESS;
}
/* Emit the x86-64 function epilogue (sljit_si-era revision): move the return
   value into the return register, restore xmm6 on WIN64, free the local
   frame, pop the saved registers, and return. Returns an sljit error code. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
	sljit_si size;
	sljit_ub *inst;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, op, src, srcw);

	compiler->flags_saved = 0;
	/* Move (and optionally convert, depending on op) src into the return register. */
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

#ifdef _WIN64
	/* Restore xmm6 with MOVAPS instruction: movaps xmm6, [rsp + 0x20]
	   (0F 28 74 24 20). Done unconditionally — presumably the matching
	   prologue always saves xmm6 on WIN64 in this revision. */
	inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
	FAIL_IF(!inst);

	INC_SIZE(5);
	*inst++ = GROUP_0F;
	*(sljit_si*)inst = 0x20247428;
#endif

	SLJIT_ASSERT(compiler->local_size > 0);
	/* Release the local frame: add rsp, local_size (imm8 form when it fits). */
	if (compiler->local_size <= 127) {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
		FAIL_IF(!inst);

		INC_SIZE(4);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_83;
		*inst++ = MOD_REG | ADD | 4; /* /0 (ADD), rm = 4 (rsp). */
		*inst = compiler->local_size;
	}
	else {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 7);
		FAIL_IF(!inst);

		INC_SIZE(7);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_81;
		*inst++ = MOD_REG | ADD | 4; /* /0 (ADD), rm = 4 (rsp). */
		*(sljit_si*)inst = compiler->local_size;
	}

	/* Byte budget: ret (1) + one byte per saved-reg pop, plus REX.B prefixes:
	   on SysV, SAVED_REG2 and up need REX.B; on WIN64, only SAVED_EREG1/2 do,
	   and popping TEMPORARY_EREG2 (used when scratches >= 5) costs 2 bytes. */
	size = 1 + compiler->saveds;
#ifndef _WIN64
	if (compiler->saveds >= 2)
		size += compiler->saveds - 1;
#else
	if (compiler->saveds >= 4)
		size += compiler->saveds - 3;
	if (compiler->scratches >= 5)
		size += (5 - 4) * 2;
#endif
	inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

	/* Pop in reverse order of the prologue pushes. */
#ifdef _WIN64
	if (compiler->scratches >= 5) {
		*inst++ = REX_B;
		POP_REG(reg_lmap[SLJIT_TEMPORARY_EREG2]);
	}
#endif
	if (compiler->saveds >= 1)
		POP_REG(reg_map[SLJIT_SAVED_REG1]);
	if (compiler->saveds >= 2) {
#ifndef _WIN64
		*inst++ = REX_B;
#endif
		POP_REG(reg_lmap[SLJIT_SAVED_REG2]);
	}
	if (compiler->saveds >= 3) {
#ifndef _WIN64
		*inst++ = REX_B;
#endif
		POP_REG(reg_lmap[SLJIT_SAVED_REG3]);
	}
	if (compiler->saveds >= 4) {
		*inst++ = REX_B;
		POP_REG(reg_lmap[SLJIT_SAVED_EREG1]);
	}
	if (compiler->saveds >= 5) {
		*inst++ = REX_B;
		POP_REG(reg_lmap[SLJIT_SAVED_EREG2]);
	}
	RET();
	return SLJIT_SUCCESS;
}
/* Emit the x86-64 function epilogue (early revision with has_locals /
   temporaries naming): move the return value into the return register, free
   the local frame, pop the saved registers (plus the locals register on
   WIN64 when it was saved), and return. Returns an sljit error code. */
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_return(struct sljit_compiler *compiler, int op, int src, sljit_w srcw)
{
	int size;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, op, src, srcw);

	compiler->flags_saved = 0;
	/* Move (and optionally convert, depending on op) src into the return register. */
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	/* Release the local frame: add rsp, local_size. The raw opcodes are
	   REX.W + 83 /0 ib (imm8 form) or REX.W + 81 /0 id (imm32 form);
	   0xc0 | (0 << 3) | 4 is the ModRM byte (mod=11, /0 = ADD, rm=4 = rsp). */
	if (compiler->local_size > 0) {
		if (compiler->local_size <= 127) {
			buf = (sljit_ub*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!buf);
			INC_SIZE(4);
			*buf++ = REX_W;
			*buf++ = 0x83;
			*buf++ = 0xc0 | (0 << 3) | 4;
			*buf = compiler->local_size;
		}
		else {
			buf = (sljit_ub*)ensure_buf(compiler, 1 + 7);
			FAIL_IF(!buf);
			INC_SIZE(7);
			*buf++ = REX_W;
			*buf++ = 0x81;
			*buf++ = 0xc0 | (0 << 3) | 4;
			*(sljit_hw*)buf = compiler->local_size;
		}
	}

	/* Byte budget: ret (1) + one byte per saved-reg pop, plus REX.B prefixes:
	   on SysV, SAVED_REG2 and up need REX.B; on WIN64, only SAVED_EREG1/2 do,
	   and the locals register / TEMPORARY_EREG2 pops cost 2 bytes each. */
	size = 1 + compiler->saveds;
#ifndef _WIN64
	if (compiler->saveds >= 2)
		size += compiler->saveds - 1;
#else
	if (compiler->has_locals)
		size += 2;
	if (compiler->saveds >= 4)
		size += compiler->saveds - 3;
	if (compiler->temporaries >= 5)
		size += (5 - 4) * 2;
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);

	/* Pop in reverse order of the prologue pushes. */
#ifdef _WIN64
	if (compiler->has_locals) {
		*buf++ = REX_B;
		POP_REG(reg_lmap[SLJIT_LOCALS_REG]);
	}
	if (compiler->temporaries >= 5) {
		*buf++ = REX_B;
		POP_REG(reg_lmap[SLJIT_TEMPORARY_EREG2]);
	}
#endif
	if (compiler->saveds >= 1)
		POP_REG(reg_map[SLJIT_SAVED_REG1]);
	if (compiler->saveds >= 2) {
#ifndef _WIN64
		*buf++ = REX_B;
#endif
		POP_REG(reg_lmap[SLJIT_SAVED_REG2]);
	}
	if (compiler->saveds >= 3) {
#ifndef _WIN64
		*buf++ = REX_B;
#endif
		POP_REG(reg_lmap[SLJIT_SAVED_REG3]);
	}
	if (compiler->saveds >= 4) {
		*buf++ = REX_B;
		POP_REG(reg_lmap[SLJIT_SAVED_EREG1]);
	}
	if (compiler->saveds >= 5) {
		*buf++ = REX_B;
		POP_REG(reg_lmap[SLJIT_SAVED_EREG2]);
	}
	RET();
	return SLJIT_SUCCESS;
}