// Compute the CR0 "fast" flag byte (LT=0x8 / GT=0x4 / EQ=0x2) from the sign
// of a 32-bit operand. Immediates are folded at JIT-compile time; for other
// operands we emit a sign test plus branches that store the matching byte.
void Jit64::ComputeRC(const Gen::OpArg & arg) {
	if (arg.IsImm())
	{
		// Immediate operand: decide the flag statically, emit a single store.
		const s32 imm = (s32)arg.offset;
		if (imm < 0)
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x8));  // LT
		else if (imm > 0)
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x4));  // GT
		else
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x2));  // EQ
	}
	else
	{
		// TEST reg,reg encodes shorter than CMP reg,0 when we have a register.
		if (arg.IsSimpleReg())
			TEST(32, arg, arg);
		else
			CMP(32, arg, Imm8(0));
		FixupBranch branchLess    = J_CC(CC_L);
		FixupBranch branchGreater = J_CC(CC_G);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x2)); // operand == 0
		FixupBranch doneFromEq = J();
		SetJumpTarget(branchGreater);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x4)); // operand > 0
		FixupBranch doneFromGt = J();
		SetJumpTarget(branchLess);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x8)); // operand < 0
		SetJumpTarget(doneFromEq);
		SetJumpTarget(doneFromGt);
	}
}
// Store the full 64-bit CR0 comparison value: immediates are written directly
// (Imm32 is sign-extended by the 64-bit MOV), other operands are sign-extended
// from 32 bits through RAX first.
void Jit64::ComputeRC(const Gen::OpArg & arg) {
	// Destination slot for CR0's value, used by both paths below.
	const Gen::OpArg cr0_slot = M(&PowerPC::ppcState.cr_val[0]);
	if (!arg.IsImm())
	{
		MOVSX(64, 32, RAX, arg);
		MOV(64, cr0_slot, R(RAX));
	}
	else
	{
		MOV(64, cr0_slot, Imm32((s32)arg.offset));
	}
}
void EmuCodeBlock::UnsafeLoadToEAX(const Gen::OpArg & opAddress, int accessSize, s32 offset, bool signExtend) { #ifdef _M_X64 if (opAddress.IsSimpleReg()) { MOVZX(32, accessSize, EAX, MComplex(RBX, opAddress.GetSimpleReg(), SCALE_1, offset)); } else { MOV(32, R(EAX), opAddress); MOVZX(32, accessSize, EAX, MComplex(RBX, EAX, SCALE_1, offset)); } #else if (opAddress.IsImm()) { MOVZX(32, accessSize, EAX, M(Memory::base + (((u32)opAddress.offset + offset) & Memory::MEMVIEW32_MASK))); } else { if (!opAddress.IsSimpleReg(EAX)) MOV(32, R(EAX), opAddress); AND(32, R(EAX), Imm32(Memory::MEMVIEW32_MASK)); MOVZX(32, accessSize, EAX, MDisp(EAX, (u32)Memory::base + offset)); } #endif // Add a 2 bytes NOP to have some space for the backpatching if (accessSize == 8) NOP(2); if (accessSize == 32) { BSWAP(32, EAX); } else if (accessSize == 16) { BSWAP(32, EAX); if (signExtend) SAR(32, R(EAX), Imm8(16)); else SHR(32, R(EAX), Imm8(16)); } else if (signExtend) { // TODO: bake 8-bit into the original load. MOVSX(32, accessSize, EAX, R(EAX)); } }
void EmuCodeBlock::SafeLoadToEAX(const Gen::OpArg & opAddress, int accessSize, s32 offset, bool signExtend) { #if defined(_M_X64) #ifdef ENABLE_MEM_CHECK if (!Core::g_CoreStartupParameter.bMMU && !Core::g_CoreStartupParameter.bEnableDebugging) #else if (!Core::g_CoreStartupParameter.bMMU) #endif { UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend); } else #endif { u32 mem_mask = Memory::ADDR_MASK_HW_ACCESS; if (Core::g_CoreStartupParameter.bMMU || Core::g_CoreStartupParameter.iTLBHack) { mem_mask |= Memory::ADDR_MASK_MEM1; } #ifdef ENABLE_MEM_CHECK if (Core::g_CoreStartupParameter.bEnableDebugging) { mem_mask |= Memory::EXRAM_MASK; } #endif if (opAddress.IsImm()) { u32 address = (u32)opAddress.offset + offset; if ((address & mem_mask) == 0) { UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend); } else { switch (accessSize) { case 32: ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), address); break; case 16: ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), address); break; case 8: ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), address); break; } if (signExtend && accessSize < 32) { // Need to sign extend values coming from the Read_U* functions. MOVSX(32, accessSize, EAX, R(EAX)); } } } else { if (offset) { MOV(32, R(EAX), opAddress); ADD(32, R(EAX), Imm32(offset)); TEST(32, R(EAX), Imm32(mem_mask)); FixupBranch fast = J_CC(CC_Z); switch (accessSize) { case 32: ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), EAX); break; case 16: ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), EAX); break; case 8: ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), EAX); break; } if (signExtend && accessSize < 32) { // Need to sign extend values coming from the Read_U* functions. 
MOVSX(32, accessSize, EAX, R(EAX)); } FixupBranch exit = J(); SetJumpTarget(fast); UnsafeLoadToEAX(R(EAX), accessSize, 0, signExtend); SetJumpTarget(exit); } else { TEST(32, opAddress, Imm32(mem_mask)); FixupBranch fast = J_CC(CC_Z); switch (accessSize) { case 32: ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), opAddress); break; case 16: ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), opAddress); break; case 8: ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), opAddress); break; } if (signExtend && accessSize < 32) { // Need to sign extend values coming from the Read_U* functions. MOVSX(32, accessSize, EAX, R(EAX)); } FixupBranch exit = J(); SetJumpTarget(fast); UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend); SetJumpTarget(exit); } } } }