Example #1
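Jit64::ComputeRC updates the CR0 flag byte from a 32-bit operand. For an immediate, the LT (0x8), GT (0x4) or EQ (0x2) bit is chosen at JIT-compile time; otherwise the emitted code tests or compares the operand against zero and branches to store the matching bit into cr_fast[0].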
void Jit64::ComputeRC(const Gen::OpArg & arg) {
	if (arg.IsImm())
	{
		s32 value = (s32)arg.offset;
		if (value < 0)
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x8)); // value < 0
		else if (value > 0)
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x4)); // value > 0
		else
			MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x2)); // value == 0
	}
	}
	else
	{
		if (arg.IsSimpleReg())
			TEST(32, arg, arg);
		else
			CMP(32, arg, Imm8(0));
		FixupBranch pLesser  = J_CC(CC_L);
		FixupBranch pGreater = J_CC(CC_G);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x2)); // arg == 0
		FixupBranch continue1 = J();

		SetJumpTarget(pGreater);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x4)); // arg > 0
		FixupBranch continue2 = J();

		SetJumpTarget(pLesser);
		MOV(8, M(&PowerPC::ppcState.cr_fast[0]), Imm8(0x8)); // arg < 0

		SetJumpTarget(continue1);
		SetJumpTarget(continue2);
	}
}
Example #2
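UnsafeLoadToEAX emits the unchecked fast-path read of guest memory into EAX. On x64 the address is computed relative to RBX (presumably the register holding the emulated memory base); on x86 the address is masked with MEMVIEW32_MASK and offset by Memory::base. The loaded value is then byte-swapped and zero- or sign-extended according to accessSize and signExtend.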
void EmuCodeBlock::UnsafeLoadToEAX(const Gen::OpArg & opAddress, int accessSize, s32 offset, bool signExtend)
{
#ifdef _M_X64
	if (opAddress.IsSimpleReg())
	{
		MOVZX(32, accessSize, EAX, MComplex(RBX, opAddress.GetSimpleReg(), SCALE_1, offset));
	}
	else
	{
		MOV(32, R(EAX), opAddress);
		MOVZX(32, accessSize, EAX, MComplex(RBX, EAX, SCALE_1, offset));
	}
#else
	if (opAddress.IsImm())
	{
		MOVZX(32, accessSize, EAX, M(Memory::base + (((u32)opAddress.offset + offset) & Memory::MEMVIEW32_MASK)));
	}
	else
	{
		if (!opAddress.IsSimpleReg(EAX))
			MOV(32, R(EAX), opAddress);
		AND(32, R(EAX), Imm32(Memory::MEMVIEW32_MASK));
		MOVZX(32, accessSize, EAX, MDisp(EAX, (u32)Memory::base + offset));
	}
#endif

	// Add a 2-byte NOP to leave some space for backpatching
	if (accessSize == 8)
		NOP(2);

	if (accessSize == 32)
	{
		BSWAP(32, EAX);
	}
	else if (accessSize == 16)
	{
		// The zero-extended halfword ends up in the top 16 bits after BSWAP,
		// so shift it back down with or without sign extension.
		BSWAP(32, EAX);
		if (signExtend)
			SAR(32, R(EAX), Imm8(16));
		else
			SHR(32, R(EAX), Imm8(16));
	}
	else if (signExtend)
	{
		// TODO: bake 8-bit into the original load.
		MOVSX(32, accessSize, EAX, R(EAX));   
	}
}
Example #3
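ABI_CallFunctionAC moves an OpArg into the first parameter register (skipping the move when IsSimpleReg reports it is already there), loads a 32-bit constant into the second, and delegates the call to ABI_CallFunction.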
void XEmitter::ABI_CallFunctionAC(int bits, const void *func, const Gen::OpArg &arg1, u32 param2)
{
	if (!arg1.IsSimpleReg(ABI_PARAM1))
		MOV(bits, R(ABI_PARAM1), arg1);
	MOV(32, R(ABI_PARAM2), Imm32(param2));
	ABI_CallFunction(func);
}
Example #4
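ABI_CallFunctionAA applies the same already-in-the-right-register check to two OpArg parameters, then calls the target directly or, when it lies outside the ±2 GB range of a rel32 call, indirectly through RAX.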
void XEmitter::ABI_CallFunctionAA(void *func, const Gen::OpArg &arg1, const Gen::OpArg &arg2)
{
	if (!arg1.IsSimpleReg(ABI_PARAM1))
		MOV(32, R(ABI_PARAM1), arg1);
	if (!arg2.IsSimpleReg(ABI_PARAM2))
		MOV(32, R(ABI_PARAM2), arg2);
	u64 distance = u64(func) - (u64(code) + 5);
	if (distance >= 0x0000000080000000ULL
	 && distance <  0xFFFFFFFF80000000ULL) {
		// Far call
		MOV(64, R(RAX), Imm64((u64)func));
		CALLptr(R(RAX));
	} else {
		CALL(func);
	}
}
Example #5
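ABI_CallFunctionPAA passes a pointer plus two OpArgs, again skipping redundant moves and falling back to the far call through RAX when the target is out of rel32 range.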
void XEmitter::ABI_CallFunctionPAA(const void *func, void *param1, const Gen::OpArg &arg2, const Gen::OpArg &arg3) {
    MOV(64, R(ABI_PARAM1), ImmPtr(param1));
    if (!arg2.IsSimpleReg(ABI_PARAM2))
        MOV(32, R(ABI_PARAM2), arg2);
    if (!arg3.IsSimpleReg(ABI_PARAM3))
        MOV(32, R(ABI_PARAM3), arg3);
    u64 distance = u64(func) - (u64(code) + 5);
    if (distance >= 0x0000000080000000ULL
     && distance <  0xFFFFFFFF80000000ULL) {
        // Far call
        MOV(64, R(RAX), ImmPtr(func));
        CALLptr(R(RAX));
    } else {
        CALL(func);
    }
}
Example #6
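A different revision of Jit64::ComputeRC (presumably later): instead of precomputed LT/GT/EQ bits it stores the full sign-extended 64-bit value into cr_val[0], directly for immediates or via MOVSX through RAX otherwise.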
void Jit64::ComputeRC(const Gen::OpArg & arg)
{
	if (arg.IsImm())
	{
		MOV(64, M(&PowerPC::ppcState.cr_val[0]), Imm32((s32)arg.offset));
	}
	else
	{
		MOVSX(64, 32, RAX, arg);
		MOV(64, M(&PowerPC::ppcState.cr_val[0]), R(RAX));
	}
}
Example #7
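A variant of ABI_CallFunctionAC that wraps the call in ABI_AlignStack/ABI_RestoreStack and performs the far-call range check inline instead of delegating to ABI_CallFunction.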
void XEmitter::ABI_CallFunctionAC(void *func, const Gen::OpArg &arg1, u32 param2)
{
	ABI_AlignStack(0);
	if (!arg1.IsSimpleReg(ABI_PARAM1))
		MOV(32, R(ABI_PARAM1), arg1);
	MOV(32, R(ABI_PARAM2), Imm32(param2));
	u64 distance = u64(func) - (u64(code) + 5);
	if (distance >= 0x0000000080000000ULL
	 && distance <  0xFFFFFFFF80000000ULL) {
		// Far call
		MOV(64, R(RAX), Imm64((u64)func));
		CALLptr(R(RAX));
	} else {
		CALL(func);
	}
	ABI_RestoreStack(0);
}
Example #8
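ABI_CallFunctionA is the minimal case: move a single OpArg into the first parameter register only if it is not already there, then call.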
void XEmitter::ABI_CallFunctionA(int bits, const void *func, const Gen::OpArg &arg1)
{
	if (!arg1.IsSimpleReg(ABI_PARAM1))
		MOV(bits, R(ABI_PARAM1), arg1);
	ABI_CallFunction(func);
}
Example #9
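SafeLoadToEAX is the checked counterpart of UnsafeLoadToEAX. With the MMU (and, when memory checks are compiled in, the debugger) disabled on x64 it simply emits the fast path; otherwise it tests the effective address against mem_mask and either takes the inline fast path or calls the appropriate Memory::Read_U* thunk, sign-extending the result for sub-32-bit reads when requested.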
void EmuCodeBlock::SafeLoadToEAX(const Gen::OpArg & opAddress, int accessSize, s32 offset, bool signExtend)
{
#if defined(_M_X64)
#ifdef ENABLE_MEM_CHECK
	if (!Core::g_CoreStartupParameter.bMMU && !Core::g_CoreStartupParameter.bEnableDebugging)
#else
	if (!Core::g_CoreStartupParameter.bMMU)
#endif
	{
		UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend);
	}
	else
#endif
	{
		u32 mem_mask = Memory::ADDR_MASK_HW_ACCESS;
		if (Core::g_CoreStartupParameter.bMMU || Core::g_CoreStartupParameter.iTLBHack)
		{
			mem_mask |= Memory::ADDR_MASK_MEM1;
		}

#ifdef ENABLE_MEM_CHECK
		if (Core::g_CoreStartupParameter.bEnableDebugging)
		{
			mem_mask |= Memory::EXRAM_MASK;
		}
#endif

		if (opAddress.IsImm())
		{
			u32 address = (u32)opAddress.offset + offset;
			if ((address & mem_mask) == 0)
			{
				UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend);
			}
			else
			{
				switch (accessSize)
				{
				case 32: ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), address); break;
				case 16: ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), address); break;
				case 8:  ABI_CallFunctionC(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), address); break;
				}
				if (signExtend && accessSize < 32)
				{
					// Need to sign extend values coming from the Read_U* functions.
					MOVSX(32, accessSize, EAX, R(EAX));
				}
			}
		}
		else
		{
			if (offset)
			{
				MOV(32, R(EAX), opAddress);
				ADD(32, R(EAX), Imm32(offset));
				TEST(32, R(EAX), Imm32(mem_mask));
				FixupBranch fast = J_CC(CC_Z);

				switch (accessSize)
				{
				case 32: ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), EAX); break;
				case 16: ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), EAX); break;
				case 8:  ABI_CallFunctionR(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), EAX);  break;
				}
				if (signExtend && accessSize < 32)
				{
					// Need to sign extend values coming from the Read_U* functions.
					MOVSX(32, accessSize, EAX, R(EAX));
				}

				FixupBranch exit = J();
				SetJumpTarget(fast);
				UnsafeLoadToEAX(R(EAX), accessSize, 0, signExtend);
				SetJumpTarget(exit);
			}
			else
			{
				TEST(32, opAddress, Imm32(mem_mask));
				FixupBranch fast = J_CC(CC_Z);

				switch (accessSize)
				{
				case 32: ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U32, 1), opAddress); break;
				case 16: ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U16_ZX, 1), opAddress); break;
				case 8:  ABI_CallFunctionA(thunks.ProtectFunction((void *)&Memory::Read_U8_ZX, 1), opAddress);  break;
				}
				if (signExtend && accessSize < 32)
				{
					// Need to sign extend values coming from the Read_U* functions.
					MOVSX(32, accessSize, EAX, R(EAX));
				}

				FixupBranch exit = J();
				SetJumpTarget(fast);
				UnsafeLoadToEAX(opAddress, accessSize, offset, signExtend);
				SetJumpTarget(exit);
			}
		}
	}
}