// Emits code that computes the effective address (rs + offset) into R0 and
// validates it against the PSP's three accessible memory regions (scratchpad,
// RAM, VRAM).  tempReg is clobbered as a bitmask scratch register.
//
// On exit the emitter's condition code is left set (CC_GT when the address is
// valid, or CC_EQ when `reverse` inverts the sense), so the caller's
// subsequently emitted instructions execute conditionally on the check —
// callers must SetCC(CC_AL) when done.  The check works by starting with all
// three region bits set and conditionally clearing each region's bit when R0
// falls outside that region; any bit left set means the address is OK.
void Jit::SetCCAndR0ForSafeAddress(MIPSGPReg rs, s16 offset, ARMReg tempReg, bool reverse) {
	SetR0ToEffectiveAddress(rs, offset);
	// There are three valid ranges. Each one gets a bit.
	const u32 BIT_SCRATCH = 1, BIT_RAM = 2, BIT_VRAM = 4;
	MOVI2R(tempReg, BIT_SCRATCH | BIT_RAM | BIT_VRAM);
	// Scratchpad: clear BIT_SCRATCH when below the base...
	CMP(R0, AssumeMakeOperand2(PSP_GetScratchpadMemoryBase()));
	SetCC(CC_LO);
	BIC(tempReg, tempReg, BIT_SCRATCH);
	// ...or (conditionally re-comparing only when we were >= base) when at or
	// past the end.  Note the CMP itself is conditional on CC_HS, so the flags
	// from the previous compare survive when it is skipped.
	SetCC(CC_HS);
	CMP(R0, AssumeMakeOperand2(PSP_GetScratchpadMemoryEnd()));
	BIC(tempReg, tempReg, BIT_SCRATCH);
	// If it was in that range, later compares don't matter.
	// VRAM range check, same conditional-compare pattern.
	CMP(R0, AssumeMakeOperand2(PSP_GetVidMemBase()));
	SetCC(CC_LO);
	BIC(tempReg, tempReg, BIT_VRAM);
	SetCC(CC_HS);
	CMP(R0, AssumeMakeOperand2(PSP_GetVidMemEnd()));
	BIC(tempReg, tempReg, BIT_VRAM);
	// RAM range check: from kernel memory base up to the end of user memory.
	CMP(R0, AssumeMakeOperand2(PSP_GetKernelMemoryBase()));
	SetCC(CC_LO);
	BIC(tempReg, tempReg, BIT_RAM);
	SetCC(CC_HS);
	CMP(R0, AssumeMakeOperand2(PSP_GetUserMemoryEnd()));
	BIC(tempReg, tempReg, BIT_RAM);
	// If we left any bit set, the address is OK.
	// tempReg is 0..7 so signed GT is safe here: GT <=> "some bit survived".
	SetCC(CC_AL);
	CMP(tempReg, 0);
	SetCC(reverse ? CC_EQ : CC_GT);
}
// Compiles the FPU load/store word instructions: lwc1 (opcode 49, load a
// 32-bit word into FPU reg ft from rs+offset) and swc1 (opcode 57, store
// FPU reg ft to rs+offset).  Anything else falls back to the interpreter.
void Jit::Comp_FPULS(u32 op) {
	CONDITIONAL_DISABLE;
	s32 offset = (s16)(op & 0xFFFF);  // sign-extended 16-bit displacement
	int ft = _FT;
	int rs = _RS;
	// u32 addr = R(rs) + offset;
	// logBlocks = 1;
	bool doCheck = false;
	switch(op >> 26) {
	case 49: //FI(ft) = Memory::Read_U32(addr); break; //lwc1
		fpr.MapReg(ft, MAP_NOINIT | MAP_DIRTY);
		if (gpr.IsImm(rs)) {
			// Base register is a known constant: fold the full host address
			// at compile time (guest address masked to 0x3FFFFFFF).
			u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
			MOVI2R(R0, addr + (u32)Memory::base);
		} else {
			gpr.MapReg(rs);
			if (g_Config.bFastMemory) {
				SetR0ToEffectiveAddress(rs, offset);
			} else {
				// Slow path: validates the address and leaves CC set, so the
				// ADD and VLDR below only execute when the address is valid.
				SetCCAndR0ForSafeAddress(rs, offset, R1);
				doCheck = true;
			}
			ADD(R0, R0, R11);  // R11 presumably holds Memory::base (matches the imm path) — confirm
		}
		VLDR(fpr.R(ft), R0, 0);
		if (doCheck) {
			// Invalid address: load 0 into the destination register instead.
			SetCC(CC_EQ);
			MOVI2R(R0, 0);
			VMOV(fpr.R(ft), R0);
			SetCC(CC_AL);
		}
		break;
	case 57: //Memory::Write_U32(FI(ft), addr); break; //swc1
		fpr.MapReg(ft);
		if (gpr.IsImm(rs)) {
			u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
			MOVI2R(R0, addr + (u32)Memory::base);
		} else {
			gpr.MapReg(rs);
			if (g_Config.bFastMemory) {
				SetR0ToEffectiveAddress(rs, offset);
			} else {
				// Slow path leaves CC set, making the conditional VSTR below
				// a no-op on an invalid address (the store is simply skipped).
				SetCCAndR0ForSafeAddress(rs, offset, R1);
				doCheck = true;
			}
			ADD(R0, R0, R11);
		}
		VSTR(fpr.R(ft), R0, 0);
		if (doCheck) {
			// Restore unconditional emission for subsequent instructions.
			SetCC(CC_AL);
		}
		break;
	default:
		Comp_Generic(op);
		return;
	}
}
// Compiles the VFPU quad (128-bit) load/store instructions: lv.q (opcode 54)
// and sv.q (opcode 62).  A quad is four consecutive 32-bit words at rs+imm,
// moved to/from the four vector registers of the V_Quad matrix rooted at vt.
void Jit::Comp_SVQ(MIPSOpcode op) {
	CONDITIONAL_DISABLE;
	int imm = (signed short)(op&0xFFFC);  // low two bits of the offset are encoding bits, not offset
	int vt = (((op >> 16) & 0x1f)) | ((op&1) << 5);
	MIPSGPReg rs = _RS;
	bool doCheck = false;
	switch (op >> 26) {
	case 54: //lv.q
		{
			// CC might be set by slow path below, so load regs first.
			u8 vregs[4];
			GetVectorRegs(vregs, V_Quad, vt);
			fpr.MapRegsAndSpillLockV(vregs, V_Quad, MAP_DIRTY | MAP_NOINIT);
			if (gpr.IsImm(rs)) {
				// Known base: fold the full host address at compile time.
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					// Leaves CC set; the ADD/VLDRs below become conditional.
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);  // R11 presumably holds Memory::base — confirm
			}
#ifdef __ARM_ARCH_7S__
			// On armv7s, branch around the loads instead of emitting them
			// conditionally.
			FixupBranch skip;
			if (doCheck) {
				skip = B_CC(CC_EQ);
			}
			for (int i = 0; i < 4; i++)
				VLDR(fpr.V(vregs[i]), R0, i * 4);
			if (doCheck) {
				SetJumpTarget(skip);
				SetCC(CC_AL);
			}
#else
			for (int i = 0; i < 4; i++)
				VLDR(fpr.V(vregs[i]), R0, i * 4);
			if (doCheck) {
				// Invalid address: zero all four destination registers.
				SetCC(CC_EQ);
				MOVI2R(R0, 0);
				for (int i = 0; i < 4; i++)
					VMOV(fpr.V(vregs[i]), R0);
				SetCC(CC_AL);
			}
#endif
		}
		break;
	case 62: //sv.q
		{
			// CC might be set by slow path below, so load regs first.
			u8 vregs[4];
			GetVectorRegs(vregs, V_Quad, vt);
			fpr.MapRegsAndSpillLockV(vregs, V_Quad, 0);
			if (gpr.IsImm(rs)) {
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);
			}
#ifdef __ARM_ARCH_7S__
			FixupBranch skip;
			if (doCheck) {
				skip = B_CC(CC_EQ);
			}
			for (int i = 0; i < 4; i++)
				VSTR(fpr.V(vregs[i]), R0, i * 4);
			if (doCheck) {
				SetJumpTarget(skip);
				SetCC(CC_AL);
			}
#else
			for (int i = 0; i < 4; i++)
				VSTR(fpr.V(vregs[i]), R0, i * 4);
			if (doCheck) {
				// Stores were conditional; restore unconditional emission.
				SetCC(CC_AL);
			}
#endif
		}
		break;
	default:
		DISABLE;
		break;
	}
	fpr.ReleaseSpillLocksAndDiscardTemps();
}
// Compiles the VFPU single-word load/store instructions: lv.s (opcode 50)
// and sv.s (opcode 58), moving one 32-bit word between vector register vt
// and memory at rs+imm.
void Jit::Comp_SV(MIPSOpcode op) {
	CONDITIONAL_DISABLE;
	s32 imm = (signed short)(op&0xFFFC);  // low two offset bits are part of the vt encoding
	int vt = ((op >> 16) & 0x1f) | ((op & 3) << 5);
	MIPSGPReg rs = _RS;
	bool doCheck = false;
	switch (op >> 26) {
	case 50: //lv.s  // VI(vt) = Memory::Read_U32(addr);
		{
			// CC might be set by slow path below, so load regs first.
			fpr.MapRegV(vt, MAP_DIRTY | MAP_NOINIT);
			if (gpr.IsImm(rs)) {
				// Known base: fold the full host address at compile time.
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					// Leaves CC set; the ADD/VLDR below become conditional.
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);  // R11 presumably holds Memory::base — confirm
			}
#ifdef __ARM_ARCH_7S__
			// On armv7s, branch around the load instead of emitting it
			// conditionally.
			FixupBranch skip;
			if (doCheck) {
				skip = B_CC(CC_EQ);
			}
			VLDR(fpr.V(vt), R0, 0);
			if (doCheck) {
				SetJumpTarget(skip);
				SetCC(CC_AL);
			}
#else
			VLDR(fpr.V(vt), R0, 0);
			if (doCheck) {
				// Invalid address: load 0.0f into the destination instead.
				SetCC(CC_EQ);
				MOVI2F(fpr.V(vt), 0.0f, R0);
				SetCC(CC_AL);
			}
#endif
		}
		break;
	case 58: //sv.s  // Memory::Write_U32(VI(vt), addr);
		{
			// CC might be set by slow path below, so load regs first.
			fpr.MapRegV(vt);
			if (gpr.IsImm(rs)) {
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);
			}
#ifdef __ARM_ARCH_7S__
			FixupBranch skip;
			if (doCheck) {
				skip = B_CC(CC_EQ);
			}
			VSTR(fpr.V(vt), R0, 0);
			if (doCheck) {
				SetJumpTarget(skip);
				SetCC(CC_AL);
			}
#else
			VSTR(fpr.V(vt), R0, 0);
			if (doCheck) {
				// Store was conditional; restore unconditional emission.
				SetCC(CC_AL);
			}
#endif
		}
		break;
	default:
		DISABLE;
	}
}
// Compiles lv.s (opcode 50) and sv.s (opcode 58), one 32-bit word between
// vector register vt and memory at rs+imm.
//
// NOTE(review): this u32-overload looks like an older duplicate of
// Comp_SV(MIPSOpcode) above — it lacks the __ARM_ARCH_7S__ handling, uses
// fpr.ReleaseSpillLocks() and a raw VMOV-of-zero instead of MOVI2F, and takes
// `int rs` rather than MIPSGPReg.  Confirm which overload the dispatch table
// actually uses; if this one is dead, it should be removed.
void Jit::Comp_SV(u32 op) {
	CONDITIONAL_DISABLE;
	s32 imm = (signed short)(op&0xFFFC);  // low two offset bits are part of the vt encoding
	int vt = ((op >> 16) & 0x1f) | ((op & 3) << 5);
	int rs = _RS;
	bool doCheck = false;
	switch (op >> 26) {
	case 50: //lv.s  // VI(vt) = Memory::Read_U32(addr);
		{
			// CC might be set by slow path below, so load regs first.
			fpr.MapRegV(vt, MAP_DIRTY | MAP_NOINIT);
			fpr.ReleaseSpillLocks();
			if (gpr.IsImm(rs)) {
				// Known base: fold the full host address at compile time.
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					// Leaves CC set; the ADD/VLDR below become conditional.
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);  // R11 presumably holds Memory::base — confirm
			}
			VLDR(fpr.V(vt), R0, 0);
			if (doCheck) {
				// Invalid address: load 0 into the destination instead.
				SetCC(CC_EQ);
				MOVI2R(R0, 0);
				VMOV(fpr.V(vt), R0);
				SetCC(CC_AL);
			}
		}
		break;
	case 58: //sv.s  // Memory::Write_U32(VI(vt), addr);
		{
			// CC might be set by slow path below, so load regs first.
			fpr.MapRegV(vt);
			fpr.ReleaseSpillLocks();
			if (gpr.IsImm(rs)) {
				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
				MOVI2R(R0, addr + (u32)Memory::base);
			} else {
				gpr.MapReg(rs);
				if (g_Config.bFastMemory) {
					SetR0ToEffectiveAddress(rs, imm);
				} else {
					SetCCAndR0ForSafeAddress(rs, imm, R1);
					doCheck = true;
				}
				ADD(R0, R0, R11);
			}
			VSTR(fpr.V(vt), R0, 0);
			if (doCheck) {
				// Store was conditional; restore unconditional emission.
				SetCC(CC_AL);
			}
		}
		break;
	default:
		DISABLE;
	}
}