static void registerClobberCheck(AssemblyHelpers& jit, RegisterSet dontClobber)
{
    if (!Options::clobberAllRegsInFTLICSlowPath())
        return;

    // Everything that is neither reserved, stack-related, callee-save, nor
    // explicitly protected by the caller is fair game for clobbering.
    RegisterSet clobber = RegisterSet::allRegisters();
    clobber.exclude(RegisterSet::reservedHardwareRegisters());
    clobber.exclude(RegisterSet::stackRegisters());
    clobber.exclude(RegisterSet::calleeSaveRegisters());
    clobber.exclude(dontClobber);

    // Fill every clobberable GPR with a recognizable poison value, remembering
    // one of them so the FPR loop below has a source to copy from.
    GPRReg someGPR;
    for (Reg reg = Reg::first(); reg <= Reg::last(); reg = reg.next()) {
        if (!clobber.get(reg) || !reg.isGPR())
            continue;

        jit.move(AssemblyHelpers::TrustedImm32(0x1337beef), reg.gpr());
        someGPR = reg.gpr();
    }

    // Propagate the poison bit pattern into every clobberable FPR.
    for (Reg reg = Reg::first(); reg <= Reg::last(); reg = reg.next()) {
        if (!clobber.get(reg) || !reg.isFPR())
            continue;

        jit.move64ToDouble(someGPR, reg.fpr());
    }
}
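// Standalone illustration (not JIT code): move64ToDouble is a raw bit copy from
// a GPR into an FPR, so the 0x1337beef poison written above ends up in the
// clobbered double registers as a tiny denormal value. A minimal host-side
// sketch of that bit copy, using memcpy as a stand-in for the register move:

#include <cstdint>
#include <cstdio>
#include <cstring>

static double bitsToDouble(uint64_t bits)
{
    double result;
    memcpy(&result, &bits, sizeof(result)); // same effect as move64ToDouble
    return result;
}

int main()
{
    uint64_t poison = 0x1337beef; // the 32-bit immediate, zero-extended in the GPR
    printf("poisoned FPR reads back as %g\n", bitsToDouble(poison));
    return 0;
}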
void AssemblyHelpers::emitStoreStructureWithTypeInfo(AssemblyHelpers& jit, TrustedImmPtr structure, RegisterID dest)
{
    const Structure* structurePtr = static_cast<const Structure*>(structure.m_value);
#if USE(JSVALUE64)
    jit.store64(TrustedImm64(structurePtr->idBlob()), MacroAssembler::Address(dest, JSCell::structureIDOffset()));
    if (!ASSERT_DISABLED) {
        Jump correctStructure = jit.branch32(Equal, MacroAssembler::Address(dest, JSCell::structureIDOffset()), TrustedImm32(structurePtr->id()));
        jit.abortWithReason(AHStructureIDIsValid);
        correctStructure.link(&jit);

        Jump correctIndexingType = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::indexingTypeOffset()), TrustedImm32(structurePtr->indexingType()));
        jit.abortWithReason(AHIndexingTypeIsValid);
        correctIndexingType.link(&jit);

        Jump correctType = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::typeInfoTypeOffset()), TrustedImm32(structurePtr->typeInfo().type()));
        jit.abortWithReason(AHTypeInfoIsValid);
        correctType.link(&jit);

        Jump correctFlags = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::typeInfoFlagsOffset()), TrustedImm32(structurePtr->typeInfo().inlineTypeFlags()));
        jit.abortWithReason(AHTypeInfoInlineTypeFlagsAreValid);
        correctFlags.link(&jit);
    }
#else
    // Do a 32-bit wide store to initialize the cell's fields.
    jit.store32(TrustedImm32(structurePtr->objectInitializationBlob()), MacroAssembler::Address(dest, JSCell::indexingTypeOffset()));
    jit.storePtr(structure, MacroAssembler::Address(dest, JSCell::structureIDOffset()));
#endif
}
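// Standalone illustration (not JSC API): the single store64 above works because
// idBlob() packs the cell-header fields that follow structureIDOffset() into one
// 64-bit value, which the ASSERT block then re-reads field by field. A host-side
// sketch of that packing, assuming a little-endian target and a layout of a
// 4-byte structure ID followed by one-byte indexing-type, type, and
// inline-type-flags fields (the exact field order here is an assumption):

#include <cassert>
#include <cstdint>
#include <cstring>

static uint64_t makeIdBlob(uint32_t structureID, uint8_t indexingType, uint8_t type, uint8_t inlineTypeFlags)
{
    return static_cast<uint64_t>(structureID)
        | (static_cast<uint64_t>(indexingType) << 32)
        | (static_cast<uint64_t>(type) << 40)
        | (static_cast<uint64_t>(inlineTypeFlags) << 48);
}

int main()
{
    uint64_t blob = makeIdBlob(0x1234, 23, 42, 0x10);
    unsigned char header[8];
    memcpy(header, &blob, sizeof(blob)); // what the store64 leaves in the cell header

    uint32_t structureID;
    memcpy(&structureID, header, sizeof(structureID));
    assert(structureID == 0x1234); // the branch32 check at structureIDOffset()
    assert(header[4] == 23);       // the branch8 check at indexingTypeOffset()
    assert(header[5] == 42);       // the branch8 check at typeInfoTypeOffset()
    assert(header[6] == 0x10);     // the branch8 check at typeInfoFlagsOffset()
    return 0;
}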
void reboxAccordingToFormat(
    ValueFormat format, AssemblyHelpers& jit, GPRReg value, GPRReg scratch1, GPRReg scratch2)
{
    switch (format) {
    case ValueFormatInt32: {
        // Zero-extend and OR in the number tag to produce a boxed int32.
        jit.zeroExtend32ToPtr(value, value);
        jit.or64(GPRInfo::tagTypeNumberRegister, value);
        break;
    }

    case ValueFormatUInt32: {
        // The zero-extended value may not fit in an int32, so box it as an
        // int52. fpRegT0 is saved in scratch2 and restored afterwards because
        // boxInt52 uses it as a temporary.
        jit.zeroExtend32ToPtr(value, value);
        jit.moveDoubleTo64(FPRInfo::fpRegT0, scratch2);
        jit.boxInt52(value, value, scratch1, FPRInfo::fpRegT0);
        jit.move64ToDouble(scratch2, FPRInfo::fpRegT0);
        break;
    }

    case ValueFormatInt52: {
        // Convert the shifted Int52 representation to strict Int52 first, then
        // box it, preserving fpRegT0 as above.
        jit.rshift64(AssemblyHelpers::TrustedImm32(JSValue::int52ShiftAmount), value);
        jit.moveDoubleTo64(FPRInfo::fpRegT0, scratch2);
        jit.boxInt52(value, value, scratch1, FPRInfo::fpRegT0);
        jit.move64ToDouble(scratch2, FPRInfo::fpRegT0);
        break;
    }

    case ValueFormatStrictInt52: {
        jit.moveDoubleTo64(FPRInfo::fpRegT0, scratch2);
        jit.boxInt52(value, value, scratch1, FPRInfo::fpRegT0);
        jit.move64ToDouble(scratch2, FPRInfo::fpRegT0);
        break;
    }

    case ValueFormatBoolean: {
        // value is 0 or 1; OR-ing in ValueFalse yields the false/true encoding.
        jit.zeroExtend32ToPtr(value, value);
        jit.or32(MacroAssembler::TrustedImm32(ValueFalse), value);
        break;
    }

    case ValueFormatJSValue: {
        // Done already!
        break;
    }

    case ValueFormatDouble: {
        // value holds the raw double bits. Park fpRegT0's contents in scratch1,
        // run the bits through fpRegT0 to box them, then restore fpRegT0.
        jit.moveDoubleTo64(FPRInfo::fpRegT0, scratch1);
        jit.move64ToDouble(value, FPRInfo::fpRegT0);
        jit.boxDouble(FPRInfo::fpRegT0, value);
        jit.move64ToDouble(scratch1, FPRInfo::fpRegT0);
        break;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
}
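// Standalone illustration (not JSC API): the reboxing above targets the
// JSVALUE64 encoding, where int32s live under a high "number" tag, doubles are
// offset so their bit patterns cannot collide with pointers, and booleans are
// small immediates. A host-side sketch of that arithmetic; the constants below
// spell out what tagTypeNumberRegister, boxDouble, and ValueFalse stand for, as
// I understand the scheme (treat them as assumptions, not authoritative values):

#include <cstdint>
#include <cstdio>
#include <cstring>

static const uint64_t TagTypeNumber      = 0xFFFF000000000000ull; // OR-ed in for int32
static const uint64_t DoubleEncodeOffset = 0x0001000000000000ull; // added by boxDouble
static const uint64_t ValueFalse         = 0x06;                  // boolean encoding base

static uint64_t boxInt32(int32_t value)
{
    return TagTypeNumber | static_cast<uint32_t>(value); // zero-extend, then tag
}

static uint64_t boxDouble(double value)
{
    uint64_t bits;
    memcpy(&bits, &value, sizeof(bits));
    return bits + DoubleEncodeOffset;
}

static uint64_t boxBoolean(bool value)
{
    return ValueFalse | static_cast<uint64_t>(value); // 0x06 = false, 0x07 = true
}

int main()
{
    printf("int32 42   -> %#llx\n", static_cast<unsigned long long>(boxInt32(42)));
    printf("double 0.5 -> %#llx\n", static_cast<unsigned long long>(boxDouble(0.5)));
    printf("true       -> %#llx\n", static_cast<unsigned long long>(boxBoolean(true)));
    return 0;
}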
void ValueRep::emitRestore(AssemblyHelpers& jit, Reg reg) const
{
    if (reg.isGPR()) {
        // Restoring into a GPR.
        switch (kind()) {
        case Register:
            if (isGPR())
                jit.move(gpr(), reg.gpr());
            else
                jit.moveDoubleTo64(fpr(), reg.gpr());
            break;
        case Stack:
            jit.load64(AssemblyHelpers::Address(GPRInfo::callFrameRegister, offsetFromFP()), reg.gpr());
            break;
        case Constant:
            jit.move(AssemblyHelpers::TrustedImm64(value()), reg.gpr());
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        return;
    }

    // Restoring into an FPR.
    switch (kind()) {
    case Register:
        if (isGPR())
            jit.move64ToDouble(gpr(), reg.fpr());
        else
            jit.moveDouble(fpr(), reg.fpr());
        break;
    case Stack:
        jit.loadDouble(AssemblyHelpers::Address(GPRInfo::callFrameRegister, offsetFromFP()), reg.fpr());
        break;
    case Constant:
        // Materialize the constant in the scratch GPR, then bit-move it into the FPR.
        jit.move(AssemblyHelpers::TrustedImm64(value()), jit.scratchRegister());
        jit.move64ToDouble(jit.scratchRegister(), reg.fpr());
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
}
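// Standalone illustration (not the B3 API): emitRestore dispatches on two axes:
// where the value currently lives (Register / Stack / Constant) and whether the
// destination is a GPR or an FPR. A hypothetical host-side model of that
// dispatch that just prints the move it would emit (all names below are made up
// for the sketch):

#include <cstdint>
#include <cstdio>

struct ValueRepModel {
    enum Kind { Register, Stack, Constant };
    Kind kind;
    bool sourceIsGPR; // meaningful for Register
    int offsetFromFP; // meaningful for Stack
    int64_t value;    // meaningful for Constant
};

static void describeRestore(const ValueRepModel& rep, bool destIsGPR)
{
    switch (rep.kind) {
    case ValueRepModel::Register:
        printf("%s -> %s register move\n", rep.sourceIsGPR ? "GPR" : "FPR", destIsGPR ? "GPR" : "FPR");
        break;
    case ValueRepModel::Stack:
        printf("%s load from [fp %+d]\n", destIsGPR ? "64-bit" : "double", rep.offsetFromFP);
        break;
    case ValueRepModel::Constant:
        if (destIsGPR)
            printf("materialize constant %lld directly\n", static_cast<long long>(rep.value));
        else
            printf("materialize constant %lld in a scratch GPR, then bit-move it to the FPR\n", static_cast<long long>(rep.value));
        break;
    }
}

int main()
{
    describeRestore({ ValueRepModel::Stack, false, -64, 0 }, true);    // stack slot -> GPR
    describeRestore({ ValueRepModel::Constant, false, 0, 10 }, false); // constant -> FPR
    return 0;
}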
static void generateRegisterRestoration(AssemblyHelpers& jit)
{
#if ENABLE(FTL_JIT)
    RegisterSet toSave = registersToPreserve();
    ptrdiff_t offset = registerPreservationOffset();

    ASSERT(!toSave.get(GPRInfo::regT4));

    // We need to place the stack pointer back to where the caller thought they left it.
    // But also, in order to recover the registers, we need to figure out how big the
    // arguments area is.

    jit.load32(
        AssemblyHelpers::Address(
            AssemblyHelpers::stackPointerRegister,
            (JSStack::ArgumentCount - JSStack::CallerFrameAndPCSize) * sizeof(Register) + PayloadOffset),
        GPRInfo::regT4);

    jit.move(GPRInfo::regT4, GPRInfo::regT2);
    jit.lshift32(AssemblyHelpers::TrustedImm32(3), GPRInfo::regT2);

    jit.addPtr(AssemblyHelpers::TrustedImm32(offset), AssemblyHelpers::stackPointerRegister);

    jit.addPtr(AssemblyHelpers::stackPointerRegister, GPRInfo::regT2);

    // We saved things at:
    //
    //     adjSP + (JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize + NumArgs) * 8
    //
    // Where:
    //
    //     adjSP = origSP - offset
    //
    // regT2 now points at:
    //
    //     origSP + NumArgs * 8
    //   = adjSP + offset + NumArgs * 8
    //
    // So if we subtract offset and then add JSStack::CallFrameHeaderSize and subtract
    // JSStack::CallerFrameAndPCSize, we'll get the thing we want.

    ptrdiff_t currentOffset = -offset + sizeof(Register) * (
        JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize);
    jit.loadPtr(AssemblyHelpers::Address(GPRInfo::regT2, currentOffset), GPRInfo::regT1);

    for (GPRReg gpr = AssemblyHelpers::firstRegister(); gpr <= AssemblyHelpers::lastRegister(); gpr = static_cast<GPRReg>(gpr + 1)) {
        if (!toSave.get(gpr))
            continue;
        currentOffset += sizeof(Register);
        jit.load64(AssemblyHelpers::Address(GPRInfo::regT2, currentOffset), gpr);
    }

    // Thunks like this rely on the ArgumentCount being intact. Pay it forward.
    jit.store32(
        GPRInfo::regT4,
        AssemblyHelpers::Address(
            AssemblyHelpers::stackPointerRegister,
            (JSStack::ArgumentCount - JSStack::CallerFrameAndPCSize) * sizeof(Register) + PayloadOffset));

    if (!ASSERT_DISABLED) {
        AssemblyHelpers::Jump ok = jit.branchPtr(
            AssemblyHelpers::Above, GPRInfo::regT1, AssemblyHelpers::TrustedImmPtr(static_cast<size_t>(0x1000)));
        jit.breakpoint();
        ok.link(&jit);
    }

    jit.jump(GPRInfo::regT1);
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(jit);
    UNREACHABLE_FOR_PLATFORM();
#endif // ENABLE(FTL_JIT)
}
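// Standalone illustration: the offset derivation in the comment inside
// generateRegisterRestoration, checked as plain arithmetic. The concrete numbers
// (preservation offset, header sizes, argument count, stack pointer) are
// hypothetical; only the algebra mirrors the code.

#include <cassert>
#include <cstdint>

int main()
{
    const intptr_t slot = 8;                 // sizeof(Register)
    const intptr_t offset = 6 * slot;        // registerPreservationOffset(), hypothetical
    const intptr_t callFrameHeaderSize = 5;  // JSStack::CallFrameHeaderSize, hypothetical
    const intptr_t callerFrameAndPCSize = 2; // JSStack::CallerFrameAndPCSize, hypothetical
    const intptr_t numArgs = 3;              // the value loaded into regT4, hypothetical
    const intptr_t origSP = 0x10000;         // stack pointer as the caller left it
    const intptr_t adjSP = origSP - offset;  // stack pointer while the registers were saved

    // Where the preservation thunk saved things (per the comment): the slot the
    // restoration code reloads first, with the preserved registers following.
    intptr_t savedBase = adjSP + (callFrameHeaderSize - callerFrameAndPCSize + numArgs) * slot;

    // What the restoration code computes: regT2 = origSP + NumArgs * 8, plus a
    // displacement of -offset + (CallFrameHeaderSize - CallerFrameAndPCSize) * 8.
    intptr_t regT2 = origSP + numArgs * slot;
    intptr_t currentOffset = -offset + slot * (callFrameHeaderSize - callerFrameAndPCSize);

    assert(regT2 + currentOffset == savedBase); // the two address computations agree
    return 0;
}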