JITByIdGenerator::JITByIdGenerator( CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value) : JITInlineCacheGenerator(codeBlock, codeOrigin, callSite, accessType) , m_base(base) , m_value(value) { m_stubInfo->patch.usedRegisters = usedRegisters; m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR()); m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR()); #if USE(JSVALUE32_64) m_stubInfo->patch.baseTagGPR = static_cast<int8_t>(base.tagGPR()); m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(value.tagGPR()); #endif }
// A get-by-id IC is always AccessType::Get; this constructor just forwards
// to the by-id base with that access type filled in.
JITGetByIdGenerator::JITGetByIdGenerator(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite,
    const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value,
    SpillRegistersMode spillMode)
    : JITByIdGenerator(
        codeBlock, codeOrigin, callSite, AccessType::Get, usedRegisters,
        base, value, spillMode)
{
    // The result tag register must not alias the base payload register.
    RELEASE_ASSERT(value.tagGPR() != base.payloadGPR());
}
// Get-by-id constructor that also notes whether the access is for the
// well-known "length" property, which callers can query to emit a faster
// length path.
JITGetByIdGenerator::JITGetByIdGenerator(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite,
    const RegisterSet& usedRegisters, UniquedStringImpl* propertyName,
    JSValueRegs base, JSValueRegs value, AccessType accessType)
    : JITByIdGenerator(codeBlock, codeOrigin, callSite, accessType, usedRegisters, base, value)
    , m_isLengthAccess(codeBlock->vm()->propertyNames->length.impl() == propertyName)
{
    // The result tag register must not alias the base payload register.
    RELEASE_ASSERT(value.tagGPR() != base.payloadGPR());
}
JITByIdGenerator::JITByIdGenerator( CodeBlock* codeBlock, CodeOrigin codeOrigin, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value, bool registersFlushed) : JITInlineCacheGenerator(codeBlock, codeOrigin) , m_base(base) , m_value(value) { m_stubInfo->patch.registersFlushed = registersFlushed; m_stubInfo->patch.usedRegisters = usedRegisters; // This is a convenience - in cases where the only registers you're using are base/value, // it allows you to pass RegisterSet() as the usedRegisters argument. m_stubInfo->patch.usedRegisters.set(base); m_stubInfo->patch.usedRegisters.set(value); m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR()); m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR()); #if USE(JSVALUE32_64) m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(value.tagGPR()); #endif }
JITByIdGenerator::JITByIdGenerator( CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value, SpillRegistersMode spillMode) : JITInlineCacheGenerator(codeBlock, codeOrigin, callSite, accessType) , m_base(base) , m_value(value) { m_stubInfo->patch.spillMode = spillMode; m_stubInfo->patch.usedRegisters = usedRegisters; // This is a convenience - in cases where the only registers you're using are base/value, // it allows you to pass RegisterSet() as the usedRegisters argument. m_stubInfo->patch.usedRegisters.set(base); m_stubInfo->patch.usedRegisters.set(value); m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR()); m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR()); #if USE(JSVALUE32_64) m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(value.tagGPR()); #endif }
// Locks both halves of a JSValue register pair against scratch allocation,
// delegating to the single-GPR overload for each half.
void ScratchRegisterAllocator::lock(JSValueRegs regs)
{
    GPRReg tag = regs.tagGPR();
    GPRReg payload = regs.payloadGPR();
    lock(tag);
    lock(payload);
}
// Loads a cached recovery that currently lives in the old frame's JS stack
// slot into registers, preferring the location the recovery ultimately wants.
// This is the tag/payload-pair flavor: a full JSValue needs either one FPR or
// a pair of GPRs.
void CallFrameShuffler::emitLoad(CachedRecovery& location)
{
    // Nothing to do unless the value is still in the stack.
    if (!location.recovery().isInJSStack())
        return;

    if (verbose)
        dataLog(" * Loading ", location.recovery(), " into ");

    VirtualRegister reg { location.recovery().virtualRegister() };
    MacroAssembler::Address address { addressForOld(reg) };

    // Prefer loading into an FPR, unless one of the wanted GPRs is already
    // free — then loading straight into GPRs avoids a later move.
    bool tryFPR { true };
    JSValueRegs wantedJSValueRegs { location.wantedJSValueRegs() };
    if (wantedJSValueRegs) {
        if (wantedJSValueRegs.payloadGPR() != InvalidGPRReg
            && !m_registers[wantedJSValueRegs.payloadGPR()]
            && !m_lockedRegisters.get(wantedJSValueRegs.payloadGPR()))
            tryFPR = false;
        if (wantedJSValueRegs.tagGPR() != InvalidGPRReg
            && !m_registers[wantedJSValueRegs.tagGPR()]
            && !m_lockedRegisters.get(wantedJSValueRegs.tagGPR()))
            tryFPR = false;
    }

    if (tryFPR && location.loadsIntoFPR()) {
        // Use the wanted FPR if it is free; otherwise any free FPR.
        FPRReg resultFPR = location.wantedFPR();
        if (resultFPR == InvalidFPRReg || m_registers[resultFPR] || m_lockedRegisters.get(resultFPR))
            resultFPR = getFreeFPR();
        if (resultFPR != InvalidFPRReg) {
            m_jit.loadDouble(address, resultFPR);
            // A DataFormatDouble recovery keeps its format; everything else
            // is recorded as DataFormatJS in the FPR.
            DataFormat dataFormat = DataFormatJS;
            if (location.recovery().dataFormat() == DataFormatDouble)
                dataFormat = DataFormatDouble;
            updateRecovery(location, ValueRecovery::inFPR(resultFPR, dataFormat));
            if (verbose)
                dataLog(location.recovery(), "\n");
            // The slot we emptied may have been the danger frontier.
            if (reg == newAsOld(dangerFrontier()))
                updateDangerFrontier();
            return;
        }
    }

    if (location.loadsIntoGPR()) {
        // Payload-only load into a single GPR.
        GPRReg resultGPR { wantedJSValueRegs.payloadGPR() };
        if (resultGPR == InvalidGPRReg || m_registers[resultGPR] || m_lockedRegisters.get(resultGPR))
            resultGPR = getFreeGPR();
        ASSERT(resultGPR != InvalidGPRReg);
        m_jit.loadPtr(address.withOffset(PayloadOffset), resultGPR);
        updateRecovery(location, ValueRecovery::inGPR(resultGPR, location.recovery().dataFormat()));
        if (verbose)
            dataLog(location.recovery(), "\n");
        if (reg == newAsOld(dangerFrontier()))
            updateDangerFrontier();
        return;
    }

    // Full JSValue: load tag and payload into a GPR pair.
    ASSERT(location.recovery().technique() == DisplacedInJSStack);
    GPRReg payloadGPR { wantedJSValueRegs.payloadGPR() };
    GPRReg tagGPR { wantedJSValueRegs.tagGPR() };
    if (payloadGPR == InvalidGPRReg || m_registers[payloadGPR] || m_lockedRegisters.get(payloadGPR))
        payloadGPR = getFreeGPR();
    // Temporarily lock the payload register so getFreeGPR() cannot hand the
    // same register back for the tag.
    m_lockedRegisters.set(payloadGPR);
    if (tagGPR == InvalidGPRReg || m_registers[tagGPR] || m_lockedRegisters.get(tagGPR))
        tagGPR = getFreeGPR();
    m_lockedRegisters.clear(payloadGPR);
    ASSERT(payloadGPR != InvalidGPRReg && tagGPR != InvalidGPRReg && tagGPR != payloadGPR);
    m_jit.loadPtr(address.withOffset(PayloadOffset), payloadGPR);
    m_jit.loadPtr(address.withOffset(TagOffset), tagGPR);
    updateRecovery(location, ValueRecovery::inPair(tagGPR, payloadGPR));
    if (verbose)
        dataLog(location.recovery(), "\n");
    if (reg == newAsOld(dangerFrontier()))
        updateDangerFrontier();
}
// Moves a recovery that is already in registers into the registers it is
// wanted in, emitting moves/swaps and materializing the 32-bit tag word as
// needed. On exit the recovery is recorded as a tag/payload pair.
void CallFrameShuffler::emitDisplace(CachedRecovery& location)
{
    ASSERT(location.recovery().isInRegisters());
    JSValueRegs wantedJSValueRegs { location.wantedJSValueRegs() };
    // We don't support wanted FPRs on 32bit platforms
    ASSERT(wantedJSValueRegs);

    GPRReg wantedTagGPR { wantedJSValueRegs.tagGPR() };
    GPRReg wantedPayloadGPR { wantedJSValueRegs.payloadGPR() };

    // The wanted registers must be free, or already hold this very recovery.
    if (wantedTagGPR != InvalidGPRReg) {
        ASSERT(!m_lockedRegisters.get(wantedTagGPR));
        if (CachedRecovery* currentTag { m_registers[wantedTagGPR] }) {
            if (currentTag == &location) {
                if (verbose)
                    dataLog(" + ", wantedTagGPR, " is OK\n");
            } else {
                // This can never happen on 32bit platforms since we
                // have at most one wanted JSValueRegs, for the
                // callee, and no callee-save registers.
                RELEASE_ASSERT_NOT_REACHED();
            }
        }
    }

    if (wantedPayloadGPR != InvalidGPRReg) {
        ASSERT(!m_lockedRegisters.get(wantedPayloadGPR));
        if (CachedRecovery* currentPayload { m_registers[wantedPayloadGPR] }) {
            if (currentPayload == &location) {
                if (verbose)
                    dataLog(" + ", wantedPayloadGPR, " is OK\n");
            } else {
                // See above
                RELEASE_ASSERT_NOT_REACHED();
            }
        }
    }

    if (location.recovery().technique() == InPair
        || location.recovery().isInGPR()) {
        // Step 1: get the payload into the wanted payload register.
        GPRReg payloadGPR;
        if (location.recovery().technique() == InPair)
            payloadGPR = location.recovery().payloadGPR();
        else
            payloadGPR = location.recovery().gpr();

        if (wantedPayloadGPR == InvalidGPRReg)
            wantedPayloadGPR = payloadGPR;

        if (payloadGPR != wantedPayloadGPR) {
            if (location.recovery().technique() == InPair
                && wantedPayloadGPR == location.recovery().tagGPR()) {
                // The wanted payload register currently holds our own tag:
                // swap, so the tag ends up in the old payload register.
                if (verbose)
                    dataLog(" * Swapping ", payloadGPR, " and ", wantedPayloadGPR, "\n");
                m_jit.swap(payloadGPR, wantedPayloadGPR);
                updateRecovery(location, ValueRecovery::inPair(payloadGPR, wantedPayloadGPR));
            } else {
                if (verbose)
                    dataLog(" * Moving ", payloadGPR, " into ", wantedPayloadGPR, "\n");
                m_jit.move(payloadGPR, wantedPayloadGPR);
                if (location.recovery().technique() == InPair) {
                    updateRecovery(location,
                        ValueRecovery::inPair(location.recovery().tagGPR(), wantedPayloadGPR));
                } else {
                    updateRecovery(location,
                        ValueRecovery::inGPR(wantedPayloadGPR, location.recovery().dataFormat()));
                }
            }
        }

        // Step 2: materialize (for statically-known formats) or move the tag
        // into the wanted tag register.
        if (wantedTagGPR == InvalidGPRReg)
            wantedTagGPR = getFreeGPR();
        switch (location.recovery().dataFormat()) {
        case DataFormatInt32:
            if (verbose)
                dataLog(" * Moving int32 tag into ", wantedTagGPR, "\n");
            m_jit.move(MacroAssembler::TrustedImm32(JSValue::Int32Tag), wantedTagGPR);
            break;
        case DataFormatCell:
            if (verbose)
                dataLog(" * Moving cell tag into ", wantedTagGPR, "\n");
            m_jit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), wantedTagGPR);
            break;
        case DataFormatBoolean:
            if (verbose)
                dataLog(" * Moving boolean tag into ", wantedTagGPR, "\n");
            m_jit.move(MacroAssembler::TrustedImm32(JSValue::BooleanTag), wantedTagGPR);
            break;
        case DataFormatJS:
            // Full JSValue: the tag already lives in a register; move it only
            // if it is not already where it is wanted.
            ASSERT(wantedTagGPR != location.recovery().payloadGPR());
            if (wantedTagGPR != location.recovery().tagGPR()) {
                if (verbose)
                    dataLog(" * Moving ", location.recovery().tagGPR(), " into ", wantedTagGPR, "\n");
                m_jit.move(location.recovery().tagGPR(), wantedTagGPR);
            }
            break;

        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    } else {
        ASSERT(location.recovery().isInFPR());
        // Boxing a double requires two distinct GPRs; lock whichever register
        // is already settled while asking getFreeGPR() for the other, so it
        // cannot return the same register twice.
        if (wantedTagGPR == InvalidGPRReg) {
            ASSERT(wantedPayloadGPR != InvalidGPRReg);
            m_lockedRegisters.set(wantedPayloadGPR);
            wantedTagGPR = getFreeGPR();
            m_lockedRegisters.clear(wantedPayloadGPR);
        }
        if (wantedPayloadGPR == InvalidGPRReg) {
            m_lockedRegisters.set(wantedTagGPR);
            wantedPayloadGPR = getFreeGPR();
            m_lockedRegisters.clear(wantedTagGPR);
        }
        m_jit.boxDouble(location.recovery().fpr(), wantedTagGPR, wantedPayloadGPR);
    }
    updateRecovery(location, ValueRecovery::inPair(wantedTagGPR, wantedPayloadGPR));
}