JSValue CallFrame::uncheckedActivation() const
{
    // Read the activation slot of this frame as a raw JSValue, without
    // checking what kind of cell (if any) is stored there.
    CodeBlock* block = this->codeBlock();
    RELEASE_ASSERT(block->needsActivation());
    VirtualRegister activationReg = block->activationRegister();
    const Register& slot = registers()[activationReg.offset()];
    return slot.jsValue();
}
void CallFrame::setActivation(JSLexicalEnvironment* lexicalEnvironment)
{
    // Store the given lexical environment into this frame's activation slot.
    // Only valid for code blocks that actually allocate an activation.
    CodeBlock* block = this->codeBlock();
    RELEASE_ASSERT(block->needsActivation());
    VirtualRegister activationReg = block->activationRegister();
    registers()[activationReg.offset()] = lexicalEnvironment;
}
JSLexicalEnvironment* CallFrame::lexicalEnvironment() const
{
    // Fetch this frame's activation slot and interpret it as a lexical
    // environment. The Register:: qualification picks Register's accessor
    // rather than recursing into this function.
    CodeBlock* block = this->codeBlock();
    RELEASE_ASSERT(block->needsActivation());
    VirtualRegister activationReg = block->activationRegister();
    const Register& slot = registers()[activationReg.offset()];
    return slot.Register::lexicalEnvironment();
}
EncodedJSValue JSLexicalEnvironment::argumentsGetter(ExecState*, JSObject* slotBase, EncodedJSValue, PropertyName)
{
    // Custom getter for the "arguments" property on a lexical environment.
    //
    // FIXME: Arguments reification is not hooked up for lexical environments,
    // so this getter always reports undefined. The previous body contained a
    // full reification sequence (mirroring JSActivation::argumentsGetter)
    // after the unconditional early return below; that code was unreachable
    // dead code and has been removed. Restore it behind the proper guards
    // when reification is implemented.
    JSLexicalEnvironment* lexicalEnvironment = jsCast<JSLexicalEnvironment*>(slotBase);
    (void)lexicalEnvironment; // Keep jsCast<>'s debug-build type check without the dead path.
    return JSValue::encode(jsUndefined());
}
// Custom getter for the "arguments" property on an activation. Lazily creates
// the Arguments object the first time it is observed and caches it back into
// the frame's (modified and unmodified) arguments registers.
EncodedJSValue JSActivation::argumentsGetter(ExecState*, EncodedJSValue slotBase, EncodedJSValue, PropertyName)
{
    JSActivation* activation = jsCast<JSActivation*>(JSValue::decode(slotBase));
    // m_registers points back into the owning frame's register file; reinterpret
    // it as a CallFrame so we can reach the code block and argument registers.
    CallFrame* callFrame = CallFrame::create(reinterpret_cast<Register*>(activation->m_registers));
    // NOTE(review): the ASSERT claims the condition the following `if` guards
    // against can never happen; in debug builds the assert fires in exactly the
    // case the release build handles by returning undefined. Presumably the
    // guard is defensive hardening — confirm which is authoritative.
    ASSERT(!activation->isTornOff() && (callFrame->codeBlock()->usesArguments() || callFrame->codeBlock()->usesEval()));
    if (activation->isTornOff() || !(callFrame->codeBlock()->usesArguments() || callFrame->codeBlock()->usesEval()))
        return JSValue::encode(jsUndefined());

    VirtualRegister argumentsRegister = callFrame->codeBlock()->argumentsRegister();
    // Fast path: an Arguments object was already materialized for this frame.
    if (JSValue arguments = callFrame->uncheckedR(argumentsRegister.offset()).jsValue())
        return JSValue::encode(arguments);
    int realArgumentsRegister = unmodifiedArgumentsRegister(argumentsRegister).offset();

    // Slow path: create the Arguments object now and cache it in both the
    // possibly-modified and the unmodified arguments registers so later reads
    // (from either register) see the same object.
    JSValue arguments = JSValue(Arguments::create(callFrame->vm(), callFrame));
    callFrame->uncheckedR(argumentsRegister.offset()) = arguments;
    callFrame->uncheckedR(realArgumentsRegister) = arguments;

    ASSERT(callFrame->uncheckedR(realArgumentsRegister).jsValue().inherits(Arguments::info()));
    return JSValue::encode(callFrame->uncheckedR(realArgumentsRegister).jsValue());
}
void emitSetupVarargsFrameFastCase(CCallHelpers& jit, GPRReg numUsedSlotsGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, GPRReg scratchGPR3, ValueRecovery argCountRecovery, VirtualRegister firstArgumentReg, unsigned firstVarArgOffset, CCallHelpers::JumpList& slowCase) { CCallHelpers::JumpList end; if (argCountRecovery.isConstant()) { // FIXME: We could constant-fold a lot of the computation below in this case. // https://bugs.webkit.org/show_bug.cgi?id=141486 jit.move(CCallHelpers::TrustedImm32(argCountRecovery.constant().asInt32()), scratchGPR1); } else jit.load32(CCallHelpers::payloadFor(argCountRecovery.virtualRegister()), scratchGPR1); if (firstVarArgOffset) { CCallHelpers::Jump sufficientArguments = jit.branch32(CCallHelpers::GreaterThan, scratchGPR1, CCallHelpers::TrustedImm32(firstVarArgOffset + 1)); jit.move(CCallHelpers::TrustedImm32(1), scratchGPR1); CCallHelpers::Jump endVarArgs = jit.jump(); sufficientArguments.link(&jit); jit.sub32(CCallHelpers::TrustedImm32(firstVarArgOffset), scratchGPR1); endVarArgs.link(&jit); } slowCase.append(jit.branch32(CCallHelpers::Above, scratchGPR1, CCallHelpers::TrustedImm32(maxArguments + 1))); emitSetVarargsFrame(jit, scratchGPR1, true, numUsedSlotsGPR, scratchGPR2); slowCase.append(jit.branchPtr(CCallHelpers::Above, CCallHelpers::AbsoluteAddress(jit.vm()->addressOfStackLimit()), scratchGPR2)); // Initialize ArgumentCount. jit.store32(scratchGPR1, CCallHelpers::Address(scratchGPR2, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset)); // Copy arguments. 
jit.signExtend32ToPtr(scratchGPR1, scratchGPR1); CCallHelpers::Jump done = jit.branchSubPtr(CCallHelpers::Zero, CCallHelpers::TrustedImm32(1), scratchGPR1); // scratchGPR1: argumentCount CCallHelpers::Label copyLoop = jit.label(); int argOffset = (firstArgumentReg.offset() - 1 + firstVarArgOffset) * static_cast<int>(sizeof(Register)); #if USE(JSVALUE64) jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset), scratchGPR3); jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))); #else // USE(JSVALUE64), so this begins the 32-bit case jit.load32(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset + TagOffset), scratchGPR3); jit.store32(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)) + TagOffset)); jit.load32(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset + PayloadOffset), scratchGPR3); jit.store32(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)) + PayloadOffset)); #endif // USE(JSVALUE64), end of 32-bit case jit.branchSubPtr(CCallHelpers::NonZero, CCallHelpers::TrustedImm32(1), scratchGPR1).linkTo(copyLoop, &jit); done.link(&jit); }
// Rewrites a generator's bytecode so it can be suspended and resumed: a switch
// at the entry dispatches on the generator's state to the correct resume point,
// and each yield is bracketed by a save sequence (spill live locals into the
// generator frame scope) and a resume sequence (reload them).
void BytecodeGeneratorification::run()
{
    // We calculate the liveness at each merge point. This gives us the information which registers should be saved and resumed conservatively.
    {
        GeneratorLivenessAnalysis pass(*this);
        pass.run();
    }

    UnlinkedCodeBlock* codeBlock = m_graph.codeBlock();
    BytecodeRewriter rewriter(m_graph);

    // Setup the global switch for the generator.
    {
        // The switch is inserted immediately after op_enter. Table entry 0 falls
        // through to normal entry; entry i+1 jumps to the i-th yield point.
        unsigned nextToEnterPoint = enterPoint() + opcodeLength(op_enter);
        unsigned switchTableIndex = m_graph.codeBlock()->numberOfSwitchJumpTables();
        VirtualRegister state = virtualRegisterForArgument(static_cast<int32_t>(JSGeneratorFunction::GeneratorArgument::State));
        auto& jumpTable = m_graph.codeBlock()->addSwitchJumpTable();
        jumpTable.min = 0;
        jumpTable.branchOffsets.resize(m_yields.size() + 1);
        jumpTable.branchOffsets.fill(0);
        jumpTable.add(0, nextToEnterPoint);
        for (unsigned i = 0; i < m_yields.size(); ++i)
            jumpTable.add(i + 1, m_yields[i].point);

        rewriter.insertFragmentBefore(nextToEnterPoint, [&](BytecodeRewriter::Fragment& fragment) {
            fragment.appendInstruction(op_switch_imm, switchTableIndex, nextToEnterPoint, state.offset());
        });
    }

    for (const YieldData& data : m_yields) {
        // The generator frame, passed as an argument, acts as the scope that
        // persists live locals across the suspension.
        VirtualRegister scope = virtualRegisterForArgument(static_cast<int32_t>(JSGeneratorFunction::GeneratorArgument::Frame));

        // Emit save sequence.
        rewriter.insertFragmentBefore(data.point, [&](BytecodeRewriter::Fragment& fragment) {
            // Spill every local live across this yield into the generator frame.
            data.liveness.forEachSetBit([&](size_t index) {
                VirtualRegister operand = virtualRegisterForLocal(index);
                Storage storage = storageForGeneratorLocal(index);

                fragment.appendInstruction(
                    op_put_to_scope,
                    scope.offset(), // scope
                    storage.identifierIndex, // identifier
                    operand.offset(), // value
                    GetPutInfo(DoNotThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization).operand(), // info
                    m_generatorFrameSymbolTableIndex, // symbol table constant index
                    storage.scopeOffset.offset() // scope offset
                );
            });

            // Insert op_ret just after save sequence.
            fragment.appendInstruction(op_ret, data.argument);
        });

        // Emit resume sequence.
        rewriter.insertFragmentAfter(data.point, [&](BytecodeRewriter::Fragment& fragment) {
            // Reload every saved local from the generator frame.
            data.liveness.forEachSetBit([&](size_t index) {
                VirtualRegister operand = virtualRegisterForLocal(index);
                Storage storage = storageForGeneratorLocal(index);
                UnlinkedValueProfile profile = codeBlock->addValueProfile();

                fragment.appendInstruction(
                    op_get_from_scope,
                    operand.offset(), // dst
                    scope.offset(), // scope
                    storage.identifierIndex, // identifier
                    GetPutInfo(DoNotThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization).operand(), // info
                    0, // local scope depth
                    storage.scopeOffset.offset(), // scope offset
                    profile // profile
                );
            });
        });

        // Clip the unnecessary bytecodes.
        rewriter.removeBytecode(data.point);
    }

    rewriter.execute();
}
// Inserts Flush nodes in a block so that, for any node covered by an exception
// handler, every variable live at the catch head (plus arguments and the
// relevant `this` slots) is flushed to the stack before the handler could be
// entered. This keeps OSR exit to the baseline catch handler sound.
void handleBlockForTryCatch(BasicBlock* block, InsertionSet& insertionSet)
{
    HandlerInfo* currentExceptionHandler = nullptr;
    FastBitVector liveAtCatchHead;
    liveAtCatchHead.resize(m_graph.block(0)->variablesAtTail.numberOfLocals());

    // Single-entry cache: consecutive nodes usually share a code origin, so
    // remember the last origin we resolved and its handler.
    HandlerInfo* cachedHandlerResult;
    CodeOrigin cachedCodeOrigin;
    auto catchHandler = [&] (CodeOrigin origin) -> HandlerInfo* {
        ASSERT(origin);
        if (origin == cachedCodeOrigin)
            return cachedHandlerResult;

        unsigned bytecodeIndexToCheck = origin.bytecodeIndex;

        cachedCodeOrigin = origin;

        // Walk outward through inline call frames until a baseline code block
        // with a handler covering this bytecode index is found (or we run out).
        while (1) {
            InlineCallFrame* inlineCallFrame = origin.inlineCallFrame;
            CodeBlock* codeBlock = m_graph.baselineCodeBlockFor(inlineCallFrame);
            if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
                // Found a handler: recompute the set of locals live at its
                // catch head; these are the variables that must be flushed.
                liveAtCatchHead.clearAll();

                unsigned catchBytecodeIndex = handler->target;
                m_graph.forAllLocalsLiveInBytecode(CodeOrigin(catchBytecodeIndex, inlineCallFrame), [&] (VirtualRegister operand) {
                    liveAtCatchHead[operand.toLocal()] = true;
                });

                cachedHandlerResult = handler;
                break;
            }

            if (!inlineCallFrame) {
                cachedHandlerResult = nullptr;
                break;
            }

            bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
            origin = inlineCallFrame->directCaller;
        }

        return cachedHandlerResult;
    };

    // Tracks, per operand, the most recent VariableAccessData seen in this
    // block so Flush nodes reuse it instead of minting fresh ones.
    Operands<VariableAccessData*> currentBlockAccessData(block->variablesAtTail.numberOfArguments(), block->variablesAtTail.numberOfLocals(), nullptr);
    HashSet<InlineCallFrame*> seenInlineCallFrames;

    // Flush everything relevant at a handler-region boundary: all locals live
    // at the catch head, all arguments, and `this` for the machine frame and
    // for each inline frame touched since the last flush.
    auto flushEverything = [&] (NodeOrigin origin, unsigned index) {
        RELEASE_ASSERT(currentExceptionHandler);
        auto flush = [&] (VirtualRegister operand, bool alwaysInsert) {
            if ((operand.isLocal() && liveAtCatchHead[operand.toLocal()]) || operand.isArgument() || alwaysInsert) {
                ASSERT(isValidFlushLocation(block, index, operand));

                VariableAccessData* accessData = currentBlockAccessData.operand(operand);
                if (!accessData)
                    accessData = newVariableAccessData(operand);

                currentBlockAccessData.operand(operand) = accessData;

                insertionSet.insertNode(index, SpecNone, Flush, origin, OpInfo(accessData));
            }
        };

        for (unsigned local = 0; local < block->variablesAtTail.numberOfLocals(); local++)
            flush(virtualRegisterForLocal(local), false);
        for (InlineCallFrame* inlineCallFrame : seenInlineCallFrames)
            flush(VirtualRegister(inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset()), true);
        flush(VirtualRegister(CallFrame::thisArgumentOffset()), true);

        seenInlineCallFrames.clear();
    };

    for (unsigned nodeIndex = 0; nodeIndex < block->size(); nodeIndex++) {
        Node* node = block->at(nodeIndex);

        {
            // On a transition out of a handler's region, flush before the node
            // that leaves it.
            HandlerInfo* newHandler = catchHandler(node->origin.semantic);
            if (newHandler != currentExceptionHandler && currentExceptionHandler)
                flushEverything(node->origin, nodeIndex);
            currentExceptionHandler = newHandler;
        }

        if (currentExceptionHandler && (node->op() == SetLocal || node->op() == SetArgument)) {
            InlineCallFrame* inlineCallFrame = node->origin.semantic.inlineCallFrame;
            if (inlineCallFrame)
                seenInlineCallFrames.add(inlineCallFrame);

            VirtualRegister operand = node->local();

            // Flush stores to anything the catch handler could observe: locals
            // live at the catch head, arguments, or the frame's `this` slot.
            int stackOffset = inlineCallFrame ? inlineCallFrame->stackOffset : 0;
            if ((operand.isLocal() && liveAtCatchHead[operand.toLocal()]) || operand.isArgument() || (operand.offset() == stackOffset + CallFrame::thisArgumentOffset())) {
                ASSERT(isValidFlushLocation(block, nodeIndex, operand));

                VariableAccessData* variableAccessData = currentBlockAccessData.operand(operand);
                if (!variableAccessData)
                    variableAccessData = newVariableAccessData(operand);

                insertionSet.insertNode(nodeIndex, SpecNone, Flush, node->origin, OpInfo(variableAccessData));
            }
        }

        if (node->accessesStack(m_graph))
            currentBlockAccessData.operand(node->local()) = node->variableAccessData();
    }

    // If the block ends while still inside a handler region, flush at the end.
    if (currentExceptionHandler) {
        NodeOrigin origin = block->at(block->size() - 1)->origin;
        flushEverything(origin, block->size());
    }
}