// Reconstructs, for the machine-code point identified by 'index' into this event
// stream, a ValueRecovery for every operand (argument + local) visible at
// 'codeOrigin'. The OSR exit compiler uses these recoveries to find each bytecode
// variable's current location (register, DFG stack slot, or constant).
void VariableEventStream::reconstruct(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, MinifiedGraph& graph,
    unsigned index, Operands<ValueRecovery>& valueRecoveries) const
{
    ASSERT(codeBlock->jitType() == JITCode::DFGJIT);
    CodeBlock* baselineCodeBlock = codeBlock->baselineVersion();

    // Compute the number of locals in scope at this origin. For an inlined frame
    // this is the inlinee's callee registers plus the frame's offset converted to
    // a local index (+1 because toLocal() yields a zero-based index).
    unsigned numVariables;
    if (codeOrigin.inlineCallFrame)
        numVariables = baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame)->m_numCalleeRegisters + VirtualRegister(codeOrigin.inlineCallFrame->stackOffset).toLocal() + 1;
    else
        numVariables = baselineCodeBlock->m_numCalleeRegisters;

    // Crazy special case: if we're at index == 0 then this must be an argument check
    // failure, in which case all variables are already set up. The recoveries should
    // reflect this.
    if (!index) {
        valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
        for (size_t i = 0; i < valueRecoveries.size(); ++i) {
            valueRecoveries[i] = ValueRecovery::displacedInJSStack(
                VirtualRegister(valueRecoveries.operandForIndex(i)), DataFormatJS);
        }
        return;
    }

    // Step 1: Find the last checkpoint, and figure out the number of virtual registers as we go.
    unsigned startIndex = index - 1;
    while (at(startIndex).kind() != Reset)
        startIndex--;

    // Step 2: Create a mock-up of the DFG's state and execute the events.
    // operandSources tracks, per bytecode operand, where its value comes from;
    // everything starts out dead until an event says otherwise.
    Operands<ValueSource> operandSources(codeBlock->numParameters(), numVariables);
    for (unsigned i = operandSources.size(); i--;)
        operandSources[i] = ValueSource(SourceIsDead);
    HashMap<MinifiedID, MinifiedGenerationInfo> generationInfos;
    for (unsigned i = startIndex; i < index; ++i) {
        const VariableEvent& event = at(i);
        switch (event.kind()) {
        case Reset:
            // nothing to do.
            break;
        case BirthToFill:
        case BirthToSpill:
        case Birth: {
            // A node has come alive: begin tracking where its value lives.
            MinifiedGenerationInfo info;
            info.update(event);
            generationInfos.add(event.id(), info);
            break;
        }
        case Fill:
        case Spill:
        case Death: {
            // Update the location (or liveness) of a node we are already tracking.
            HashMap<MinifiedID, MinifiedGenerationInfo>::iterator iter = generationInfos.find(event.id());
            ASSERT(iter != generationInfos.end());
            iter->value.update(event);
            break;
        }
        case MovHintEvent:
            // The bytecode variable now notionally holds this node's value.
            if (operandSources.hasOperand(event.bytecodeRegister()))
                operandSources.setOperand(event.bytecodeRegister(), ValueSource(event.id()));
            break;
        case SetLocalEvent:
            // The variable was actually written to a machine register/slot in the
            // given data format.
            if (operandSources.hasOperand(event.bytecodeRegister()))
                operandSources.setOperand(event.bytecodeRegister(), ValueSource::forDataFormat(event.machineRegister(), event.dataFormat()));
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    }

    // Step 3: Compute value recoveries!
    valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
    for (unsigned i = 0; i < operandSources.size(); ++i) {
        ValueSource& source = operandSources[i];
        if (source.isTriviallyRecoverable()) {
            valueRecoveries[i] = source.valueRecovery();
            continue;
        }

        ASSERT(source.kind() == HaveNode);
        MinifiedNode* node = graph.at(source.id());
        MinifiedGenerationInfo info = generationInfos.get(source.id());
        if (!info.alive) {
            // The node died before this point; there is nothing to recover, so
            // report undefined.
            valueRecoveries[i] = ValueRecovery::constant(jsUndefined());
            continue;
        }

        if (tryToSetConstantRecovery(valueRecoveries[i], node))
            continue;

        ASSERT(info.format != DataFormatNone);

        if (info.filled) {
            // The value is live in a machine register.
            if (info.format == DataFormatDouble) {
                valueRecoveries[i] = ValueRecovery::inFPR(info.u.fpr, DataFormatDouble);
                continue;
            }
#if USE(JSVALUE32_64)
            // On 32-bit, a boxed JSValue occupies a tag/payload register pair.
            if (info.format & DataFormatJS) {
                valueRecoveries[i] = ValueRecovery::inPair(info.u.pair.tagGPR, info.u.pair.payloadGPR);
                continue;
            }
#endif
            valueRecoveries[i] = ValueRecovery::inGPR(info.u.gpr, info.format);
            continue;
        }

        // Not filled: the value is spilled to a DFG stack slot.
        valueRecoveries[i] = ValueRecovery::displacedInJSStack(static_cast<VirtualRegister>(info.u.virtualReg), info.format);
    }
}
// Stack layout phase: packs the locals that are actually used into a dense range
// of machine locals, then rewrites every structure that refers to locals
// (variable access data, stack access data, inline call frame metadata, and
// unlinked nodes) to use the packed machine numbering.
bool run()
{
    // This enumerates the locals that we actually care about and packs them. So for example
    // if we use local 1, 3, 4, 5, 7, then we remap them: 1->0, 3->1, 4->2, 5->3, 7->4. We
    // treat a variable as being "used" if there exists an access to it (SetLocal, GetLocal,
    // Flush, PhantomLocal).

    BitVector usedLocals;

    // Collect those variables that are used from IR.
    bool hasNodesThatNeedFixup = false;
    for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
        BasicBlock* block = m_graph.block(blockIndex);
        if (!block)
            continue;
        for (unsigned nodeIndex = block->size(); nodeIndex--;) {
            Node* node = block->at(nodeIndex);
            switch (node->op()) {
            case GetLocal:
            case SetLocal:
            case Flush:
            case PhantomLocal: {
                VariableAccessData* variable = node->variableAccessData();
                // Arguments are not subject to local packing.
                if (variable->local().isArgument())
                    break;
                usedLocals.set(variable->local().toLocal());
                break;
            }
            case GetLocalUnlinked: {
                VirtualRegister operand = node->unlinkedLocal();
                if (operand.isArgument())
                    break;
                usedLocals.set(operand.toLocal());
                // Unlinked nodes carry a raw local and must be patched after
                // allocation (see the fixup pass at the bottom).
                hasNodesThatNeedFixup = true;
                break;
            }
            case LoadVarargs:
            case ForwardVarargs: {
                LoadVarargsData* data = node->loadVarargsData();
                if (data->count.isLocal())
                    usedLocals.set(data->count.toLocal());
                if (data->start.isLocal()) {
                    // This part really relies on the contiguity of stack layout
                    // assignments.
                    ASSERT(VirtualRegister(data->start.offset() + data->limit - 1).isLocal());
                    for (unsigned i = data->limit; i--;)
                        usedLocals.set(VirtualRegister(data->start.offset() + i).toLocal());
                } // the else case shouldn't happen.
                hasNodesThatNeedFixup = true;
                break;
            }
            case PutStack:
            case GetStack: {
                StackAccessData* stack = node->stackAccessData();
                if (stack->local.isArgument())
                    break;
                usedLocals.set(stack->local.toLocal());
                break;
            }
            default:
                break;
            }
        }
    }

    // Pin down the locals that inline call frames rely on: the argument count
    // slot for varargs frames, and the slots holding inlined arguments.
    for (InlineCallFrameSet::iterator iter = m_graph.m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        if (inlineCallFrame->isVarargs()) {
            usedLocals.set(VirtualRegister(
                JSStack::ArgumentCount + inlineCallFrame->stackOffset).toLocal());
        }
        // Note: argument 0 (the callee/this slot) is deliberately excluded.
        for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
            usedLocals.set(VirtualRegister(
                virtualRegisterForArgument(argument).offset()
                + inlineCallFrame->stackOffset).toLocal());
        }
    }

    // Build the packing map: allocation[oldLocal] = newMachineLocal, or UINT_MAX
    // for unused locals.
    Vector<unsigned> allocation(usedLocals.size());
    m_graph.m_nextMachineLocal = 0;
    for (unsigned i = 0; i < usedLocals.size(); ++i) {
        if (!usedLocals.get(i)) {
            allocation[i] = UINT_MAX;
            continue;
        }
        allocation[i] = m_graph.m_nextMachineLocal++;
    }

    // Rewrite each root VariableAccessData's machine local per the allocation.
    for (unsigned i = m_graph.m_variableAccessData.size(); i--;) {
        VariableAccessData* variable = &m_graph.m_variableAccessData[i];
        if (!variable->isRoot())
            continue;
        if (variable->local().isArgument()) {
            // Arguments keep their positions unchanged.
            variable->machineLocal() = variable->local();
            continue;
        }
        size_t local = variable->local().toLocal();
        if (local >= allocation.size())
            continue;
        if (allocation[local] == UINT_MAX)
            continue;
        variable->machineLocal() = assign(allocation, variable->local());
    }

    // Same remapping for the stack-access metadata used by PutStack/GetStack.
    for (StackAccessData* data : m_graph.m_stackAccessData) {
        if (!data->local.isLocal()) {
            data->machineLocal = data->local;
            continue;
        }
        if (static_cast<size_t>(data->local.toLocal()) >= allocation.size())
            continue;
        if (allocation[data->local.toLocal()] == UINT_MAX)
            continue;
        data->machineLocal = assign(allocation, data->local);
    }

    // This register is never valid for DFG code blocks.
    codeBlock()->setActivationRegister(VirtualRegister());
    // The scope register is only kept alive when the debugger is enabled.
    if (LIKELY(!m_graph.hasDebuggerEnabled()))
        codeBlock()->setScopeRegister(VirtualRegister());
    else
        codeBlock()->setScopeRegister(assign(allocation, codeBlock()->scopeRegister()));

    // Update inline call frame metadata: argument-count register, per-argument
    // recoveries, and the callee recovery for closure calls.
    for (unsigned i = m_graph.m_inlineVariableData.size(); i--;) {
        InlineVariableData data = m_graph.m_inlineVariableData[i];
        InlineCallFrame* inlineCallFrame = data.inlineCallFrame;

        if (inlineCallFrame->isVarargs()) {
            inlineCallFrame->argumentCountRegister = assign(
                allocation, VirtualRegister(inlineCallFrame->stackOffset + JSStack::ArgumentCount));
        }

        for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
            ArgumentPosition& position = m_graph.m_argumentPositions[
                data.argumentPositionStart + argument];
            VariableAccessData* variable = position.someVariable();
            ValueSource source;
            if (!variable)
                source = ValueSource(SourceIsDead);
            else {
                source = ValueSource::forFlushFormat(
                    variable->machineLocal(), variable->flushFormat());
            }
            inlineCallFrame->arguments[argument] = source.valueRecovery();
        }

        RELEASE_ASSERT(inlineCallFrame->isClosureCall == !!data.calleeVariable);
        if (inlineCallFrame->isClosureCall) {
            // find() resolves the union-find representative of the callee variable.
            VariableAccessData* variable = data.calleeVariable->find();
            ValueSource source = ValueSource::forFlushFormat(
                variable->machineLocal(), variable->flushFormat());
            inlineCallFrame->calleeRecovery = source.valueRecovery();
        } else
            RELEASE_ASSERT(inlineCallFrame->calleeRecovery.isConstant());
    }

    // Fix GetLocalUnlinked's variable references.
    if (hasNodesThatNeedFixup) {
        for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                Node* node = block->at(nodeIndex);
                switch (node->op()) {
                case GetLocalUnlinked: {
                    node->setUnlinkedMachineLocal(assign(allocation, node->unlinkedLocal()));
                    break;
                }
                case LoadVarargs:
                case ForwardVarargs: {
                    LoadVarargsData* data = node->loadVarargsData();
                    data->machineCount = assign(allocation, data->count);
                    data->machineStart = assign(allocation, data->start);
                    break;
                }
                default:
                    break;
                }
            }
        }
    }

    return true;
}
// Older variant of the OSR-exit recovery reconstruction: replays the event
// stream from the last Reset checkpoint up to 'index', then computes a
// ValueRecovery for every operand. Differs from later versions in that nodes are
// identified by NodeIndex and generation info is kept in a dense Vector indexed
// by node, and in that it searches for alternate conversion nodes when the
// wanted node's value is no longer live.
void VariableEventStream::reconstruct(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, MinifiedGraph& graph,
    unsigned index, Operands<ValueRecovery>& valueRecoveries) const
{
    ASSERT(codeBlock->getJITType() == JITCode::DFGJIT);
    CodeBlock* baselineCodeBlock = codeBlock->baselineVersion();

    // Number of locals visible at this code origin; for an inlined frame the
    // frame's stack offset is added directly to the inlinee's register count.
    unsigned numVariables;
    if (codeOrigin.inlineCallFrame)
        numVariables = baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame)->m_numCalleeRegisters + codeOrigin.inlineCallFrame->stackOffset;
    else
        numVariables = baselineCodeBlock->m_numCalleeRegisters;

    // Crazy special case: if we're at index == 0 then this must be an argument check
    // failure, in which case all variables are already set up. The recoveries should
    // reflect this.
    if (!index) {
        valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
        for (size_t i = 0; i < valueRecoveries.size(); ++i)
            valueRecoveries[i] = ValueRecovery::alreadyInJSStack();
        return;
    }

    // Step 1: Find the last checkpoint, and figure out the number of virtual registers as we go.
    unsigned startIndex = index - 1;
    while (at(startIndex).kind() != Reset)
        startIndex--;

#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("Computing OSR exit recoveries starting at seq#%u.\n", startIndex);
#endif

    // Step 2: Create a mock-up of the DFG's state and execute the events.
    Operands<ValueSource> operandSources(codeBlock->numParameters(), numVariables);
    // Dense per-node generation info, indexed by NodeIndex.
    Vector<MinifiedGenerationInfo, 32> generationInfos(graph.originalGraphSize());
    for (unsigned i = startIndex; i < index; ++i) {
        const VariableEvent& event = at(i);
        switch (event.kind()) {
        case Reset:
            // nothing to do.
            break;
        case BirthToFill:
        case BirthToSpill:
        case Fill:
        case Spill:
        case Death:
            // All register/slot lifecycle events funnel into the node's info.
            generationInfos[event.nodeIndex()].update(event);
            break;
        case MovHint:
            // The bytecode operand now notionally holds this node's value.
            if (operandSources.hasOperand(event.operand()))
                operandSources.setOperand(event.operand(), ValueSource(event.nodeIndex()));
            break;
        case SetLocalEvent:
            // The operand was actually stored in the given data format.
            if (operandSources.hasOperand(event.operand()))
                operandSources.setOperand(event.operand(), ValueSource::forDataFormat(event.dataFormat()));
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    }

    // Step 3: Record the things that are live, so we can get to them more quickly.
    Vector<unsigned, 16> indicesOfLiveThings;
    for (unsigned i = 0; i < generationInfos.size(); ++i) {
        if (generationInfos[i].format != DataFormatNone)
            indicesOfLiveThings.append(i);
    }

    // Step 4: Compute value recoveries!
    valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
    for (unsigned i = 0; i < operandSources.size(); ++i) {
        ValueSource& source = operandSources[i];
        if (source.isTriviallyRecoverable()) {
            valueRecoveries[i] = source.valueRecovery();
            continue;
        }

        ASSERT(source.kind() == HaveNode);
        MinifiedNode* node = graph.at(source.nodeIndex());
        if (node) {
            // Constants and phantom arguments can be recovered without any
            // machine state.
            if (node->hasConstantNumber()) {
                valueRecoveries[i] = ValueRecovery::constant(
                    codeBlock->constantRegister(
                        FirstConstantRegisterIndex + node->constantNumber()).get());
                continue;
            }
            if (node->hasWeakConstant()) {
                valueRecoveries[i] = ValueRecovery::constant(node->weakConstant());
                continue;
            }
            if (node->op() == PhantomArguments) {
                valueRecoveries[i] = ValueRecovery::argumentsThatWereNotCreated();
                continue;
            }
        }

        MinifiedGenerationInfo* info = &generationInfos[source.nodeIndex()];
        if (info->format == DataFormatNone) {
            // Try to see if there is an alternate node that would contain the value we want.
            // There are four possibilities:
            //
            // Int32ToDouble: We can use this in place of the original node, but
            //    we'd rather not; so we use it only if it is the only remaining
            //    live version.
            //
            // ValueToInt32: If the only remaining live version of the value is
            //    ValueToInt32, then we can use it.
            //
            // UInt32ToNumber: If the only live version of the value is a UInt32ToNumber
            //    then the only remaining uses are ones that want a properly formed number
            //    rather than a UInt32 intermediate.
            //
            // DoubleAsInt32: Same as UInt32ToNumber.
            //
            // The reverse of the above: This node could be a UInt32ToNumber, but its
            //    alternative is still alive. This means that the only remaining uses of
            //    the number would be fine with a UInt32 intermediate.

            bool found = false;

            // First try the reverse direction: our node is a UInt32ToNumber whose
            // input is still live.
            if (node && node->op() == UInt32ToNumber) {
                NodeIndex nodeIndex = node->child1();
                node = graph.at(nodeIndex);
                info = &generationInfos[nodeIndex];
                if (info->format != DataFormatNone)
                    found = true;
            }

            if (!found) {
                // Scan all live nodes for a conversion node whose child is the
                // node we want; remember one of each kind.
                NodeIndex int32ToDoubleIndex = NoNode;
                NodeIndex valueToInt32Index = NoNode;
                NodeIndex uint32ToNumberIndex = NoNode;
                NodeIndex doubleAsInt32Index = NoNode;

                for (unsigned i = 0; i < indicesOfLiveThings.size(); ++i) {
                    NodeIndex nodeIndex = indicesOfLiveThings[i];
                    node = graph.at(nodeIndex);
                    if (!node)
                        continue;
                    if (!node->hasChild1())
                        continue;
                    if (node->child1() != source.nodeIndex())
                        continue;
                    ASSERT(generationInfos[nodeIndex].format != DataFormatNone);
                    switch (node->op()) {
                    case Int32ToDouble:
                        int32ToDoubleIndex = nodeIndex;
                        break;
                    case ValueToInt32:
                        valueToInt32Index = nodeIndex;
                        break;
                    case UInt32ToNumber:
                        uint32ToNumberIndex = nodeIndex;
                        break;
                    case DoubleAsInt32:
                        doubleAsInt32Index = nodeIndex;
                        break;
                    default:
                        break;
                    }
                }

                // Preference order among the alternates found above.
                NodeIndex nodeIndexToUse;
                if (doubleAsInt32Index != NoNode)
                    nodeIndexToUse = doubleAsInt32Index;
                else if (int32ToDoubleIndex != NoNode)
                    nodeIndexToUse = int32ToDoubleIndex;
                else if (valueToInt32Index != NoNode)
                    nodeIndexToUse = valueToInt32Index;
                else if (uint32ToNumberIndex != NoNode)
                    nodeIndexToUse = uint32ToNumberIndex;
                else
                    nodeIndexToUse = NoNode;

                if (nodeIndexToUse != NoNode) {
                    info = &generationInfos[nodeIndexToUse];
                    ASSERT(info->format != DataFormatNone);
                    found = true;
                }
            }

            if (!found) {
                // No live version of the value exists anywhere; report undefined.
                valueRecoveries[i] = ValueRecovery::constant(jsUndefined());
                continue;
            }
        }

        ASSERT(info->format != DataFormatNone);

        if (info->filled) {
            // Value is live in a machine register.
            if (info->format == DataFormatDouble) {
                valueRecoveries[i] = ValueRecovery::inFPR(info->u.fpr);
                continue;
            }
#if USE(JSVALUE32_64)
            // On 32-bit, a boxed JSValue occupies a tag/payload register pair.
            if (info->format & DataFormatJS) {
                valueRecoveries[i] = ValueRecovery::inPair(info->u.pair.tagGPR, info->u.pair.payloadGPR);
                continue;
            }
#endif
            valueRecoveries[i] = ValueRecovery::inGPR(info->u.gpr, info->format);
            continue;
        }

        // Not filled: the value is spilled to a DFG stack slot.
        valueRecoveries[i] = ValueRecovery::displacedInJSStack(static_cast<VirtualRegister>(info->u.virtualReg), info->format);
    }

    // Step 5: Make sure that for locals that coincide with true call frame headers, the exit compiler knows
    // that those values don't have to be recovered. Signal this by using ValueRecovery::alreadyInJSStack()
    for (InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame; inlineCallFrame; inlineCallFrame = inlineCallFrame->caller.inlineCallFrame) {
        for (unsigned i = JSStack::CallFrameHeaderSize; i--;)
            valueRecoveries.setLocal(inlineCallFrame->stackOffset - i - 1, ValueRecovery::alreadyInJSStack());
    }
}
// Older stack layout phase variant: packs used locals into a dense machine-local
// range and rewrites everything that refers to locals. Unlike later versions,
// it additionally pins down captured variables from the symbol table, the
// (unmodified) arguments registers, the activation register, and the slow
// arguments table.
bool run()
{
    SharedSymbolTable* symbolTable = codeBlock()->symbolTable();

    // This enumerates the locals that we actually care about and packs them. So for example
    // if we use local 1, 3, 4, 5, 7, then we remap them: 1->0, 3->1, 4->2, 5->3, 7->4. We
    // treat a variable as being "used" if there exists an access to it (SetLocal, GetLocal,
    // Flush, PhantomLocal).

    BitVector usedLocals;

    // Collect those variables that are used from IR.
    bool hasGetLocalUnlinked = false;
    for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
        BasicBlock* block = m_graph.block(blockIndex);
        if (!block)
            continue;
        for (unsigned nodeIndex = block->size(); nodeIndex--;) {
            Node* node = block->at(nodeIndex);
            switch (node->op()) {
            case GetLocal:
            case SetLocal:
            case Flush:
            case PhantomLocal: {
                VariableAccessData* variable = node->variableAccessData();
                // Arguments are not subject to local packing.
                if (variable->local().isArgument())
                    break;
                usedLocals.set(variable->local().toLocal());
                break;
            }
            case GetLocalUnlinked: {
                VirtualRegister operand = node->unlinkedLocal();
                if (operand.isArgument())
                    break;
                usedLocals.set(operand.toLocal());
                // Unlinked nodes must be patched after allocation (fixup pass below).
                hasGetLocalUnlinked = true;
                break;
            }
            default:
                break;
            }
        }
    }

    // Ensure that captured variables and captured inline arguments are pinned down.
    // They should have been because of flushes, except that the flushes can be optimized
    // away.
    if (symbolTable) {
        // Capture range is [captureStart, captureEnd): note it is iterated downward.
        for (int i = symbolTable->captureStart(); i > symbolTable->captureEnd(); i--)
            usedLocals.set(VirtualRegister(i).toLocal());
    }
    if (codeBlock()->usesArguments()) {
        // Both the arguments register and its unmodified shadow must stay allocated.
        usedLocals.set(codeBlock()->argumentsRegister().toLocal());
        usedLocals.set(unmodifiedArgumentsRegister(codeBlock()->argumentsRegister()).toLocal());
    }
    if (codeBlock()->uncheckedActivationRegister().isValid())
        usedLocals.set(codeBlock()->activationRegister().toLocal());
    for (InlineCallFrameSet::iterator iter = m_graph.m_inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        if (!inlineCallFrame->executable->usesArguments())
            continue;

        VirtualRegister argumentsRegister = m_graph.argumentsRegisterFor(inlineCallFrame);
        usedLocals.set(argumentsRegister.toLocal());
        usedLocals.set(unmodifiedArgumentsRegister(argumentsRegister).toLocal());

        // Note: argument 0 (the callee/this slot) is deliberately excluded.
        for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
            usedLocals.set(VirtualRegister(
                virtualRegisterForArgument(argument).offset()
                + inlineCallFrame->stackOffset).toLocal());
        }
    }

    // Build the packing map: allocation[oldLocal] = newMachineLocal, or UINT_MAX
    // for unused locals.
    Vector<unsigned> allocation(usedLocals.size());
    m_graph.m_nextMachineLocal = 0;
    for (unsigned i = 0; i < usedLocals.size(); ++i) {
        if (!usedLocals.get(i)) {
            allocation[i] = UINT_MAX;
            continue;
        }
        allocation[i] = m_graph.m_nextMachineLocal++;
    }

    // Rewrite each root VariableAccessData's machine local per the allocation.
    for (unsigned i = m_graph.m_variableAccessData.size(); i--;) {
        VariableAccessData* variable = &m_graph.m_variableAccessData[i];
        if (!variable->isRoot())
            continue;
        if (variable->local().isArgument()) {
            // Arguments keep their positions unchanged.
            variable->machineLocal() = variable->local();
            continue;
        }
        size_t local = variable->local().toLocal();
        if (local >= allocation.size())
            continue;
        if (allocation[local] == UINT_MAX)
            continue;
        variable->machineLocal() = virtualRegisterForLocal(
            allocation[variable->local().toLocal()]);
    }

    if (codeBlock()->usesArguments()) {
        VirtualRegister argumentsRegister = virtualRegisterForLocal(
            allocation[codeBlock()->argumentsRegister().toLocal()]);
        // The unmodified shadow must land exactly one slot relative to the
        // remapped arguments register, as unmodifiedArgumentsRegister() computes.
        RELEASE_ASSERT(
            virtualRegisterForLocal(allocation[
                unmodifiedArgumentsRegister(
                    codeBlock()->argumentsRegister()).toLocal()])
            == unmodifiedArgumentsRegister(argumentsRegister));
        codeBlock()->setArgumentsRegister(argumentsRegister);
    }

    if (codeBlock()->uncheckedActivationRegister().isValid()) {
        codeBlock()->setActivationRegister(
            virtualRegisterForLocal(allocation[codeBlock()->activationRegister().toLocal()]));
    }

    // Update inline call frame metadata: arguments registers, per-argument
    // recoveries, and the callee recovery for closure calls.
    for (unsigned i = m_graph.m_inlineVariableData.size(); i--;) {
        InlineVariableData data = m_graph.m_inlineVariableData[i];
        InlineCallFrame* inlineCallFrame = data.inlineCallFrame;

        if (inlineCallFrame->executable->usesArguments()) {
            inlineCallFrame->argumentsRegister = virtualRegisterForLocal(
                allocation[m_graph.argumentsRegisterFor(inlineCallFrame).toLocal()]);

            RELEASE_ASSERT(
                virtualRegisterForLocal(allocation[unmodifiedArgumentsRegister(
                    m_graph.argumentsRegisterFor(inlineCallFrame)).toLocal()])
                == unmodifiedArgumentsRegister(inlineCallFrame->argumentsRegister));
        }

        for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
            ArgumentPosition& position = m_graph.m_argumentPositions[
                data.argumentPositionStart + argument];
            VariableAccessData* variable = position.someVariable();
            ValueSource source;
            if (!variable)
                source = ValueSource(SourceIsDead);
            else {
                source = ValueSource::forFlushFormat(
                    variable->machineLocal(), variable->flushFormat());
            }
            inlineCallFrame->arguments[argument] = source.valueRecovery();
        }

        RELEASE_ASSERT(inlineCallFrame->isClosureCall == !!data.calleeVariable);
        if (inlineCallFrame->isClosureCall) {
            ValueSource source = ValueSource::forFlushFormat(
                data.calleeVariable->machineLocal(),
                data.calleeVariable->flushFormat());
            inlineCallFrame->calleeRecovery = source.valueRecovery();
        } else
            RELEASE_ASSERT(inlineCallFrame->calleeRecovery.isConstant());
    }

    if (symbolTable) {
        // Record where the capture range begins in the packed machine layout.
        if (symbolTable->captureCount()) {
            unsigned captureStartLocal = allocation[
                VirtualRegister(codeBlock()->symbolTable()->captureStart()).toLocal()];
            ASSERT(captureStartLocal != UINT_MAX);
            m_graph.m_machineCaptureStart = virtualRegisterForLocal(captureStartLocal).offset();
        } else
            m_graph.m_machineCaptureStart = virtualRegisterForLocal(0).offset();

        // This is an abomination. If we had captured an argument then the argument ends
        // up being "slow", meaning that loads of the argument go through an extra lookup
        // table.
        if (const SlowArgument* slowArguments = symbolTable->slowArguments()) {
            auto newSlowArguments = std::make_unique<SlowArgument[]>(
                symbolTable->parameterCount());
            for (size_t i = symbolTable->parameterCount(); i--;) {
                newSlowArguments[i] = slowArguments[i];
                VirtualRegister reg = VirtualRegister(slowArguments[i].index);
                if (reg.isLocal())
                    newSlowArguments[i].index = virtualRegisterForLocal(allocation[reg.toLocal()]).offset();
            }
            m_graph.m_slowArguments = std::move(newSlowArguments);
        }
    }

    // Fix GetLocalUnlinked's variable references.
    if (hasGetLocalUnlinked) {
        for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                Node* node = block->at(nodeIndex);
                switch (node->op()) {
                case GetLocalUnlinked: {
                    VirtualRegister operand = node->unlinkedLocal();
                    if (operand.isLocal())
                        operand = virtualRegisterForLocal(allocation[operand.toLocal()]);
                    node->setUnlinkedMachineLocal(operand);
                    break;
                }
                default:
                    break;
                }
            }
        }
    }

    return true;
}