bool reachedTranslationLimit(TransKind kind, SrcKey sk, const SrcRec& srcRec) {
  const auto numTrans = srcRec.translations().size();

  // Optimized translations perform this check at relocation time to avoid
  // invalidating all of their SrcKeys early.
  if (kind == TransKind::Optimize) return false;

  if ((kind == TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxProfileTranslations) ||
      (kind != TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxTranslations)) {
    return false;
  }
  INC_TPC(max_trans);

  if (debug && Trace::moduleEnabled(Trace::mcg, 2)) {
    const auto& tns = srcRec.translations();
    TRACE(1, "Too many (%zd) translations: %s, BC offset %d\n",
          tns.size(), sk.unit()->filepath()->data(), sk.offset());
    SKTRACE(2, sk, "{\n");
    TCA topTrans = srcRec.getTopTranslation();
    for (size_t i = 0; i < tns.size(); ++i) {
      auto const rec = transdb::getTransRec(tns[i].mainStart());
      assertx(rec);
      SKTRACE(2, sk, "%zd %p\n", i, tns[i].mainStart());
      if (tns[i].mainStart() == topTrans) {
        SKTRACE(2, sk, "%zd: *Top*\n", i);
      }
      if (rec->kind == TransKind::Anchor) {
        SKTRACE(2, sk, "%zd: Anchor\n", i);
      } else {
        SKTRACE(2, sk, "%zd: guards {\n", i);
        for (unsigned j = 0; j < rec->guards.size(); ++j) {
          FTRACE(2, "{}\n", rec->guards[j]);
        }
        SKTRACE(2, sk, "%zd } guards\n", i);
      }
    }
    SKTRACE(2, sk, "} /* Too many translations */\n");
  }
  return true;
}
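/*
 * Usage sketch (not from the original source): a caller would consult
 * reachedTranslationLimit() before emitting yet another translation for a
 * SrcKey, and punt to a generic fallback once the per-SrcKey budget is hit.
 * Note that the check above fires exactly when the translation count reaches
 * the configured maximum. The helpers emitInterpStub() and
 * createTranslation() below are hypothetical stand-ins, not real HHVM APIs.
 */
TCA translateOrPunt(TransKind kind, SrcKey sk, SrcRec& srcRec) {
  if (reachedTranslationLimit(kind, sk, srcRec)) {
    // Per-SrcKey budget exhausted: stop consuming translation-cache space
    // and fall back to a stub that reenters the interpreter.
    return emitInterpStub(sk);                 // hypothetical helper
  }
  return createTranslation(kind, sk, srcRec);  // hypothetical helper
}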
/*
 * Get location metadata for the inputs of `ni'.
 */
InputInfoVec getInputs(NormalizedInstruction& ni, FPInvOffset bcSPOff) {
  InputInfoVec inputs;
  if (isAlwaysNop(ni.op())) return inputs;

  always_assert_flog(
    instrInfo.count(ni.op()),
    "Invalid opcode in getInputs: {}\n",
    opcodeToName(ni.op())
  );
  UNUSED auto const sk = ni.source;

  auto const& info = instrInfo[ni.op()];
  auto const flags = info.in;
  auto stackOff = bcSPOff;

  if (flags & FStack) {
    stackOff -= ni.imm[0].u_IVA; // arguments consumed
    stackOff -= kNumActRecCells; // ActRec is torn down as well
  }
  if (flags & FuncdRef) inputs.needsRefCheck = true;
  if (flags & IgnoreInnerType) ni.ignoreInnerType = true;

  if (flags & Stack1) {
    SKTRACE(1, sk, "getInputs: Stack1 %d\n", stackOff.offset);
    inputs.emplace_back(Location::Stack { stackOff-- });

    if (flags & DontGuardStack1) inputs.back().dontGuard = true;

    if (flags & Stack2) {
      SKTRACE(1, sk, "getInputs: Stack2 %d\n", stackOff.offset);
      inputs.emplace_back(Location::Stack { stackOff-- });

      if (flags & Stack3) {
        SKTRACE(1, sk, "getInputs: Stack3 %d\n", stackOff.offset);
        inputs.emplace_back(Location::Stack { stackOff-- });
      }
    }
  }
  if (flags & StackI) {
    inputs.emplace_back(Location::Stack {
      BCSPRelOffset{ni.imm[0].u_IVA}.to<FPInvOffset>(bcSPOff)
    });
  }
  if (flags & StackN) {
    int numArgs = (ni.op() == Op::NewPackedArray ||
                   ni.op() == Op::NewVecArray ||
                   ni.op() == Op::ConcatN)
      ? ni.imm[0].u_IVA
      : ni.immVec.numStackValues();

    SKTRACE(1, sk, "getInputs: StackN %d %d\n", stackOff.offset, numArgs);
    for (int i = 0; i < numArgs; i++) {
      inputs.emplace_back(Location::Stack { stackOff-- });
      inputs.back().dontGuard = true;
      inputs.back().dontBreak = true;
    }
  }
  if (flags & BStackN) {
    int numArgs = ni.imm[0].u_IVA;

    SKTRACE(1, sk, "getInputs: BStackN %d %d\n", stackOff.offset, numArgs);
    for (int i = 0; i < numArgs; i++) {
      inputs.emplace_back(Location::Stack { stackOff-- });
    }
  }
  if (flags & Local) {
    // (Almost) all instructions that take a Local carry its index in their
    // first immediate.
    auto const loc = ni.imm[localImmIdx(ni.op())].u_IVA;
    SKTRACE(1, sk, "getInputs: local %d\n", loc);
    inputs.emplace_back(Location::Local { uint32_t(loc) });
  }
  if (flags & AllLocals) ni.ignoreInnerType = true;

  if (flags & MKey) {
    auto mk = ni.imm[memberKeyImmIdx(ni.op())].u_KA;
    switch (mk.mcode) {
      case MEL: case MPL:
        inputs.emplace_back(Location::Local { uint32_t(mk.iva) });
        break;
      case MEC: case MPC:
        inputs.emplace_back(Location::Stack {
          BCSPRelOffset{int32_t(mk.iva)}.to<FPInvOffset>(bcSPOff)
        });
        break;
      case MW: case MEI: case MET: case MPT: case MQT:
        // The inputs vector is only used for deciding when to break the
        // tracelet, which can never happen for these cases.
        break;
    }
  }

  SKTRACE(1, sk, "stack args: virtual sfo now %d\n", stackOff.offset);
  TRACE(1, "%s\n", Trace::prettyNode("Inputs", inputs).c_str());

  if ((flags & DontGuardAny) || dontGuardAnyInputs(ni.op())) {
    for (auto& info : inputs) info.dontGuard = true;
  }
  return inputs;
}
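/*
 * Consumption sketch (illustrative, not from the original source): a region
 * builder walking the returned InputInfoVec would typically emit a type
 * guard for each input location unless it was flagged dontGuard (as StackN
 * inputs and DontGuardAny opcodes are above). The InputInfo member `loc'
 * and the helper emitTypeGuard() are assumptions made for this sketch.
 */
void guardInputs(NormalizedInstruction& ni, FPInvOffset bcSPOff) {
  auto const inputs = getInputs(ni, bcSPOff);
  for (auto const& info : inputs) {
    if (info.dontGuard) continue;  // guarding was explicitly suppressed
    emitTypeGuard(info.loc);       // hypothetical guard-emission helper
  }
}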
/*
 * getInputs --
 *   Returns locations for this instruction's inputs.
 */
InputInfoVec getInputs(NormalizedInstruction& ni) {
  InputInfoVec inputs;
  auto UNUSED sk = ni.source;
  if (isAlwaysNop(ni.op())) return inputs;
  assertx(inputs.empty());

  always_assert_flog(
    instrInfo.count(ni.op()),
    "Invalid opcode in getInputs: {}\n",
    opcodeToName(ni.op())
  );
  const InstrInfo& info = instrInfo[ni.op()];
  Operands input = info.in;
  BCSPOffset spOff{0};

  if (input & FuncdRef) {
    inputs.needsRefCheck = true;
  }
  if (input & Iter) {
    inputs.emplace_back(Location(Location::Iter, ni.imm[0].u_IVA));
  }
  if (input & FStack) {
    spOff += ni.imm[0].u_IVA; // arguments consumed
    spOff += kNumActRecCells; // ActRec is torn down as well
  }
  if (input & IgnoreInnerType) ni.ignoreInnerType = true;

  if (input & Stack1) {
    SKTRACE(1, sk, "getInputs: stack1 %d\n", spOff.offset);
    inputs.emplace_back(Location(spOff++));

    if (input & DontGuardStack1) inputs.back().dontGuard = true;

    if (input & Stack2) {
      SKTRACE(1, sk, "getInputs: stack2 %d\n", spOff.offset);
      inputs.emplace_back(Location(spOff++));

      if (input & Stack3) {
        SKTRACE(1, sk, "getInputs: stack3 %d\n", spOff.offset);
        inputs.emplace_back(Location(spOff++));
      }
    }
  }
  if (input & StackI) {
    inputs.emplace_back(Location(BCSPOffset{ni.imm[0].u_IVA}));
  }
  if (input & StackN) {
    int numArgs = (ni.op() == Op::NewPackedArray ||
                   ni.op() == Op::ConcatN)
      ? ni.imm[0].u_IVA
      : ni.immVec.numStackValues();

    SKTRACE(1, sk, "getInputs: stackN %d %d\n", spOff.offset, numArgs);
    for (int i = 0; i < numArgs; i++) {
      inputs.emplace_back(Location(spOff++));
      inputs.back().dontGuard = true;
      inputs.back().dontBreak = true;
    }
  }
  if (input & BStackN) {
    int numArgs = ni.imm[0].u_IVA;

    SKTRACE(1, sk, "getInputs: BStackN %d %d\n", spOff.offset, numArgs);
    for (int i = 0; i < numArgs; i++) {
      inputs.emplace_back(Location(spOff++));
    }
  }
  if (input & Local) {
    // (Almost) all instructions that take a Local carry its index in their
    // first immediate.
    auto const loc = ni.imm[localImmIdx(ni.op())].u_IVA;
    SKTRACE(1, sk, "getInputs: local %d\n", loc);
    inputs.emplace_back(Location(Location::Local, loc));
  }
  if (input & MKey) {
    auto mk = ni.imm[memberKeyImmIdx(ni.op())].u_KA;
    switch (mk.mcode) {
      case MEL: case MPL:
        inputs.emplace_back(Location(Location::Local, mk.iva));
        break;
      case MEC: case MPC:
        inputs.emplace_back(Location(BCSPOffset{int32_t(mk.iva)}));
        break;
      case MW: case MEI: case MET: case MPT: case MQT:
        // The inputs vector is only used for deciding when to break the
        // tracelet, which can never happen for these cases.
        break;
    }
  }
  if (input & AllLocals) {
    ni.ignoreInnerType = true;
  }

  SKTRACE(1, sk, "stack args: virtual sfo now %d\n", spOff.offset);
  TRACE(1, "%s\n", Trace::prettyNode("Inputs", inputs).c_str());

  if (inputs.size() &&
      ((input & DontGuardAny) || dontGuardAnyInputs(ni.op()))) {
    for (int i = inputs.size(); i--; ) {
      inputs[i].dontGuard = true;
    }
  }
  if (input & This) {
    inputs.emplace_back(Location(Location::This));
  }
  return inputs;
}
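/*
 * Offset-bookkeeping sketch (illustrative): the two getInputs() overloads
 * above differ mainly in how they name stack slots. The FPInvOffset variant
 * counts cells down from the frame pointer and decrements as it walks
 * deeper inputs; the BCSPOffset variant counts cells down from the bytecode
 * stack pointer and increments. Assuming those semantics, converting an
 * SP-relative slot to a frame-relative one only needs the current
 * eval-stack depth; the free function below is a stand-in for the real
 * to<FPInvOffset>() conversion, not its actual implementation.
 */
int32_t bcspRelToFPInv(int32_t bcspRel, int32_t spDepthFromFP) {
  // A cell `bcspRel' slots below the stack pointer lies
  // `spDepthFromFP - bcspRel' slots below the frame pointer.
  return spDepthFromFP - bcspRel;
}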