void interpOne(IRGS& env,
               folly::Optional<Type> outType,
               int popped,
               int pushed,
               InterpOneData& idata) {
  auto const unit = curUnit(env);
  spillStack(env);
  env.irb->exceptionStackBoundary();
  auto const op = unit->getOpcode(bcOff(env));

  auto& iInfo = getInstrInfo(op);
  if (iInfo.type == jit::InstrFlags::OutFDesc) {
    env.fpiStack.push(FPIInfo { sp(env), env.irb->spOffset(), nullptr });
  } else if (isFCallStar(op) && !env.fpiStack.empty()) {
    env.fpiStack.pop();
  }

  idata.bcOff = bcOff(env);
  idata.cellsPopped = popped;
  idata.cellsPushed = pushed;
  idata.opcode = op;

  gen(
    env,
    opcodeChangesPC(idata.opcode) ? InterpOneCF : InterpOne,
    outType,
    idata,
    sp(env),
    fp(env)
  );
  assertx(env.irb->stackDeficit() == 0);
}
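// Illustrative sketch (not HHVM's API): the InterpOneData filled in above
// tells the interpreter helper how to fix up the eval stack around the punted
// instruction, and opcodeChangesPC picks the control-flow variant. The
// hypothetical struct and helper below only restate that bookkeeping: the new
// stack depth is the old depth minus cells popped plus cells pushed.
#include <cstdint>

struct InterpOneSketch {
  uint32_t bcOff;    // bytecode offset of the punted instruction
  int cellsPopped;   // cells the instruction consumes
  int cellsPushed;   // cells the instruction produces
  bool changesPC;    // whether the control-flow variant must be emitted
};

inline int stackDepthAfter(int depthBefore, const InterpOneSketch& d) {
  return depthBefore - d.cellsPopped + d.cellsPushed;
}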
static void recordActRecPush(const SrcKey sk,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  auto unit = sk.unit();
  FTRACE(2, "annotation: recordActRecPush: {}@{} {}{}{} ({}static)\n",
         unit->filepath()->data(),
         sk.offset(),
         clsName ? clsName->data() : "",
         clsName ? "::" : "",
         name,
         !staticCall ? "non" : "");

  SrcKey next(sk);
  next.advance(unit);
  const FPIEnt* fpi = sk.func()->findFPI(next.offset());
  assert(fpi);
  assert(name->isStatic());
  assert(sk.offset() == fpi->m_fpushOff);
  auto const fcall = SrcKey { sk.func(), fpi->m_fcallOff, sk.resumed() };
  assert(isFCallStar(*reinterpret_cast<const Op*>(unit->at(fcall.offset()))));
  auto const func = lookupDirectFunc(sk, name, clsName, staticCall);
  if (func) {
    recordFunc(fcall, func);
  }
}
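// Illustrative sketch (not HHVM's API): recordActRecPush relies on the FPI
// table, which ties the bytecode region between an FPush* and its matching
// FCall* to those two offsets. The real findFPI is queried with an offset
// inside the region, which is why the code above advances past the FPush
// before the lookup and then asserts m_fpushOff. The simplified struct and
// lookup below are hypothetical and only restate that push-to-call mapping.
#include <cstdint>
#include <vector>

struct FpiEntSketch {
  uint32_t fpushOff;  // offset of the FPush* instruction
  uint32_t fcallOff;  // offset of the matching FCall* instruction
};

// Resolve an FPush offset to its FCall offset, or UINT32_MAX if unknown.
inline uint32_t fcallOffsetForPush(const std::vector<FpiEntSketch>& table,
                                   uint32_t fpushOff) {
  for (auto const& ent : table) {
    if (ent.fpushOff == fpushOff) return ent.fcallOff;
  }
  return UINT32_MAX;
}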
void handleStackOverflow(ActRec* calleeAR) {
  /*
   * First synchronize registers.
   *
   * We're called in two situations: either this is the first frame after a
   * re-entry, in which case calleeAR->m_sfp is enterTCHelper's native stack,
   * or we're called in the middle of one VM entry (from a func prologue). We
   * want to raise the exception from the caller's FCall instruction in the
   * second case, and in the first case we have to raise in a special way
   * inside this re-entry.
   *
   * Either way the stack depth is below the calleeAR by numArgs, because we
   * haven't run func prologue duties yet.
   */
  auto& unsafeRegs = vmRegsUnsafe();
  auto const isReentry = calleeAR == vmFirstAR();
  auto const arToSync = isReentry ? calleeAR : calleeAR->m_sfp;
  unsafeRegs.fp = arToSync;
  unsafeRegs.stack.top() =
    reinterpret_cast<Cell*>(calleeAR) - calleeAR->numArgs();
  auto const func_base = arToSync->func()->base();
  // calleeAR m_soff is 0 in the re-entry case, so we'll set pc to the func
  // base. But it also doesn't matter because we're going to throw a special
  // VMReenterStackOverflow in that case so the unwinder won't worry about it.
  unsafeRegs.pc = arToSync->func()->unit()->at(func_base + calleeAR->m_soff);
  tl_regState = VMRegState::CLEAN;

  if (!isReentry) {
    /*
     * The normal case - we were called via FCall, or FCallArray. We need to
     * construct the pc of the fcall from the return address (which will be
     * after the fcall). Because fcall is a variable length instruction, and
     * because we sometimes delete instructions from the instruction stream,
     * we need to use fpi regions to find the fcall.
     */
    const FPIEnt* fe = liveFunc()->findPrecedingFPI(
      liveUnit()->offsetOf(vmpc()));
    vmpc() = liveUnit()->at(fe->m_fcallOff);
    assertx(isFCallStar(peek_op(vmpc())));
    raise_error("Stack overflow");
  } else {
    /*
     * We were called via re-entry. Leak the params and the ActRec, and tell
     * the unwinder that there's nothing left to do in this "entry".
     *
     * Also, the caller hasn't set up the m_invName area on the ActRec (unless
     * it was a magic call), since it's the prologue's responsibility if it's
     * a non-magic call. We can just null it out since we're fatalling.
     */
    vmsp() = reinterpret_cast<Cell*>(calleeAR + 1);
    calleeAR->setVarEnv(nullptr);
    throw VMReenterStackOverflow();
  }
  not_reached();
}
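// Illustrative sketch (not HHVM's API): the register-sync logic above picks
// which frame to report and where the pre-prologue stack top sits. The
// hypothetical, renamed types below only show that arithmetic: on re-entry
// the callee itself is synced, otherwise its saved frame pointer, and the
// stack top is numArgs cells below the callee ActRec because the prologue
// has not yet run.
#include <cstdint>

struct CellSketch { uint64_t data; };
struct ActRecSketch { ActRecSketch* sfp; uint32_t numArgs; };

struct SyncTargetSketch { ActRecSketch* fp; CellSketch* stackTop; };

inline SyncTargetSketch pickSyncTarget(ActRecSketch* calleeAR,
                                       ActRecSketch* firstAR) {
  bool const isReentry = calleeAR == firstAR;
  // On re-entry there is no VM caller frame to report, so sync to the callee;
  // otherwise sync to the caller saved in the frame-pointer chain.
  ActRecSketch* const arToSync = isReentry ? calleeAR : calleeAR->sfp;
  // The arguments sit just below the ActRec, so the logical stack top is
  // numArgs cells below the callee frame.
  CellSketch* const top =
    reinterpret_cast<CellSketch*>(calleeAR) - calleeAR->numArgs;
  return {arToSync, top};
}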
int instrSpToArDelta(const Op* opcode) {
  // This function should only be called for instructions that read
  // the current FPI
  assert(instrReadsCurrentFpi(*opcode));
  // The delta from sp to ar is equal to the number of values on the stack
  // that will be consumed by this instruction (numPops) plus the number of
  // parameters pushed onto the stack so far that are not being consumed by
  // this instruction (numExtra). For the FPass* instructions, numExtra will
  // be equal to the first immediate argument (param id). For the FCall
  // instructions, numExtra will be 0 because all of the parameters on the
  // stack are already accounted for by numPops.
  int numPops = instrNumPops(opcode);
  int numExtra = isFCallStar(*opcode) ? 0 : getImm(opcode, 0).u_IVA;
  return numPops + numExtra;
}
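// Illustrative sketch (not HHVM's API): the sp-to-ar delta described in the
// comment above is just numPops + numExtra. The standalone, hypothetical
// helper below restates that arithmetic with the decoding factored out.
inline int spToArDeltaSketch(int numPops, bool isFCall, int paramIdImm) {
  // FCall* consumes every outstanding parameter, so nothing extra remains
  // between sp and the ActRec beyond what numPops already covers; FPass*
  // contributes its param-id immediate as the extra count.
  int const numExtra = isFCall ? 0 : paramIdImm;
  return numPops + numExtra;
}

// Example: an FPass* for parameter id 2 that pops 1 cell leaves the ActRec
// 1 + 2 = 3 cells above the current stack pointer.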
bool instrIsNonCallControlFlow(Op opcode) {
  if (!instrIsControlFlow(opcode) || isFCallStar(opcode)) return false;

  switch (opcode) {
    case OpContEnter:
    case OpFCallBuiltin:
    case OpIncl:
    case OpInclOnce:
    case OpReq:
    case OpReqOnce:
    case OpReqDoc:
      return false;

    default:
      return true;
  }
}
static void recordActRecPush(NormalizedInstruction& i,
                             const Unit* unit,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  const SrcKey& sk = i.source;
  FTRACE(2, "annotation: recordActRecPush: {}@{} {}{}{} ({}static)\n",
         unit->filepath()->data(),
         sk.offset(),
         clsName ? clsName->data() : "",
         clsName ? "::" : "",
         name,
         !staticCall ? "non" : "");

  SrcKey next(sk);
  next.advance(unit);
  const FPIEnt* fpi = curFunc()->findFPI(next.offset());
  assert(fpi);
  assert(name->isStatic());
  assert(sk.offset() == fpi->m_fpushOff);
  SrcKey fcall = sk;
  fcall.m_offset = fpi->m_fcallOff;
  assert(isFCallStar(*unit->at(fcall.offset())));
  if (clsName) {
    const Class* cls = Unit::lookupUniqueClass(clsName);
    bool magic = false;
    const Func* func = lookupImmutableMethod(cls, name, magic, staticCall);
    if (func) {
      recordFunc(i, fcall, func);
    }
    return;
  }
  const Func* func = Unit::lookupFunc(name);
  if (func && func->isNameBindingImmutable(unit)) {
    // This will never go into a call cache, so we don't need to encode the
    // args. It will be used in OpFCall below to set the i->funcd.
    recordFunc(i, fcall, func);
  } else {
    // It's not enough to remember the function name; we also need to encode
    // the number of arguments and current flag disposition.
    int numArgs = getImm(unit->at(sk.offset()), 0).u_IVA;
    recordNameAndArgs(fcall, name, numArgs);
  }
}
bool instrIsNonCallControlFlow(Op opcode) {
  if (!instrIsControlFlow(opcode) || isFCallStar(opcode)) return false;

  switch (opcode) {
    case OpAwait:
    case OpYield:
    case OpYieldK:
    case OpContEnter:
    case OpContRaise:
    case OpContEnterDelegate:
    case OpYieldFromDelegate:
    case OpFCallBuiltin:
    case OpIncl:
    case OpInclOnce:
    case OpReq:
    case OpReqOnce:
    case OpReqDoc:
      return false;

    default:
      return true;
  }
}
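// Illustrative sketch (not HHVM's API): a predicate shaped like the one above
// is typically used to decide where straight-line translation must stop,
// since calls fall through to the next instruction while other control flow
// ends the block. The toy scanner below is hypothetical; it accepts any
// opcode type plus a caller-supplied predicate and returns the index one past
// the first non-call control-flow instruction.
#include <cstddef>
#include <vector>

template <typename Op, typename Pred>
std::size_t scanToBlockEnd(const std::vector<Op>& ops,
                           std::size_t start,
                           Pred isNonCallControlFlow) {
  for (std::size_t i = start; i < ops.size(); ++i) {
    // Only non-call control flow terminates the block.
    if (isNonCallControlFlow(ops[i])) return i + 1;
  }
  return ops.size();
}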