/*
 * Simulator flavor of fixup: walk the nested ARM simulators (one per nested
 * VM invocation) to find the innermost genuine VM frame, look up its fixup
 * record, and sync vmfp/vmpc/vmsp from it.
 *
 * Fixes vs. previous revision:
 *  - final TRACE format string ended in "\b" (backspace); every other TRACE
 *    here ends in "\n" — corrected to "\n".
 *  - "&regs" had been garbled into the mojibake "®s"; restored.
 */
void FixupMap::fixupWorkSimulated(ExecutionContext* ec) const {
  TRACE(1, "fixup(begin):\n");

  auto isVMFrame = [] (ActRec* ar, const vixl::Simulator* sim) {
    // If this assert is failing, you may have forgotten a sync point somewhere
    assert(ar);
    // A frame is a VM frame if it lives outside the C++ stack and outside the
    // simulator's own stack.
    bool ret = uintptr_t(ar) - s_stackLimit >= s_stackSize &&
               !sim->is_on_stack(ar);
    assert(!ret ||
           (ar >= vmStack().getStackLowAddress() &&
            ar < vmStack().getStackHighAddress()) ||
           ar->resumed());
    return ret;
  };

  // For each nested simulator (corresponding to nested VM invocations), look
  // at its PC to find a potential fixup key.
  //
  // Callstack walking is necessary, because we may get called from a
  // uniqueStub.
  for (int i = ec->m_activeSims.size() - 1; i >= 0; --i) {
    auto const* sim = ec->m_activeSims[i];
    auto* rbp = reinterpret_cast<ActRec*>(sim->xreg(JIT::ARM::rVmFp.code()));
    auto tca = reinterpret_cast<TCA>(sim->pc());
    TRACE(2, "considering frame %p, %p\n", rbp, tca);

    // Walk up saved frame pointers until we hit a real VM frame.
    while (rbp && !isVMFrame(rbp, sim)) {
      tca = reinterpret_cast<TCA>(rbp->m_savedRip);
      rbp = rbp->m_sfp;
    }
    if (!rbp) continue;

    auto* ent = m_fixups.find(tca);
    if (!ent) {
      continue;
    }

    if (ent->isIndirect()) {
      // Indirect fixups are not supported under simulation.
      not_implemented();
    }

    VMRegs regs;
    regsFromActRec(tca, rbp, ent->fixup, &regs);
    TRACE(2, "fixup(end): func %s fp %p sp %p pc %p\n",
          regs.m_fp->m_func->name()->data(),
          regs.m_fp, regs.m_sp, regs.m_pc);
    vmfp() = const_cast<ActRec*>(regs.m_fp);
    vmpc() = reinterpret_cast<PC>(regs.m_pc);
    vmsp() = regs.m_sp;
    return;
  }

  // This shouldn't be reached.
  always_assert(false);
}
// Unwind the frame for a builtin. Currently only used when switching // modes for hphpd_break and fb_enable_code_coverage. void unwindBuiltinFrame() { auto& stack = vmStack(); auto& fp = vmfp(); assert(fp->m_func->methInfo()); assert(fp->m_func->name()->isame(s_hphpd_break.get()) || fp->m_func->name()->isame(s_fb_enable_code_coverage.get())); // Free any values that may be on the eval stack. We know there // can't be FPI regions and it can't be a generator body because // it's a builtin frame. auto const evalTop = reinterpret_cast<TypedValue*>(vmfp()); while (stack.topTV() < evalTop) { stack.popTV(); } // Free the locals and VarEnv if there is one auto rv = make_tv<KindOfNull>(); frame_free_locals_inl(fp, fp->m_func->numLocals(), &rv); // Tear down the frame Offset pc = -1; ActRec* sfp = g_context->getPrevVMState(fp, &pc); assert(pc != -1); fp = sfp; vmpc() = fp->m_func->unit()->at(pc); stack.discardAR(); stack.pushNull(); // return value }
// Begin unwinding for the most recently raised fault.  The unwinder mutates
// the VM registers through the by-ref arguments and reports what to do next.
UnwindAction enterUnwinder() {
  auto const currentFault = g_context->m_faults.back();
  return unwind(vmfp(),    // by ref
                vmStack(), // by ref
                vmpc(),    // by ref
                currentFault);
}
// Sync the VM registers from an ActRec prepared by a C++ entry point:
// point the stack at the frame's arguments and resume the caller frame at
// the saved return offset.
VMRegAnchor::VMRegAnchor(ActRec* ar)
  : m_old(tl_regState)
{
  assert(tl_regState == VMRegState::DIRTY);
  tl_regState = VMRegState::CLEAN;

  auto outerFrame = g_context->getOuterVMFrame(ar);
  const Func* outerFunc = outerFrame->m_func;
  assert(!ar->resumed());

  auto& regs = vmRegs();
  regs.stack.top() = (TypedValue*)ar - ar->numArgs();
  assert(vmStack().isValidAddress((uintptr_t)vmsp()));
  regs.pc = outerFunc->unit()->at(outerFunc->base() + ar->m_soff);
  regs.fp = outerFrame;
}
// NOTE(review): this is a second definition of the same constructor as the
// one above (an ODR conflict as this file stands) — confirm which of the two
// is intended to survive.
VMRegAnchor::VMRegAnchor(ActRec* ar)
  : m_old(tl_regState)
{
  // Some C++ entry points have an ActRec prepared from after a call
  // instruction. This syncs us to right after the call instruction.
  assert(tl_regState == VMRegState::DIRTY);
  // NOTE(review): m_old was already initialized from tl_regState, which the
  // assert above requires to be DIRTY.  This assignment is only observable in
  // builds where asserts are compiled out — presumably it forces the restored
  // state to DIRTY regardless; confirm it is intentional.
  m_old = VMRegState::DIRTY;
  tl_regState = VMRegState::CLEAN;

  auto prevAr = g_context->getOuterVMFrame(ar);
  const Func* prevF = prevAr->m_func;
  assert(!ar->resumed());

  auto& regs = vmRegs();
  // Stack top sits just below the frame's arguments.
  regs.stack.top() = (TypedValue*)ar - ar->numArgs();
  assert(vmStack().isValidAddress((uintptr_t)vmsp()));
  // Resume the caller at the return offset saved in the ActRec.
  regs.pc = prevF->unit()->at(prevF->base() + ar->m_soff);
  regs.fp = prevAr;
}
// Run a registered fb_intercept handler for the function about to be called
// in *ar.  Returns true if the original function should still run, false if
// the handler consumed the call (in which case the frame has been torn down
// and the handler's return value is on the stack).
bool EventHook::RunInterceptHandler(ActRec* ar) {
  const Func* func = ar->func();
  // Fast path: no intercept registered for this function.
  if (LIKELY(func->maybeIntercepted() == 0)) return true;

  // Intercept only original generator / async function calls, not resumption.
  if (ar->resumed()) return true;

  Variant* h = get_intercept_handler(func->fullNameStr(),
                                     &func->maybeIntercepted());
  if (!h) return true;

  /*
   * In production mode, only functions that we have assumed can be
   * intercepted during static analysis should actually be
   * intercepted.
   */
  if (RuntimeOption::RepoAuthoritative &&
      !RuntimeOption::EvalJitEnableRenameFunction) {
    if (!(func->attrs() & AttrInterceptable)) {
      raise_error("fb_intercept was used on a non-interceptable function (%s) "
                  "in RepoAuthoritative mode", func->fullName()->data());
    }
  }

  VMRegAnchor _;
  // Save vmpc now: the user-level handler below will clobber the VM registers.
  PC savePc = vmpc();

  Variant doneFlag = true;
  Variant called_on;

  if (ar->hasThis()) {
    called_on = Variant(ar->getThis());
  } else if (ar->hasClass()) {
    // For static methods, give handler the name of called class
    called_on = Variant(const_cast<StringData*>(ar->getClass()->name()));
  }
  // Handler signature: (name, called_on, args, extra_data, &done).
  // doneFlag is passed by reference so the handler can let the original
  // function run by setting it to false.
  Variant intArgs =
    PackedArrayInit(5)
      .append(VarNR(ar->func()->fullName()))
      .append(called_on)
      .append(get_frame_args_with_ref(ar))
      .append(h->asCArrRef()[1])
      .appendRef(doneFlag)
      .toArray();

  Variant ret = vm_call_user_func(h->asCArrRef()[0], intArgs);
  if (doneFlag.toBoolean()) {
    // Handler consumed the call: tear down the intercepted frame and leave
    // the handler's return value where the call result would have been.
    Offset pcOff;
    ActRec* outer = g_context->getPrevVMState(ar, &pcOff);

    frame_free_locals_inl_no_hook<true>(ar, ar->func()->numLocals());
    Stack& stack = vmStack();
    stack.top() = (Cell*)(ar + 1);
    cellDup(*ret.asCell(), *stack.allocTV());

    vmfp() = outer;
    vmpc() = outer ? outer->func()->unit()->at(pcOff) : nullptr;
    return false;
  }
  // Handler declined: restore the registers so the original call proceeds.
  vmfp() = ar;
  vmpc() = savePc;
  return true;
}