/*
 * Record a known call target for an FPush* instruction.
 *
 * `sk' points at the FPush* bytecode; `name' and `clsName' identify the
 * callee (clsName may be null for a free function), and `staticCall'
 * distinguishes static from instance calls.  We locate the enclosing FPI
 * region, find the matching FCall* offset, and if the callee resolves to a
 * unique Func, remember it keyed by the FCall's SrcKey.
 */
static void recordActRecPush(const SrcKey sk,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  auto unit = sk.unit();
  // NOTE(review): `name' is passed to FTRACE without ->data(), unlike
  // clsName above — presumably a FormatValue specialization for StringData*
  // exists; confirm this prints the string and not a pointer.
  FTRACE(2, "annotation: recordActRecPush: {}@{} {}{}{} ({}static)\n",
         unit->filepath()->data(), sk.offset(),
         clsName ? clsName->data() : "", clsName ? "::" : "",
         name, !staticCall ? "non" : "");

  SrcKey next(sk);
  next.advance(unit);
  // The FPI region begins after the FPush itself, so look it up using the
  // offset of the following instruction.
  const FPIEnt *fpi = sk.func()->findFPI(next.offset());
  assert(fpi);
  assert(name->isStatic());
  assert(sk.offset() == fpi->m_fpushOff);

  // The instruction closing the FPI region must be one of the FCall* family.
  auto const fcall = SrcKey { sk.func(), fpi->m_fcallOff, sk.resumed() };
  assert(isFCallStar(*reinterpret_cast<const Op*>(unit->at(fcall.offset()))));

  auto const func = lookupDirectFunc(sk, name, clsName, staticCall);
  if (func) {
    recordFunc(fcall, func);
  }
}
/*
 * Remember that the FCall* at `sk' is known to invoke `func', so later
 * translation of that call site can target the function directly.
 */
static void recordFunc(const SrcKey sk, const Func* func) {
  FTRACE(2, "annotation: recordFunc: {}@{} {}\n",
         sk.unit()->filepath()->data(), sk.offset(),
         func->fullName()->data());
  // emplace() constructs the element in place instead of building a
  // temporary with make_pair and copying it in.
  s_callDB.emplace(sk, func);
}
/*
 * Check whether we are able to inline `callee' at the call site `callSK'.
 *
 * This covers policy checks (runtime options, architecture, debugger rename
 * mode), visibility/persistence of the callee, and the shape of the call
 * site; per-callee cost checks are delegated to isCalleeInlinable() and
 * checkNumArgs().
 */
bool InliningDecider::canInlineAt(SrcKey callSK, const Func* callee) const {
  if (!callee ||
      !RuntimeOption::EvalHHIREnableGenTimeInlining ||
      RuntimeOption::EvalJitEnableRenameFunction ||
      callee->attrs() & AttrInterceptable) {
    return false;
  }

  if (callee->cls()) {
    if (!classHasPersistentRDS(callee->cls())) {
      // If the callee's class is not persistent, it's still OK to use it if
      // we're jitting into a method of a subclass.
      auto ctx = callSK.func()->cls();
      if (!ctx || !ctx->classof(callee->cls())) {
        return false;
      }
    }
  } else {
    auto const handle = callee->funcHandle();
    if (handle == rds::kInvalidHandle || !rds::isPersistentHandle(handle)) {
      // If the callee isn't persistent, it's still OK to use it if it's
      // defined at the top level in the same unit as the caller.
      if (callee->unit() != callSK.unit() || !callee->top()) {
        return false;
      }
    }
  }

  // If inlining was disabled... don't inline.
  if (m_disabled) return false;

  // TODO(#3331014): We have this hack until more ARM codegen is working.
  if (arch() == Arch::ARM) return false;

  // We can only inline at normal FCalls.
  if (callSK.op() != Op::FCall &&
      callSK.op() != Op::FCallD) {
    return false;
  }

  // Don't inline from resumed functions.  The inlining mechanism doesn't have
  // support for these---it has no way to redefine stack pointers relative to
  // the frame pointer, because in a resumed function the frame pointer points
  // into the heap instead of into the eval stack.
  if (callSK.resumed()) return false;

  // TODO(#4238160): Inlining into pseudomain callsites is still buggy.
  if (callSK.func()->isPseudoMain()) return false;

  // Remaining checks are specific to the callee's body and arity.
  if (!isCalleeInlinable(callSK, callee) ||
      !checkNumArgs(callSK, callee)) {
    return false;
  }

  return true;
}
/*
 * Emit a trace line for `sk', prefixed with the SrcKey description and the
 * disassembled instruction at that offset, followed by a printf-style
 * formatted message.
 */
void sktrace(SrcKey sk, const char *fmt, ...) {
  if (!Trace::enabled) return;

  auto const instStr = instrToString((Op*)sk.unit()->at(sk.offset()));
  Trace::trace("%s: %20s ", show(sk).c_str(), instStr.c_str());

  va_list args;
  va_start(args, fmt);
  Trace::vtrace(fmt, args);
  va_end(args);
}
/*
 * Produce a human-readable description of `sk' for tracing:
 * "<file>:<line> in <func>(id <funcId>)@<bc offset>".
 */
std::string show(SrcKey sk) {
  auto func = sk.func();
  auto unit = sk.unit();
  const char *filepath = "*anonFile*";
  if (unit->filepath()->data() && unit->filepath()->size()) {
    filepath = unit->filepath()->data();
  }
  // "{:#x}" already emits the "0x" prefix (alternate form), so the format
  // string must not also spell out a literal "0x" — that printed "0x0x...".
  return folly::format("{}:{} in {}(id {:#x})@{: >6}",
                       filepath, unit->getLineNumber(sk.offset()),
                       func->isPseudoMain() ? "pseudoMain"
                                            : func->fullName()->data(),
                       (unsigned long long)sk.getFuncId(),
                       sk.offset()).str();
}
/*
 * Produce a human-readable description of `sk' for tracing:
 * "<file>:<line> in <func>(id <funcId>)@<bc offset>", followed by flag
 * letters: "r" if resumed, "t" if the key has a this-context, "p" if it is
 * a prologue key.
 */
std::string show(SrcKey sk) {
  auto func = sk.func();
  auto unit = sk.unit();
  const char *filepath = "*anonFile*";
  if (unit->filepath()->data() && unit->filepath()->size()) {
    filepath = unit->filepath()->data();
  }
  // Two fixes to the format string: "{:#x}" already emits the "0x" prefix,
  // so the literal "0x" was doubled; and there were only seven placeholders
  // for eight arguments, silently dropping the prologue "p" flag.
  return folly::sformat("{}:{} in {}(id {:#x})@{: >6}{}{}{}",
                        filepath, unit->getLineNumber(sk.offset()),
                        func->isPseudoMain() ? "pseudoMain"
                                             : func->fullName()->data(),
                        (uint32_t)sk.funcID(), sk.offset(),
                        sk.resumed() ? "r" : "",
                        sk.hasThis() ? "t" : "",
                        sk.prologue() ? "p" : "");
}
/*
 * Return true if the SrcRec for `sk' has hit the per-SrcKey translation
 * limit for translations of kind `kind', dumping diagnostic trace output
 * about the existing translations when it does.
 *
 * Note the use of `!=' rather than `>=' below: once the count reaches the
 * limit no further translations are created, so the count only ever equals
 * the limit exactly once — this makes the expensive trace dump (and the
 * max_trans counter bump) fire a single time per SrcKey.
 */
bool reachedTranslationLimit(TransKind kind, SrcKey sk, const SrcRec& srcRec) {
  const auto numTrans = srcRec.translations().size();

  // Optimized translations perform this check at relocation time to avoid
  // invalidating all of their SrcKeys early.
  if (kind == TransKind::Optimize) return false;

  if ((kind == TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxProfileTranslations) ||
      (kind != TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxTranslations)) {
    return false;
  }
  INC_TPC(max_trans);

  // Debug-only: dump the full set of existing translations, marking the top
  // translation and each translation's guard set.
  if (debug && Trace::moduleEnabled(Trace::mcg, 2)) {
    const auto& tns = srcRec.translations();
    TRACE(1, "Too many (%zd) translations: %s, BC offset %d\n",
          tns.size(), sk.unit()->filepath()->data(), sk.offset());
    SKTRACE(2, sk, "{\n");
    TCA topTrans = srcRec.getTopTranslation();
    for (size_t i = 0; i < tns.size(); ++i) {
      auto const rec = transdb::getTransRec(tns[i].mainStart());
      assertx(rec);
      SKTRACE(2, sk, "%zd %p\n", i, tns[i].mainStart());
      if (tns[i].mainStart() == topTrans) {
        SKTRACE(2, sk, "%zd: *Top*\n", i);
      }
      if (rec->kind == TransKind::Anchor) {
        SKTRACE(2, sk, "%zd: Anchor\n", i);
      } else {
        SKTRACE(2, sk, "%zd: guards {\n", i);
        for (unsigned j = 0; j < rec->guards.size(); ++j) {
          FTRACE(2, "{}\n", rec->guards[j]);
        }
        SKTRACE(2, sk, "%zd } guards\n", i);
      }
    }
    SKTRACE(2, sk, "} /* Too many translations */\n");
  }
  return true;
}
/*
 * Return true if `sk' is on the debugger blacklist — i.e. if its unit is
 * interpret-only, or if any PC in the basic block starting at `sk' matches
 * the blacklisted-PC set (in which case `sk' is cached in the blacklisted
 * SrcKey set for fast subsequent lookups).
 */
bool Translator::isSrcKeyInBL(SrcKey sk) {
  auto unit = sk.unit();
  if (unit->isInterpretOnly()) return true;

  Lock l(m_dbgBlacklistLock);
  if (m_dbgBLSrcKey.find(sk) != m_dbgBLSrcKey.end()) {
    return true;
  }

  // Walk to the end of the basic block inclusively.  This is useful for
  // function exit breakpoints, which are implemented by blacklisting the
  // RetC opcodes.
  PC pc = unit->at(sk.offset());
  for (;;) {
    if (m_dbgBLPC.checkPC(pc)) {
      m_dbgBLSrcKey.insert(sk);
      return true;
    }
    if (opcodeBreaksBB(peek_op(pc))) break;
    pc = pc + instrLen(pc);
  }
  return false;
}