void reclaimFunction(const Func* func) {
  BlockingLeaseHolder writer(Translator::WriteLease());

  auto it = s_funcTCData.find(func);
  if (it == s_funcTCData.end()) return;

  ITRACE(1, "Tearing down func {} (id={})\n",
         func->fullName()->data(), func->getFuncId());
  Trace::Indent _i;

  auto& data = it->second;
  auto& us = mcg->ustubs();

  ITRACE(1, "Smashing prologues\n");
  clobberFuncGuards(func);

  for (auto& caller : data.callers) {
    ITRACE(1, "Unsmashing call @ {} (guard = {})\n",
           caller.first, caller.second.isGuard);

    // It should be impossible to reach a prologue that has been reclaimed
    // through an immutable stub, as this would imply the function is still
    // reachable.
    auto addr = caller.second.isGuard ? us.bindCallStub : nullptr;
    smashCall(caller.first, addr);
    s_smashedCalls.erase(caller.first);
  }

  auto movedData = folly::makeMoveWrapper(std::move(data));
  auto fname = func->fullName()->data();
  auto fid = func->getFuncId();

  // We just smashed all of those callers; treadmill the free to avoid a
  // race (threads executing callers may end up inside the guard even though
  // the function is now unreachable). Once the following block runs, the
  // guards should be unreachable.
  Treadmill::enqueue([fname, fid, movedData] {
    BlockingLeaseHolder writer(Translator::WriteLease());

    ITRACE(1, "Reclaiming func {} (id={})\n", fname, fid);
    Trace::Indent _i;
    {
      ITRACE(1, "Reclaiming Prologues\n");
      Trace::Indent _i;
      for (auto& loc : movedData->prologues) {
        reclaimTranslation(loc);
      }
    }

    for (auto* rec : movedData->srcRecs) {
      reclaimSrcRec(rec);
    }
  });

  s_funcTCData.erase(it);
}
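/*
 * A minimal, self-contained sketch of the deferred-reclamation idea the
 * Treadmill::enqueue call above relies on: work enqueued now may only run
 * once every request that was in flight at enqueue time has retired, so a
 * thread racing through a just-smashed caller can never observe freed code.
 * The names below (SimpleTreadmill, advance) are hypothetical and only
 * illustrate the pattern; this is not HHVM's actual Treadmill implementation.
 */
#include <cstdint>
#include <deque>
#include <functional>
#include <mutex>

struct SimpleTreadmill {
  // Record the current generation alongside the deferred work.
  void enqueue(std::function<void()> work) {
    std::lock_guard<std::mutex> g(m_lock);
    m_pending.push_back({m_gen, std::move(work)});
  }

  // Called once no request that started before the current generation is
  // still running; at that point work enqueued in any older generation can
  // no longer be observed by a racing thread and is safe to run.
  void advance() {
    std::deque<Item> ready;
    {
      std::lock_guard<std::mutex> g(m_lock);
      ++m_gen;
      while (!m_pending.empty() && m_pending.front().gen < m_gen) {
        ready.push_back(std::move(m_pending.front()));
        m_pending.pop_front();
      }
    }
    // Run the deferred work outside the lock.
    for (auto& item : ready) item.work();
  }

 private:
  struct Item {
    uint64_t gen;
    std::function<void()> work;
  };
  std::mutex m_lock;
  uint64_t m_gen{0};
  std::deque<Item> m_pending;
};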
/*
 * Reclaim all translations associated with a SrcRec including the anchor
 * translation.
 */
void reclaimSrcRec(const SrcRec* rec) {
  assertOwnsCodeLock();
  assertOwnsMetadataLock();

  ITRACE(1, "Reclaiming SrcRec addr={} anchor={}\n",
         (void*)rec, rec->getFallbackTranslation());
  Trace::Indent _i;

  auto anchor = rec->getFallbackTranslation();
  code().blockFor(anchor).free(anchor, svcreq::stub_size());

  for (auto& loc : rec->translations()) {
    reclaimTranslation(loc);
  }
}
void SrcRec::replaceOldTranslations() {
  // Everyone needs to give up on old translations; send them to the anchor,
  // which is a REQ_RETRANSLATE.
  auto translations = std::move(m_translations);
  m_tailFallbackJumps.clear();
  m_topTranslation = nullptr;

  /*
   * It may seem a little weird that we're about to point every
   * incoming branch at the anchor, since that's going to just
   * unconditionally retranslate this SrcKey and never patch the
   * incoming branch to do something else.
   *
   * The reason this is ok is that this mechanism is only used in
   * non-RepoAuthoritative mode, and the granularity of code
   * invalidation there is such that we'll only have incoming branches
   * like this basically within the same file, since we don't have
   * whole-program analysis.
   *
   * This means all these incoming branches are about to go away
   * anyway ...
   *
   * If we ever change that we'll have to change this to patch to
   * some sort of rebind requests.
   */
  assertx(!RuntimeOption::RepoAuthoritative || RuntimeOption::EvalJitPGO);
  patchIncomingBranches(m_anchorTranslation);

  // Now that we've smashed all the incoming branches for these translations
  // they should be unreachable; to prevent a race we treadmill here and then
  // reclaim their associated TC space.
  if (RuntimeOption::EvalEnableReusableTC) {
    auto trans = folly::makeMoveWrapper(std::move(translations));
    Treadmill::enqueue([trans]() mutable {
      for (auto& loc : *trans) {
        reclaimTranslation(loc);
      }
      trans->clear();
    });
    return;
  }

  translations.clear();
}
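/*
 * Hypothetical usage of the SimpleTreadmill sketch above, mirroring the
 * move-into-closure pattern used by replaceOldTranslations. A shared_ptr
 * stands in for folly::MoveWrapper so the lambda stays copyable (as
 * std::function requires); TransLoc and reclaimTranslation are the type and
 * helper from the surrounding file. The function name is illustrative only.
 */
#include <memory>
#include <vector>

void deferredReclaimExample(SimpleTreadmill& treadmill,
                            std::vector<TransLoc> oldTranslations) {
  auto trans =
    std::make_shared<std::vector<TransLoc>>(std::move(oldTranslations));
  treadmill.enqueue([trans] {
    // By the time this runs, no request can still be executing the old
    // translations, so their TC space can be released.
    for (auto& loc : *trans) {
      reclaimTranslation(loc);
    }
    trans->clear();
  });
}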