static void recordActRecPush(const SrcKey sk,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  auto unit = sk.unit();
  FTRACE(2, "annotation: recordActRecPush: {}@{} {}{}{} ({}static)\n",
         unit->filepath()->data(),
         sk.offset(),
         clsName ? clsName->data() : "",
         clsName ? "::" : "",
         name,
         !staticCall ? "non" : "");

  SrcKey next(sk);
  next.advance(unit);
  const FPIEnt *fpi = sk.func()->findFPI(next.offset());
  assert(fpi);
  assert(name->isStatic());
  assert(sk.offset() == fpi->m_fpushOff);

  auto const fcall = SrcKey { sk.func(), fpi->m_fcallOff, sk.resumed() };
  assert(isFCallStar(*reinterpret_cast<const Op*>(unit->at(fcall.offset()))));

  auto const func = lookupDirectFunc(sk, name, clsName, staticCall);
  if (func) {
    recordFunc(fcall, func);
  }
}
std::string show(SrcKey sk) { auto func = sk.func(); auto unit = sk.unit(); const char *filepath = "*anonFile*"; if (unit->filepath()->data() && unit->filepath()->size()) { filepath = unit->filepath()->data(); } return folly::format("{}:{} in {}(id 0x{:#x})@{: >6}", filepath, unit->getLineNumber(sk.offset()), func->isPseudoMain() ? "pseudoMain" : func->fullName()->data(), (unsigned long long)sk.getFuncId(), sk.offset()).str(); }
std::string show(SrcKey sk) { auto func = sk.func(); auto unit = sk.unit(); const char *filepath = "*anonFile*"; if (unit->filepath()->data() && unit->filepath()->size()) { filepath = unit->filepath()->data(); } return folly::sformat("{}:{} in {}(id 0x{:#x})@{: >6}{}{}", filepath, unit->getLineNumber(sk.offset()), func->isPseudoMain() ? "pseudoMain" : func->fullName()->data(), (uint32_t)sk.funcID(), sk.offset(), sk.resumed() ? "r" : "", sk.hasThis() ? "t" : "", sk.prologue() ? "p" : ""); }
void recordGdbTranslation(SrcKey sk, const Func* srcFunc, const CodeBlock& cb,
                          const TCA start, const TCA end,
                          bool exit, bool inPrologue) {
  assertx(cb.contains(start) && cb.contains(end));
  if (start != end) {
    assertOwnsCodeLock();
    if (!RuntimeOption::EvalJitNoGdb) {
      Debug::DebugInfo::Get()->recordTracelet(
        Debug::TCRange(start, end, &cb == &code().cold()),
        srcFunc,
        srcFunc->unit() ? srcFunc->unit()->at(sk.offset()) : nullptr,
        exit, inPrologue
      );
    }
    if (RuntimeOption::EvalPerfPidMap) {
      Debug::DebugInfo::Get()->recordPerfMap(
        Debug::TCRange(start, end, &cb == &code().cold()),
        sk, srcFunc, exit, inPrologue
      );
    }
  }
}
void prepareForNextHHBC(IRGS& env,
                        const NormalizedInstruction* ni,
                        SrcKey newSk,
                        bool lastBcInst) {
  FTRACE(1, "------------------- prepareForNextHHBC ------------------\n");
  env.currentNormalizedInstruction = ni;

  always_assert_flog(
    IMPLIES(isInlining(env), !env.lastBcInst),
    "Tried to end trace while inlining."
  );
  always_assert_flog(
    IMPLIES(isInlining(env), !env.firstBcInst),
    "Inlining while still at the first region instruction."
  );

  always_assert(env.bcStateStack.size() >= env.inlineLevel + 1);
  auto pops = env.bcStateStack.size() - 1 - env.inlineLevel;
  while (pops--) env.bcStateStack.pop_back();

  always_assert_flog(env.bcStateStack.back().func() == newSk.func(),
                     "Tried to update current SrcKey with a different func");

  env.bcStateStack.back().setOffset(newSk.offset());
  updateMarker(env);
  env.lastBcInst = lastBcInst;
  env.catchCreator = nullptr;
  env.irb->prepareForNextHHBC();
}
static TransIDSet findPredTrans(TransID dstID,
                                const ProfData* profData,
                                const SrcDB& srcDB,
                                const TcaTransIDMap& jmpToTransID) {
  SrcKey dstSK = profData->transSrcKey(dstID);
  const SrcRec* dstSR = srcDB.find(dstSK);
  assertx(dstSR);
  TransIDSet predSet;

  for (auto& inBr : dstSR->incomingBranches()) {
    TransID srcID = folly::get_default(jmpToTransID, inBr.toSmash(),
                                       kInvalidTransID);
    FTRACE(5, "findPredTrans: toSmash = {} srcID = {}\n",
           inBr.toSmash(), srcID);
    if (srcID != kInvalidTransID && profData->isKindProfile(srcID)) {
      auto srcSuccOffsets = profData->transLastSrcKey(srcID).succOffsets();
      if (srcSuccOffsets.count(dstSK.offset())) {
        predSet.insert(srcID);
      } else {
        FTRACE(5, "findPredTrans: WARNING: incoming branch with impossible "
               "control flow between translations: {} -> {} "
               "(probably due to side exit)\n", srcID, dstID);
      }
    }
  }

  return predSet;
}
void addDbgGuardImpl(SrcKey sk) {
  vixl::MacroAssembler a { tx64->mainCode };

  vixl::Label after;
  vixl::Label interpReqAddr;

  // Get the debugger-attached flag from thread-local storage. Don't bother
  // saving caller-saved regs around the host call; this is between blocks.
  emitTLSLoad<ThreadInfo>(a, ThreadInfo::s_threadInfo, rAsm);

  // Is the debugger attached?
  a.   Ldr  (rAsm.W(), rAsm[dbgOff]);
  a.   Tst  (rAsm, 0xff);
  // skip jump to stubs if no debugger attached
  a.   B    (&after, vixl::eq);
  a.   Ldr  (rAsm, &interpReqAddr);
  a.   Br   (rAsm);
  if (!a.isFrontierAligned(8)) {
    a. Nop  ();
    assert(a.isFrontierAligned(8));
  }
  a.   bind (&interpReqAddr);
  TCA interpReq =
    emitServiceReq(tx64->stubsCode, REQ_INTERPRET, sk.offset(), 0);
  a.   dc64 (interpReq);
  a.   bind (&after);
}
static void recordActRecPush(const SrcKey& sk,
                             const Unit* unit,
                             const FPIEnt* fpi,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  // sk is the address of a FPush* of the function whose static name
  // is name. The boundaries of FPI regions are such that we can't quite
  // find the FCall that matches this FuncD without decoding forward to
  // the end; this is not ideal, but is hopefully affordable at translation
  // time.
  ASSERT(name->isStatic());
  ASSERT(sk.offset() == fpi->m_fpushOff);
  SrcKey fcall;
  SrcKey next(sk);
  next.advance(unit);
  do {
    if (*unit->at(next.offset()) == OpFCall) {
      // Remember the last FCall in the region; the region might end
      // with UnboxR, e.g.
      fcall = next;
    }
    next.advance(unit);
  } while (next.offset() <= fpi->m_fcallOff);
  ASSERT(*unit->at(fcall.offset()) == OpFCall);
  if (clsName) {
    const Class* cls = Unit::lookupClass(clsName);
    bool magic = false;
    const Func* func = lookupImmutableMethod(cls, name, magic, staticCall);
    if (func) {
      recordFunc(fcall, func);
    }
    return;
  }
  const Func* func = Unit::lookupFunc(name);
  if (func && func->isNameBindingImmutable(unit)) {
    // This will never go into a call cache, so we don't need to
    // encode the args. It will be used in OpFCall below to
    // set the i->funcd.
    recordFunc(fcall, func);
  } else {
    // It's not enough to remember the function name; we also need to encode
    // the number of arguments and current flag disposition.
    int numArgs = getImm(unit->at(sk.offset()), 0).u_IVA;
    recordNameAndArgs(fcall, name, numArgs);
  }
}
void IRTranslator::translateFCallArray(const NormalizedInstruction& i) {
  const Offset pcOffset = i.offset();
  SrcKey next = i.nextSk();
  const Offset after = next.offset();

  HHIR_EMIT(FCallArray, pcOffset, after,
            jit::callDestroysLocals(i, m_hhbcTrans.curFunc()));
}
static void recordFunc(const SrcKey sk,
                       const Func* func) {
  FTRACE(2, "annotation: recordFunc: {}@{} {}\n",
         sk.unit()->filepath()->data(),
         sk.offset(),
         func->fullName()->data());
  s_callDB.insert(std::make_pair(sk, func));
}
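// Hedged sketch (not part of the original source): the read side of s_callDB
// for this vintage, where the mapped value is a plain const Func*. The helper
// name lookupRecordedFunc is an assumption for illustration only; it simply
// returns the Func recorded above for an FCall's SrcKey, or nullptr.
static const Func* lookupRecordedFunc(const SrcKey sk) {
  auto const it = s_callDB.find(sk);
  return it != s_callDB.end() ? it->second : nullptr;
}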
std::string showShort(SrcKey sk) { if (!sk.valid()) return "<invalid SrcKey>"; return folly::format( "{}(id {:#x})@{}{}", sk.func()->fullName(), sk.funcID(), sk.offset(), sk.resumed() ? "r" : "" ).str(); }
TCA emit_retranslate_stub(CodeBlock& cb, FPInvOffset spOff,
                          SrcKey target, TransFlags trflags) {
  return emit_persistent(cb,
                         target.resumed() ? folly::none
                                          : folly::make_optional(spOff),
                         REQ_RETRANSLATE,
                         target.offset(),
                         trflags.packed);
}
void sktrace(SrcKey sk, const char *fmt, ...) {
  if (!Trace::enabled) return;

  auto inst = instrToString((Op*)sk.unit()->at(sk.offset()));
  Trace::trace("%s: %20s ", show(sk).c_str(), inst.c_str());
  va_list a;
  va_start(a, fmt);
  Trace::vtrace(fmt, a);
  va_end(a);
}
/*
 * Returns the last BC offset in the region that corresponds to the
 * function where the region starts. This will normally be the offset
 * of the last instruction in the last block, except if the region
 * ends with an inlined call. In this case, the offset of the
 * corresponding FCall* in the function that starts the region is
 * returned.
 */
static Offset findLastBcOffset(const RegionDescPtr region) {
  assert(region->blocks.size() > 0);
  auto& blocks = region->blocks;
  FuncId startFuncId = blocks[0]->start().getFuncId();
  for (int i = blocks.size() - 1; i >= 0; i--) {
    SrcKey sk = blocks[i]->last();
    if (sk.getFuncId() == startFuncId) {
      return sk.offset();
    }
  }
  not_reached();
}
void addDbgGuardImpl(SrcKey sk) {
  Asm a { tx64->mainCode };

  // Emit the checks for debugger attach
  emitTLSLoad<ThreadInfo>(a, ThreadInfo::s_threadInfo, reg::rAsm);
  a.   load_reg64_disp_reg32(reg::rAsm, dbgOff, reg::rAsm);
  a.   testb((int8_t)0xff, rbyte(reg::rAsm));

  // Branch to a special REQ_INTERPRET if attached
  TCA fallback =
    emitServiceReq(tx64->stubsCode, REQ_INTERPRET, sk.offset(), 0);
  a.   jnz(fallback);
}
static void recordFunc(NormalizedInstruction& i,
                       const SrcKey& sk,
                       const Func* func) {
  FTRACE(2, "annotation: recordFunc: {}@{} {}\n",
         i.m_unit->filepath()->data(),
         sk.offset(),
         func->fullName()->data());

  CallRecord cr;
  cr.m_type = Function;
  cr.m_func = func;
  s_callDB.insert(std::make_pair(sk, cr));
  i.directCall = true;
}
static void recordActRecPush(NormalizedInstruction& i,
                             const Unit* unit,
                             const StringData* name,
                             const StringData* clsName,
                             bool staticCall) {
  const SrcKey& sk = i.source;
  FTRACE(2, "annotation: recordActRecPush: {}@{} {}{}{} ({}static)\n",
         unit->filepath()->data(),
         sk.offset(),
         clsName ? clsName->data() : "",
         clsName ? "::" : "",
         name,
         !staticCall ? "non" : "");

  SrcKey next(sk);
  next.advance(unit);
  const FPIEnt *fpi = curFunc()->findFPI(next.offset());
  assert(fpi);
  assert(name->isStatic());
  assert(sk.offset() == fpi->m_fpushOff);

  SrcKey fcall = sk;
  fcall.m_offset = fpi->m_fcallOff;
  assert(isFCallStar(*unit->at(fcall.offset())));

  if (clsName) {
    const Class* cls = Unit::lookupUniqueClass(clsName);
    bool magic = false;
    const Func* func = lookupImmutableMethod(cls, name, magic, staticCall);
    if (func) {
      recordFunc(i, fcall, func);
    }
    return;
  }
  const Func* func = Unit::lookupFunc(name);
  if (func && func->isNameBindingImmutable(unit)) {
    // This will never go into a call cache, so we don't need to
    // encode the args. It will be used in OpFCall below to
    // set the i->funcd.
    recordFunc(i, fcall, func);
  } else {
    // It's not enough to remember the function name; we also need to encode
    // the number of arguments and current flag disposition.
    int numArgs = getImm(unit->at(sk.offset()), 0).u_IVA;
    recordNameAndArgs(fcall, name, numArgs);
  }
}
TransRec::TransRec(SrcKey _src,
                   TransID transID,
                   TransKind _kind,
                   TCA _aStart,
                   uint32_t _aLen,
                   TCA _acoldStart,
                   uint32_t _acoldLen,
                   TCA _afrozenStart,
                   uint32_t _afrozenLen,
                   RegionDescPtr region,
                   std::vector<TransBCMapping> _bcMapping,
                   Annotations&& _annotations,
                   bool _hasLoop)
  : bcMapping(_bcMapping)
  , annotations(std::move(_annotations))
  , funcName(_src.func()->fullName()->data())
  , src(_src)
  , md5(_src.func()->unit()->md5())
  , aStart(_aStart)
  , acoldStart(_acoldStart)
  , afrozenStart(_afrozenStart)
  , aLen(_aLen)
  , acoldLen(_acoldLen)
  , afrozenLen(_afrozenLen)
  , bcStart(_src.offset())
  , id(transID)
  , kind(_kind)
  , hasLoop(_hasLoop)
{
  if (funcName.empty()) funcName = "Pseudo-main";

  if (!region) return;

  assertx(!region->empty());
  for (auto& block : region->blocks()) {
    auto sk = block->start();
    blocks.emplace_back(Block{sk.unit()->md5(), sk.offset(),
                              block->last().advanced().offset()});
  }

  auto& firstBlock = *region->blocks().front();
  for (auto const& pred : firstBlock.typePreConditions()) {
    guards.emplace_back(show(pred));
  }
}
TransRec::TransRec(SrcKey _src,
                   TransKind _kind,
                   TCA _aStart,
                   uint32_t _aLen,
                   TCA _acoldStart,
                   uint32_t _acoldLen,
                   TCA _afrozenStart,
                   uint32_t _afrozenLen,
                   RegionDescPtr region,
                   std::vector<TransBCMapping> _bcMapping,
                   bool _isLLVM)
  : bcMapping(_bcMapping)
  , funcName(_src.func()->fullName()->data())
  , src(_src)
  , md5(_src.func()->unit()->md5())
  , aStart(_aStart)
  , acoldStart(_acoldStart)
  , afrozenStart(_afrozenStart)
  , aLen(_aLen)
  , acoldLen(_acoldLen)
  , afrozenLen(_afrozenLen)
  , bcStart(_src.offset())
  , id(0)
  , kind(_kind)
  , isLLVM(_isLLVM)
{
  if (funcName.empty()) funcName = "Pseudo-main";

  if (!region) return;

  assertx(!region->empty());
  for (auto& block : region->blocks()) {
    auto sk = block->start();
    blocks.emplace_back(Block{sk.unit()->md5(), sk.offset(),
                              block->last().advanced().offset()});
  }

  auto& firstBlock = *region->blocks().front();
  auto guardRange = firstBlock.typePreds().equal_range(firstBlock.start());
  for (; guardRange.first != guardRange.second; ++guardRange.first) {
    guards.emplace_back(show(guardRange.first->second));
  }
}
bool reachedTranslationLimit(TransKind kind, SrcKey sk, const SrcRec& srcRec) {
  const auto numTrans = srcRec.translations().size();

  // Optimized translations perform this check at relocation time to avoid
  // invalidating all of their SrcKeys early.
  if (kind == TransKind::Optimize) return false;

  if ((kind == TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxProfileTranslations) ||
      (kind != TransKind::Profile &&
       numTrans != RuntimeOption::EvalJitMaxTranslations)) {
    return false;
  }
  INC_TPC(max_trans);

  if (debug && Trace::moduleEnabled(Trace::mcg, 2)) {
    const auto& tns = srcRec.translations();
    TRACE(1, "Too many (%zd) translations: %s, BC offset %d\n",
          tns.size(), sk.unit()->filepath()->data(),
          sk.offset());
    SKTRACE(2, sk, "{\n");
    TCA topTrans = srcRec.getTopTranslation();
    for (size_t i = 0; i < tns.size(); ++i) {
      auto const rec = transdb::getTransRec(tns[i].mainStart());
      assertx(rec);
      SKTRACE(2, sk, "%zd %p\n", i, tns[i].mainStart());
      if (tns[i].mainStart() == topTrans) {
        SKTRACE(2, sk, "%zd: *Top*\n", i);
      }
      if (rec->kind == TransKind::Anchor) {
        SKTRACE(2, sk, "%zd: Anchor\n", i);
      } else {
        SKTRACE(2, sk, "%zd: guards {\n", i);
        for (unsigned j = 0; j < rec->guards.size(); ++j) {
          FTRACE(2, "{}\n", rec->guards[j]);
        }
        SKTRACE(2, sk, "%zd } guards\n", i);
      }
    }
    SKTRACE(2, sk, "} /* Too many translations */\n");
  }

  return true;
}
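// Hypothetical caller sketch (not from the original source): a translation
// request path could consult the limit before emitting another live
// translation for this SrcKey. Only reachedTranslationLimit() itself comes
// from the code above; the helper name and its use are illustrative.
static bool mayEmitNewTranslation(TransKind kind, SrcKey sk,
                                  const SrcRec& srcRec) {
  // Once the per-SrcKey limit is hit, callers fall back to an existing
  // translation or to the interpreter instead of emitting yet another one.
  return !reachedTranslationLimit(kind, sk, srcRec);
}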
bool Translator::isSrcKeyInBL(SrcKey sk) {
  auto unit = sk.unit();
  if (unit->isInterpretOnly()) return true;
  Lock l(m_dbgBlacklistLock);
  if (m_dbgBLSrcKey.find(sk) != m_dbgBLSrcKey.end()) {
    return true;
  }

  // Loop until the end of the basic block inclusively. This is useful for
  // function exit breakpoints, which are implemented by blacklisting the RetC
  // opcodes.
  PC pc = nullptr;
  do {
    pc = (pc == nullptr) ? unit->at(sk.offset()) : pc + instrLen(pc);
    if (m_dbgBLPC.checkPC(pc)) {
      m_dbgBLSrcKey.insert(sk);
      return true;
    }
  } while (!opcodeBreaksBB(peek_op(pc)));
  return false;
}
std::string showShort(SrcKey sk) {
  return folly::format("{}(id 0x{:#x})@{}",
                       sk.func()->fullName()->data(),
                       sk.getFuncId(),
                       sk.offset()).str();
}
/*
 * Checks if the given region is well-formed, which entails the
 * following properties:
 *
 *   1) The region has at least one block.
 *
 *   2) Each block in the region has a different id.
 *
 *   3) All arcs involve blocks within the region.
 *
 *   4) For each arc, the bytecode offset of the dst block must
 *      possibly follow the execution of the src block.
 *
 *   5) Each block contains at most one successor corresponding to a
 *      given SrcKey.
 *
 *   6) The region doesn't contain any loops, unless JitLoops is
 *      enabled.
 *
 *   7) All blocks are reachable from the entry block.
 *
 *   8) For each block, there must be a path from the entry to it that
 *      includes only earlier blocks in the region.
 *
 *   9) The region is topologically sorted unless loops are enabled.
 *
 *   10) The block-retranslation chains cannot have cycles.
 */
bool check(const RegionDesc& region, std::string& error) {

  auto bad = [&](const std::string& errorMsg) {
    error = errorMsg;
    return false;
  };

  // 1) The region has at least one block.
  if (region.empty()) return bad("empty region");

  RegionDesc::BlockIdSet blockSet;
  for (auto b : region.blocks()) {
    auto bid = b->id();
    // 2) Each block in the region has a different id.
    if (blockSet.count(bid)) {
      return bad(folly::sformat("many blocks with id {}", bid));
    }
    blockSet.insert(bid);
  }

  for (auto b : region.blocks()) {
    auto bid = b->id();
    SrcKey lastSk = region.block(bid)->last();
    OffsetSet validSuccOffsets = lastSk.succOffsets();
    OffsetSet succOffsets;

    for (auto succ : region.succs(bid)) {
      SrcKey succSk = region.block(succ)->start();
      Offset succOffset = succSk.offset();

      // 3) All arcs involve blocks within the region.
      if (blockSet.count(succ) == 0) {
        return bad(folly::sformat("arc with dst not in the region: {} -> {}",
                                  bid, succ));
      }

      // Checks 4) and 5) below don't make sense for arcs corresponding
      // to inlined calls and returns, so skip them in such cases.
      // This won't be possible once task #4076399 is done.
      if (lastSk.func() != succSk.func()) continue;

      // 4) For each arc, the bytecode offset of the dst block must
      //    possibly follow the execution of the src block.
      if (validSuccOffsets.count(succOffset) == 0) {
        return bad(folly::sformat("arc with impossible control flow: {} -> {}",
                                  bid, succ));
      }

      // 5) Each block contains at most one successor corresponding to a
      //    given SrcKey.
      if (succOffsets.count(succOffset) > 0) {
        return bad(folly::sformat("block {} has multiple successors with SK {}",
                                  bid, show(succSk)));
      }
      succOffsets.insert(succOffset);
    }
    for (auto pred : region.preds(bid)) {
      if (blockSet.count(pred) == 0) {
        return bad(folly::sformat("arc with src not in the region: {} -> {}",
                                  pred, bid));
      }
    }
  }

  // 6) is checked by dfsCheck.
  DFSChecker dfsCheck(region);
  if (!dfsCheck.check(region.entry()->id())) {
    return bad("region is cyclic");
  }

  // 7) All blocks are reachable from the entry (first) block.
  if (dfsCheck.numVisited() != blockSet.size()) {
    return bad("region has unreachable blocks");
  }

  // 8) and 9) are checked below.
  RegionDesc::BlockIdSet visited;
  auto& blocks = region.blocks();
  for (unsigned i = 0; i < blocks.size(); i++) {
    auto bid = blocks[i]->id();
    unsigned nVisited = 0;
    for (auto pred : region.preds(bid)) {
      nVisited += visited.count(pred);
    }
    // 8) For each block, there must be a path from the entry to it that
    //    includes only earlier blocks in the region.
    if (nVisited == 0 && i != 0) {
      return bad(folly::sformat("block {} appears before all its predecessors",
                                bid));
    }
    // 9) The region is topologically sorted unless loops are enabled.
    if (!RuntimeOption::EvalJitLoops &&
        nVisited != region.preds(bid).size()) {
      return bad(folly::sformat("non-topological order (bid: {})", bid));
    }
    visited.insert(bid);
  }

  // 10) The block-retranslation chains cannot have cycles.
  for (auto b : blocks) {
    auto bid = b->id();
    RegionDesc::BlockIdSet chainSet;
    chainSet.insert(bid);
    while (auto next = region.nextRetrans(bid)) {
      auto nextId = next.value();
      if (chainSet.count(nextId)) {
        return bad(folly::sformat("cyclic retranslation chain for block {}",
                                  bid));
      }
      chainSet.insert(nextId);
      bid = nextId;
    }
  }

  return true;
}
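// Hypothetical caller sketch (not part of the original source): a region
// selector could validate its result with check() and trace the failure
// reason before discarding a malformed region. Only check() itself comes from
// the code above; validateRegion and the trace message are illustrative.
static bool validateRegion(const RegionDesc& region) {
  std::string error;
  if (!check(region, error)) {
    FTRACE(1, "malformed region: {}\n", error);
    return false;
  }
  return true;
}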
std::string showShort(SrcKey sk) {
  if (!sk.valid()) return "<invalid SrcKey>";
  return folly::format("{}(id {:#x})@{}",
                       sk.func()->fullName()->data(),
                       sk.getFuncId(),
                       sk.offset()).str();
}