std::unique_ptr<llvm::MemoryBuffer> PystonObjectCache::getObject(const llvm::Module* M) {
    static StatCounter jit_objectcache_hits("num_jit_objectcache_hits");
    static StatCounter jit_objectcache_misses("num_jit_objectcache_misses");

    module_identifier = M->getModuleIdentifier();

    RELEASE_ASSERT(!hash_before_codegen.empty(), "hash should have already been calculated");

    if (!haveCacheFileForHash()) {
#if 0
        // This code helps with identifying why we got a cache miss for a file.
        // - clear the cache directory
        // - run pyston
        // - run pyston a second time
        // - Now look for "*_second.ll" files in the cache directory and compare them to the "*_first.ll" IR dump
        std::string llvm_ir;
        llvm::raw_string_ostream sstr(llvm_ir);
        M->print(sstr, 0);
        sstr.flush();

        llvm::sys::fs::create_directories(cache_dir.str());
        std::string filename = cache_dir.str().str() + "/" + module_identifier + "_first.ll";
        if (llvm::sys::fs::exists(filename))
            filename = cache_dir.str().str() + "/" + module_identifier + "_second.ll";
        FILE* f = fopen(filename.c_str(), "wt");
        ASSERT(f, "%s", strerror(errno));
        fwrite(llvm_ir.c_str(), 1, llvm_ir.size(), f);
        fclose(f);
#endif

        // This file isn't in our cache
        jit_objectcache_misses.log();
        return NULL;
    }

    llvm::SmallString<128> cache_file = cache_dir;
    llvm::sys::path::append(cache_file, hash_before_codegen);

    std::unique_ptr<llvm::MemoryBuffer> mem_buff = CompressedFile::getFile(cache_file);
    if (!mem_buff) {
        jit_objectcache_misses.log();
        return NULL;
    }

    jit_objectcache_hits.log();
    return mem_buff;
}
extern "C" void PyString_InternInPlace(PyObject** p) noexcept {
    BoxedString* s = (BoxedString*)*p;
    if (s == NULL || !PyString_Check(s))
        Py_FatalError("PyString_InternInPlace: strings only please!");

    /* If it's a string subclass, we don't really know what putting
       it in the interned dict might do. */
    if (!PyString_CheckExact(s))
        return;

    if (PyString_CHECK_INTERNED(s))
        return;

    auto it = interned_strings.find(s);
    if (it != interned_strings.end()) {
        auto entry = *it;
        Py_INCREF(entry);
        Py_DECREF(*p);
        *p = entry;
    } else {
        // TODO: do CPython's refcounting here
        num_interned_strings.log();
        interned_strings.insert(s);
        Py_INCREF(s);

        // CPython returns mortal but in our current implementation they are immortal
        s->interned_state = SSTATE_INTERNED_IMMORTAL;
    }
}
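// A hedged caller-side sketch (not from the Pyston sources): this is how the Python 2 C API
// interning entry point above is typically used. The function, string literal, and variable
// names are illustrative only. Interning replaces the pointer in place with the canonical
// interned string, so the caller passes the address of its own reference.
static void exampleInterningCaller() {
    PyObject* key = PyString_FromString("example_attribute"); // hypothetical key
    if (!key)
        return;
    PyString_InternInPlace(&key);         // may swap *key for an existing interned entry
    assert(PyString_CHECK_INTERNED(key)); // the result is marked as interned
    // ... use `key` for fast identity-based dict lookups ...
    Py_DECREF(key);
}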
PhiAnalysis::PhiAnalysis(const SourceInfo::ArgNames& arg_names, CFG* cfg, LivenessAnalysis* liveness,
                         ScopeInfo* scope_info)
    : definedness(arg_names, cfg, scope_info), liveness(liveness) {
    Timer _t("PhiAnalysis()", 10);

    for (CFGBlock* block : cfg->blocks) {
        RequiredSet required;
        if (block->predecessors.size() > 1) {
            for (CFGBlock* pred : block->predecessors) {
                const RequiredSet& defined = definedness.getDefinedNamesAtEnd(pred);
                for (const auto& s : defined) {
                    if (required.count(s) == 0 && liveness->isLiveAtEnd(s, pred)) {
                        // printf("%d-%d %s\n", pred->idx, block->idx, s.c_str());
                        required.insert(s);
                    }
                }
            }
        }
        required_phis.insert(make_pair(block, std::move(required)));
    }

    static StatCounter us_phis("us_compiling_analysis_phis");
    us_phis.log(_t.end());
}
void* compilePartialFunc(OSRExit* exit) {
    LOCK_REGION(codegen_rwlock.asWrite());

    assert(exit);
    assert(exit->parent_cf);
    assert(exit->parent_cf->effort < EffortLevel::MAXIMAL);
    stat_osrexits.log();

    // if (VERBOSITY("irgen") >= 1) printf("In compilePartialFunc, handling %p\n", exit);

    assert(exit->parent_cf->clfunc);
    CompiledFunction*& new_cf = exit->parent_cf->clfunc->osr_versions[exit->entry];
    if (new_cf == NULL) {
        EffortLevel::EffortLevel new_effort = EffortLevel::MAXIMAL;
        if (exit->parent_cf->effort == EffortLevel::INTERPRETED)
            new_effort = EffortLevel::MINIMAL;
        // EffortLevel::EffortLevel new_effort = (EffortLevel::EffortLevel)(exit->parent_cf->effort + 1);
        // new_effort = EffortLevel::MAXIMAL;
        CompiledFunction* compiled
            = compileFunction(exit->parent_cf->clfunc, exit->parent_cf->spec, new_effort, exit->entry);
        assert(compiled == new_cf);
    }

    return new_cf->code;
}
AST_Module* parse_file(const char* fn) {
    STAT_TIMER(t0, "us_timer_cpyton_parsing");

    Timer _t("parsing");

    if (ENABLE_PYPA_PARSER) {
        AST_Module* rtn = pypa_parse(fn);
        assert(rtn);
        return rtn;
    }

    FILE* fp = popen(getParserCommandLine(fn).c_str(), "r");

    BufferedReader* reader = new BufferedReader(fp);
    AST* rtn = readASTMisc(reader);

    reader->fill();
    ASSERT(reader->bytesBuffered() == 0, "%d", reader->bytesBuffered());
    delete reader;

    int code = pclose(fp);
    assert(code == 0);

    assert(rtn->type == AST_TYPE::Module);

    long us = _t.end();
    static StatCounter us_parsing("us_parsing");
    us_parsing.log(us);

    return ast_cast<AST_Module>(rtn);
}
std::unique_ptr<PhiAnalysis> computeRequiredPhis(const OSREntryDescriptor* entry_descriptor,
                                                 LivenessAnalysis* liveness) {
    static StatCounter counter("num_phi_analysis");
    counter.log();

    auto cfg = entry_descriptor->code->source->cfg;
    int num_vregs = cfg->getVRegInfo().getTotalNumOfVRegs();
    VRegMap<DefinednessAnalysis::DefinitionLevel> initial_map(num_vregs);

    for (int vreg = 0; vreg < num_vregs; vreg++) {
        initial_map[vreg] = DefinednessAnalysis::Undefined;
    }

    for (const auto& p : entry_descriptor->args) {
        int vreg = p.first;
        ASSERT(initial_map[vreg] == DefinednessAnalysis::Undefined, "%d %d", vreg, initial_map[vreg]);
        if (entry_descriptor->potentially_undefined[vreg])
            initial_map[vreg] = DefinednessAnalysis::PotentiallyDefined;
        else
            initial_map[vreg] = DefinednessAnalysis::Defined;
    }

    return std::unique_ptr<PhiAnalysis>(
        new PhiAnalysis(std::move(initial_map), entry_descriptor->backedge->target, true, liveness));
}
std::unique_ptr<PhiAnalysis> computeRequiredPhis(const ParamNames& args, CFG* cfg, LivenessAnalysis* liveness) {
    static StatCounter counter("num_phi_analysis");
    counter.log();

    auto&& vreg_info = cfg->getVRegInfo();
    int num_vregs = vreg_info.getTotalNumOfVRegs();
    VRegMap<DefinednessAnalysis::DefinitionLevel> initial_map(num_vregs);
    assert(vreg_info.hasVRegsAssigned());

    for (int vreg = 0; vreg < num_vregs; vreg++) {
        initial_map[vreg] = DefinednessAnalysis::Undefined;
    }

    for (BST_Name* n : args.allArgsAsName()) {
        ScopeInfo::VarScopeType vst = n->lookup_type;
        assert(vst != ScopeInfo::VarScopeType::UNKNOWN);
        assert(vst != ScopeInfo::VarScopeType::GLOBAL); // global-and-local error
        if (vst == ScopeInfo::VarScopeType::NAME)
            continue;

        assert(n->vreg >= 0);
        initial_map[n->vreg] = DefinednessAnalysis::Defined;
    }

    assert(initial_map.numVregs() == vreg_info.getTotalNumOfVRegs());
    return std::unique_ptr<PhiAnalysis>(
        new PhiAnalysis(std::move(initial_map), cfg->getStartingBlock(), false, liveness));
}
PhiAnalysis::PhiAnalysis(VRegMap<DefinednessAnalysis::DefinitionLevel> initial_map, CFGBlock* initial_block,
                         bool initials_need_phis, LivenessAnalysis* liveness)
    : definedness(), empty_set(initial_map.numVregs()), liveness(liveness) {
    auto cfg = initial_block->cfg;
    auto&& vreg_info = cfg->getVRegInfo();

    // I think this should always be the case -- if we're going to generate phis for the initial block,
    // then we should include the initial arguments as an extra entry point.
    assert(initials_need_phis == (initial_block->predecessors.size() > 0));

    int num_vregs = initial_map.numVregs();
    assert(num_vregs == vreg_info.getTotalNumOfVRegs());

    definedness.run(liveness->getCodeConstants(), std::move(initial_map), initial_block);

    Timer _t("PhiAnalysis()", 10);

    for (const auto& p : definedness.defined_at_end) {
        CFGBlock* block = p.first;
        assert(!required_phis.count(block));
        VRegSet& required = required_phis.insert(std::make_pair(block, VRegSet(num_vregs))).first->second;

        int npred = 0;
        for (CFGBlock* pred : block->predecessors) {
            if (definedness.defined_at_end.count(pred))
                npred++;
        }

        if (npred > 1 || (initials_need_phis && block == initial_block)) {
            for (CFGBlock* pred : block->predecessors) {
                if (!definedness.defined_at_end.count(pred))
                    continue;

                const VRegSet& defined = definedness.getDefinedVregsAtEnd(pred);
                for (int vreg : defined) {
                    if (!required[vreg] && liveness->isLiveAtEnd(vreg, pred)) {
                        // printf("%d-%d %s\n", pred->idx, block->idx, vreg_info.getName(vreg).c_str());
                        required.set(vreg);
                    }
                }
            }
        }

        if (VERBOSITY() >= 3) {
            printf("Phis required at end of %d:", block->idx);
            for (auto vreg : required) {
                printf(" %s", vreg_info.getName(vreg).c_str());
            }
            printf("\n");
        }
    }

    static StatCounter us_phis("us_compiling_analysis_phis");
    us_phis.log(_t.end());
}
void ICSlotRewrite::commit(CommitHook* hook, std::vector<void*> gc_references) {
    bool still_valid = true;
    for (int i = 0; i < dependencies.size(); i++) {
        int orig_version = dependencies[i].second;
        ICInvalidator* invalidator = dependencies[i].first;
        if (orig_version != invalidator->version()) {
            still_valid = false;
            break;
        }
    }
    if (!still_valid) {
        if (VERBOSITY() >= 3)
            printf("not committing %s icentry since a dependency got updated before commit\n", debug_name);
        return;
    }

    uint8_t* slot_start = getSlotStart();
    uint8_t* continue_point = (uint8_t*)ic->continue_addr;

    bool do_commit = hook->finishAssembly(continue_point - slot_start);

    if (!do_commit)
        return;

    assert(!assembler.hasFailed());

    for (int i = 0; i < dependencies.size(); i++) {
        ICInvalidator* invalidator = dependencies[i].first;
        invalidator->addDependent(ic_entry);
    }

    ic->next_slot_to_try++;

    // if (VERBOSITY()) printf("Committing to %p-%p\n", start, start + ic->slot_size);
    memcpy(slot_start, buf, ic->getSlotSize());

    for (auto p : ic_entry->gc_references) {
        int& i = ic_gc_references[p];
        if (i == 1)
            ic_gc_references.erase(p);
        else
            --i;
    }
    ic_entry->gc_references = std::move(gc_references);
    for (auto p : ic_entry->gc_references)
        ic_gc_references[p]++;

    ic->times_rewritten++;

    if (ic->times_rewritten == IC_MEGAMORPHIC_THRESHOLD) {
        static StatCounter megamorphic_ics("megamorphic_ics");
        megamorphic_ics.log();
    }

    llvm::sys::Memory::InvalidateInstructionCache(slot_start, ic->getSlotSize());
}
void _bytesAllocatedTripped() {
    gc_registered_bytes.log(bytesAllocatedSinceCollection);
    bytesAllocatedSinceCollection = 0;

    if (!gcIsEnabled())
        return;

    threading::GLPromoteRegion _lock;

    runCollection();
}
void Rewriter::commit() {
    static StatCounter rewriter_commits("rewriter_commits");
    rewriter_commits.log();

    // make sure we left the stack the way we found it:
    assert(pushes.size() == 0);
    assert(alloca_bytes == 0);

    rewrite->commit(decision_path, this);
}
extern "C" char* reoptCompiledFunc(CompiledFunction* cf) {
    if (VERBOSITY("irgen") >= 1)
        printf("In reoptCompiledFunc, %p, %ld\n", cf, cf->times_called);
    stat_reopt.log();

    assert(cf->effort < EffortLevel::MAXIMAL);
    assert(cf->clfunc->versions.size());
    CompiledFunction* new_cf = _doReopt(cf, (EffortLevel::EffortLevel(cf->effort + 1)));
    assert(!new_cf->is_interpreted);
    return (char*)new_cf->code;
}
void runCollection() { static StatCounter sc("gc_collections"); sc.log(); ncollections++; if (VERBOSITY("gc") >= 2) printf("Collection #%d\n", ncollections); Timer _t("collecting", /*min_usec=*/10000); markPhase(); sweepPhase(); if (VERBOSITY("gc") >= 2) printf("Collection #%d done\n\n", ncollections); long us = _t.end(); static StatCounter sc_us("gc_collections_us"); sc_us.log(us); }
virtual bool runOnFunction(llvm::Function& f) {
    Timer _t("inlining");

    bool rtn = _runOnFunction(f);

    static StatCounter us_inlining("us_compiling_optimizing_inlining");
    long us = _t.end();
    us_inlining.log(us);

    return rtn;
}
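// The same instrumentation pattern recurs throughout these snippets: a function-local
// `static StatCounter` (so registration happens only once) paired with a scoped `Timer`
// whose elapsed microseconds are logged on the way out. Below is a minimal, self-contained
// sketch of that pattern; the StatCounter/Timer classes here are illustrative stand-ins
// written for this example, not Pyston's actual implementations.
#include <chrono>

struct StatCounter {
    const char* name;
    long count = 0;
    explicit StatCounter(const char* name) : name(name) {}
    void log(long n = 1) { count += n; } // accumulates an event count or a duration in us
};

struct Timer {
    std::chrono::steady_clock::time_point start = std::chrono::steady_clock::now();
    long end() { // microseconds elapsed since construction
        return (long)std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::steady_clock::now() - start)
            .count();
    }
};

void someExpensivePass() {
    static StatCounter us_pass("us_hypothetical_pass"); // created once, shared by every call
    Timer _t;

    // ... the actual work being measured would go here ...

    us_pass.log(_t.end()); // record how long this invocation took
}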
void runCollection() {
    static StatCounter sc("gc_collections");
    sc.log();

    ncollections++;

    if (VERBOSITY("gc") >= 2)
        printf("Collection #%d\n", ncollections);

    Timer _t("collecting", /*min_usec=*/10000);

    markPhase();

    std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
    sweepPhase(weakly_referenced);
    for (auto o : weakly_referenced) {
        PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
        while (PyWeakReference* head = *list) {
            assert(isValidGCObject(head));
            if (head->wr_object != Py_None) {
                _PyWeakref_ClearRef(head);
                if (head->wr_callback) {
                    runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head), NULL, NULL, NULL,
                                NULL);
                    head->wr_callback = NULL;
                }
            }
        }
    }

    if (VERBOSITY("gc") >= 2)
        printf("Collection #%d done\n\n", ncollections);

    long us = _t.end();
    static StatCounter sc_us("gc_collections_us");
    sc_us.log(us);

    // dumpHeapStatistics();
}
static void compileIR(CompiledFunction* cf, EffortLevel::EffortLevel effort) {
    assert(cf);
    assert(cf->func);

    // g.engine->finalizeObject();

    if (VERBOSITY("irgen") >= 1) {
        printf("Compiling...\n");
        // g.cur_module->dump();
    }

    void* compiled = NULL;
    if (effort > EffortLevel::INTERPRETED) {
        Timer _t("to jit the IR");
#if LLVMREV < 215967
        g.engine->addModule(cf->func->getParent());
#else
        g.engine->addModule(std::unique_ptr<llvm::Module>(cf->func->getParent()));
#endif
        compiled = (void*)g.engine->getFunctionAddress(cf->func->getName());
        assert(compiled);
        cf->llvm_code = embedConstantPtr(compiled, cf->func->getType());

        long us = _t.end();
        static StatCounter us_jitting("us_compiling_jitting");
        us_jitting.log(us);
        static StatCounter num_jits("num_jits");
        num_jits.log();
    } else {
        // HAX just get it for now; this is just to make sure everything works
        //(void*)g.func_registry.getFunctionAddress(cf->func->getName());
    }

    cf->code = compiled;
    if (VERBOSITY("irgen") >= 1) {
        printf("Compiled function to %p\n", compiled);
    }

    StackMap* stackmap = parseStackMap();
    patchpoints::processStackmap(stackmap);
}
// This probably belongs in list.cpp?
extern "C" void listGCHandler(GCVisitor* v, void* p) {
    boxGCHandler(v, p);

    BoxedList* l = (BoxedList*)p;
    int size = l->size;
    if (size) {
        v->visit(l->elts);
        v->visitRange((void**)&l->elts->elts[0], (void**)&l->elts->elts[size]);
    }

    static StatCounter sc("gc_listelts_visited");
    sc.log(size);
}
void runCollection() {
    static StatCounter sc("gc_collections");
    sc.log();

    ncollections++;
    if (VERBOSITY("gc") >= 2)
        printf("Collection #%d\n", ncollections);

    // if (ncollections == 754) {
    //     raise(SIGTRAP);
    // }

    markPhase();
    sweepPhase();
}
bool LivenessAnalysis::isLiveAtEnd(const std::string& name, CFGBlock* block) {
    Timer _t("LivenessAnalysis()", 10);

    if (name[0] != '#')
        return true;

    if (block->successors.size() == 0)
        return false;

    int idx = getStringIndex(name);
    if (!result_cache.count(idx)) {
        std::unordered_map<CFGBlock*, bool>& map = result_cache[idx];

        // Approach:
        // - Find all uses (blocks where the status is USED)
        // - Trace backwards, marking all blocks as live-at-end
        // - If we hit a block that is KILLED, stop
        for (CFGBlock* b : cfg->blocks) {
            auto status = liveness_cache[b]->nameStatus(idx);

            if (status != LivenessBBVisitor::USED)
                continue;

            std::deque<CFGBlock*> q;
            for (CFGBlock* pred : b->predecessors) {
                q.push_back(pred);
            }

            while (q.size()) {
                CFGBlock* thisblock = q.front();
                q.pop_front();

                if (map[thisblock])
                    continue;

                map[thisblock] = true;
                if (liveness_cache[thisblock]->nameStatus(idx) != LivenessBBVisitor::KILLED) {
                    for (CFGBlock* pred : thisblock->predecessors) {
                        q.push_back(pred);
                    }
                }
            }
        }
    }

    // Note: this one gets counted as part of us_compiling_irgen as well:
    static StatCounter us_liveness("us_compiling_analysis_liveness");
    us_liveness.log(_t.end());

    return result_cache[idx][block];
}
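// The comment block above is the heart of the liveness query: seed a worklist with the
// predecessors of every block that uses the name, then walk predecessor edges marking each
// visited block live-at-end, stopping whenever a block kills (unconditionally redefines)
// the name. The stand-alone sketch below shows that traversal on a hypothetical `Block`
// struct; it is illustrative only and not Pyston's CFGBlock/LivenessBBVisitor machinery.
#include <deque>
#include <unordered_map>
#include <vector>

struct Block {
    std::vector<Block*> predecessors;
    bool uses = false;  // reads the name before writing it
    bool kills = false; // unconditionally writes the name
};

// For every block, compute whether the name is live at the end of that block.
std::unordered_map<Block*, bool> computeLiveAtEnd(const std::vector<Block*>& blocks) {
    std::unordered_map<Block*, bool> live_at_end;
    for (Block* b : blocks) {
        if (!b->uses)
            continue;
        // Seed the worklist with the predecessors of each use...
        std::deque<Block*> q(b->predecessors.begin(), b->predecessors.end());
        while (!q.empty()) {
            Block* cur = q.front();
            q.pop_front();
            if (live_at_end[cur])
                continue; // already marked via another use
            live_at_end[cur] = true;
            // ...and keep walking backwards until a killing definition stops the propagation.
            if (!cur->kills)
                for (Block* pred : cur->predecessors)
                    q.push_back(pred);
        }
    }
    return live_at_end;
}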
LivenessAnalysis::LivenessAnalysis(CFG* cfg) : cfg(cfg) {
    Timer _t("LivenessAnalysis()", 10);

    for (CFGBlock* b : cfg->blocks) {
        auto visitor = new LivenessBBVisitor(this); // livenessCache unique_ptr will delete it.
        for (AST_stmt* stmt : b->body) {
            stmt->accept(visitor);
        }
        liveness_cache.insert(std::make_pair(b, std::unique_ptr<LivenessBBVisitor>(visitor)));
    }

    static StatCounter us_liveness("us_compiling_analysis_liveness");
    us_liveness.log(_t.end());
}
Rewriter* Rewriter::createRewriter(void* ic_rtn_addr, int num_orig_args, int num_temp_regs, const char* debug_name) {
    assert(num_temp_regs <= 2 && "unsupported");

    static StatCounter rewriter_nopatch("rewriter_nopatch");

    ICInfo* ic = getICInfo(ic_rtn_addr);
    if (ic == NULL) {
        rewriter_nopatch.log();
        return NULL;
    }

    assert(ic->getCallingConvention() == llvm::CallingConv::C
           && "Rewriter[1] only supports the C calling convention!");

    return new Rewriter(ic->startRewrite(debug_name), num_orig_args, num_temp_regs);
}
BoxedString* internStringImmortal(llvm::StringRef s) noexcept {
    auto it = interned_strings.find_as(s);
    if (it != interned_strings.end())
        return incref(*it);

    num_interned_strings.log();
    BoxedString* entry = boxString(s);
    // CPython returns mortal but in our current implementation they are immortal
    entry->interned_state = SSTATE_INTERNED_IMMORTAL;
    interned_strings.insert((BoxedString*)entry);
    Py_INCREF(entry);
    return entry;
}
LivenessAnalysis::LivenessAnalysis(CFG* cfg, const CodeConstants& code_constants)
    : cfg(cfg), code_constants(code_constants), result_cache(cfg->getVRegInfo().getTotalNumOfVRegs()) {
    Timer _t("LivenessAnalysis()", 100);

    for (CFGBlock* b : cfg->blocks) {
        auto visitor = new LivenessBBVisitor(this); // livenessCache unique_ptr will delete it.
        for (BST_stmt* stmt : b->body) {
            stmt->accept(visitor);
        }
        liveness_cache.insert(std::make_pair(b, std::unique_ptr<LivenessBBVisitor>(visitor)));
    }

    static StatCounter us_liveness("us_compiling_analysis_liveness");
    us_liveness.log(_t.end());
}
void ICSlotRewrite::commit(CommitHook* hook) {
    bool still_valid = true;
    for (int i = 0; i < dependencies.size(); i++) {
        int orig_version = dependencies[i].second;
        ICInvalidator* invalidator = dependencies[i].first;
        if (orig_version != invalidator->version()) {
            still_valid = false;
            break;
        }
    }
    if (!still_valid) {
        if (VERBOSITY() >= 3)
            printf("not committing %s icentry since a dependency got updated before commit\n", debug_name);
        return;
    }

    ICSlotInfo* ic_entry = ic->pickEntryForRewrite(debug_name);
    if (ic_entry == NULL)
        return;

    uint8_t* slot_start = (uint8_t*)ic->start_addr + ic_entry->idx * ic->getSlotSize();
    uint8_t* continue_point = (uint8_t*)ic->continue_addr;

    bool do_commit = hook->finishAssembly(ic_entry, continue_point - slot_start);

    if (!do_commit)
        return;

    assert(assembler->isExactlyFull());
    assert(!assembler->hasFailed());

    for (int i = 0; i < dependencies.size(); i++) {
        ICInvalidator* invalidator = dependencies[i].first;
        invalidator->addDependent(ic_entry);
    }

    // if (VERBOSITY()) printf("Committing to %p-%p\n", start, start + ic->slot_size);
    memcpy(slot_start, buf, ic->getSlotSize());

    ic->times_rewritten++;

    if (ic->times_rewritten == MEGAMORPHIC_THRESHOLD) {
        static StatCounter megamorphic_ics("megamorphic_ics");
        megamorphic_ics.log();
    }

    llvm::sys::Memory::InvalidateInstructionCache(slot_start, ic->getSlotSize());
}
void logException(ExcInfo* exc_info) {
#if STAT_EXCEPTIONS
    static StatCounter num_exceptions("num_exceptions");
    num_exceptions.log();

    std::string stat_name;
    if (PyType_Check(exc_info->type))
        stat_name = "num_exceptions_" + std::string(static_cast<BoxedClass*>(exc_info->type)->tp_name);
    else
        stat_name = "num_exceptions_" + std::string(exc_info->value->cls->tp_name);
    Stats::log(Stats::getStatCounter(stat_name));
#if STAT_EXCEPTIONS_LOCATION
    logByCurrentPythonLine(stat_name);
#endif
#endif
}
// Parsing the file is somewhat expensive since we have to shell out to cpython;
// it's not a huge deal right now, but this caching version can significantly cut down
// on the startup time (40ms -> 10ms).
AST_Module* caching_parse(const char* fn) {
    Timer _t("parsing");

    int code;

    std::string cache_fn = std::string(fn) + "c";

    struct stat source_stat, cache_stat;
    code = stat(fn, &source_stat);
    assert(code == 0);
    code = stat(cache_fn.c_str(), &cache_stat);
    if (code != 0 || cache_stat.st_mtime < source_stat.st_mtime
        || (cache_stat.st_mtime == source_stat.st_mtime
            && cache_stat.st_mtim.tv_nsec < source_stat.st_mtim.tv_nsec)) {
        _reparse(fn, cache_fn);
    }

    FILE* fp = fopen(cache_fn.c_str(), "r");
    assert(fp);

    while (true) {
        char buf[MAGIC_STRING_LENGTH];
        int read = fread(buf, 1, MAGIC_STRING_LENGTH, fp);
        if (read != 4 || strncmp(buf, MAGIC_STRING, MAGIC_STRING_LENGTH) != 0) {
            fclose(fp);
            _reparse(fn, cache_fn);
            fp = fopen(cache_fn.c_str(), "r");
            assert(fp);
        } else {
            break;
        }
    }

    BufferedReader* reader = new BufferedReader(fp);
    AST* rtn = readASTMisc(reader);
    reader->fill();
    assert(reader->bytesBuffered() == 0);
    delete reader;

    assert(rtn->type == AST_TYPE::Module);

    long us = _t.end();
    static StatCounter us_parsing("us_parsing");
    us_parsing.log(us);

    return static_cast<AST_Module*>(rtn);
}
__attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced) {
    static StatCounter gc_safe_destructors("gc_safe_destructor_calls");

#ifndef NVALGRIND
    VALGRIND_DISABLE_ERROR_REPORTING;
#endif
    GCKind alloc_kind = al->kind_id;
#ifndef NVALGRIND
    VALGRIND_ENABLE_ERROR_REPORTING;
#endif

    if (alloc_kind == GCKind::PYTHON || alloc_kind == GCKind::CONSERVATIVE_PYTHON) {
#ifndef NVALGRIND
        VALGRIND_DISABLE_ERROR_REPORTING;
#endif
        Box* b = (Box*)al->user_data;
#ifndef NVALGRIND
        VALGRIND_ENABLE_ERROR_REPORTING;
#endif
        assert(b->cls);

        if (isWeaklyReferenced(b)) {
            assert(weakly_referenced && "attempting to free a weakly referenced object manually");
            weakly_referenced->push_back(b);
            return false;
        }

        ASSERT(!hasOrderedFinalizer(b->cls) || hasFinalized(al) || alloc_kind == GCKind::CONSERVATIVE_PYTHON, "%s",
               getTypeName(b));

        if (b->cls->tp_dealloc != dealloc_null && b->cls->has_safe_tp_dealloc) {
            gc_safe_destructors.log();

            GCAllocation* al = GCAllocation::fromUserData(b);
            assert(!hasFinalized(al));
            assert(!hasOrderedFinalizer(b->cls));

            // Don't bother setting the finalized flag since the object is getting freed right now.
            b->cls->tp_dealloc(b);
        }
    }
    return true;
}
DefinednessAnalysis::DefinednessAnalysis(const SourceInfo::ArgNames& arg_names, CFG* cfg, ScopeInfo* scope_info)
    : scope_info(scope_info) {
    Timer _t("DefinednessAnalysis()", 10);

    results = computeFixedPoint(cfg, DefinednessBBAnalyzer(cfg, arg_names), false);

    for (const auto& p : results) {
        RequiredSet required;
        for (const auto& p2 : p.second) {
            if (scope_info->refersToGlobal(p2.first))
                continue;

            // printf("%d %s %d\n", p.first->idx, p2.first.c_str(), p2.second);
            required.insert(p2.first);
        }
        defined_at_end.insert(make_pair(p.first, required));
    }

    static StatCounter us_definedness("us_compiling_analysis_definedness");
    us_definedness.log(_t.end());
}
void NotifyObjectEmitted(const llvm::ObjectImage& Obj) {
    static StatCounter code_bytes("code_bytes");
    code_bytes.log(Obj.getData().size());

    llvm::error_code code;
    for (llvm::object::symbol_iterator I = Obj.begin_symbols(), E = Obj.end_symbols(); I != E;
#if LLVMREV < 200442
         I = I.increment(code)
#else
         ++I
#endif
         ) {
        llvm::object::section_iterator section(Obj.end_sections());
        code = I->getSection(section);
        assert(!code);
        bool is_text;
        code = section->isText(is_text);
        assert(!code);
        if (!is_text)
            continue;

        llvm::StringRef name;
        uint64_t addr, size, offset;
        code = I->getName(name);
        assert(!code);
        code = I->getAddress(addr);
        assert(!code);
        code = I->getSize(size);
        assert(!code);
        code = I->getFileOffset(offset);
        assert(!code);

        if (name == ".text")
            continue;

        // printf("%lx %lx %lx %s\n", addr, addr + size, offset, name.data());
        g.func_addr_registry.registerFunction(name.data(), (void*)addr, size, NULL);
    }
}
virtual void NotifyObjectEmitted(const llvm::object::ObjectFile& Obj,
                                 const llvm::RuntimeDyld::LoadedObjectInfo& L) {
    static StatCounter code_bytes("code_bytes");
    code_bytes.log(Obj.getData().size());

    llvm_error_code code;
    for (const auto& sym : Obj.symbols()) {
        llvm::object::section_iterator section(Obj.section_end());
        code = sym.getSection(section);
        assert(!code);
        bool is_text;
#if LLVMREV < 219314
        code = section->isText(is_text);
        assert(!code);
#else
        is_text = section->isText();
#endif
        if (!is_text)
            continue;

        llvm::StringRef name;
        code = sym.getName(name);
        assert(!code);
        uint64_t size;
        code = sym.getSize(size);
        assert(!code);

        if (name == ".text")
            continue;

        uint64_t sym_addr = L.getSymbolLoadAddress(name);
        assert(sym_addr);
        g.func_addr_registry.registerFunction(name.data(), (void*)sym_addr, size, NULL);
    }
}