void runtime·racefuncenter1(uintptr pc) { // If the caller PC is lessstack, use slower runtime·callers // to walk across the stack split to find the real caller. if(pc == (uintptr)runtime·lessstack) runtime·callers(2, &pc, 1); m->racecall = true; runtime∕race·FuncEnter(g->racectx, (void*)pc); m->racecall = false; }
// Callers places up to pc.len caller PCs into pc.array, skipping
// skip frames, and reports the number stored via retn.
void
runtime·Callers(intgo skip, Slice pc, intgo retn)
{
	// runtime.callers uses pc.array==nil as a signal
	// to print a stack trace. Pick off 0-length pc here
	// so that we don't let a nil pc slice get to it.
	retn = 0;
	if(pc.len != 0)
		retn = runtime·callers(skip, (uintptr*)pc.array, pc.len);
	FLUSH(&retn);
}
void runtime·racefuncenter(uintptr pc) { // If the caller PC is lessstack, use slower runtime·callers // to walk across the stack split to find the real caller. // Same thing if the PC is on the heap, which should be a // closure trampoline. if(pc == (uintptr)runtime·lessstack || (pc >= (uintptr)runtime·mheap.arena_start && pc < (uintptr)runtime·mheap.arena_used)) runtime·callers(2, &pc, 1); m->racecall = true; runtime∕race·FuncEnter(g->goid-1, (void*)pc); m->racecall = false; }
// Callers places up to pc.len caller PCs into pc.array, skipping
// skip frames, and reports the number stored via retn.
// Generated prologue: zero and flush the out-parameter first.
void
runtime·Callers(intgo skip, Slice pc, intgo retn)
{
	retn = 0;
	FLUSH(&retn);
#line 103 "/home/14/ren/source/golang/go/src/pkg/runtime/runtime1.goc"
	// runtime.callers uses pc.array==nil as a signal
	// to print a stack trace. Pick off 0-length pc here
	// so that we don't let a nil pc slice get to it.
	retn = 0;
	if(pc.len != 0)
		retn = runtime·callers(skip, (uintptr*)pc.array, pc.len);
	FLUSH(&retn);
}
void runtime·Callers(intgo skip, Slice pc, intgo retn) { retn = 0; FLUSH(&retn); #line 103 "C:\Users\ADMINI~1\AppData\Local\Temp\2\makerelease686069423\go\src\pkg\runtime\runtime1.goc" // runtime.callers uses pc.array==nil as a signal // to print a stack trace. Pick off 0-length pc here // so that we don't let a nil pc slice get to it. if(pc.len == 0) retn = 0; else retn = runtime·callers(skip, (uintptr*)pc.array, pc.len); FLUSH(&retn); }
// Caller reports the PC, file, and line of a caller skip frames up the
// calling goroutine's stack; retbool reports whether the lookup succeeded.
// Generated prologue: zero and flush all out-parameters first.
void
runtime·Caller(intgo skip, uintptr retpc, String retfile, intgo retline, bool retbool)
{
	retpc = 0;
	FLUSH(&retpc);
	retfile.str = 0;
	retfile.len = 0;
	FLUSH(&retfile);
	retline = 0;
	FLUSH(&retline);
	retbool = 0;
	FLUSH(&retbool);
#line 73 "/home/14/ren/source/golang/go/src/pkg/runtime/runtime1.goc"
	Func *fn, *caller;
	uintptr xpc;
	uintptr rpc[2];

	/*
	 * Ask for two PCs: the one we were asked for
	 * and what it called, so that we can see if it
	 * "called" sigpanic.
	 */
	retpc = 0;
	if(runtime·callers(1+skip-1, rpc, 2) < 2) {
		// Not enough frames on the stack.
		retfile = runtime·emptystring;
		retline = 0;
		retbool = false;
	} else if((fn = runtime·findfunc(rpc[1])) == nil) {
		// Unknown function for this PC.
		retfile = runtime·emptystring;
		retline = 0;
		retbool = true;	// have retpc at least
	} else {
		retpc = rpc[1];
		xpc = retpc;
		caller = runtime·findfunc(rpc[0]);
		// Step the PC back by one unless the frame below is sigpanic.
		if(xpc > fn->entry && (caller == nil || caller->entry != (uintptr)runtime·sigpanic))
			xpc--;
		retline = runtime·funcline(fn, xpc, &retfile);
		retbool = true;
	}
	FLUSH(&retpc);
	FLUSH(&retfile);
	FLUSH(&retline);
	FLUSH(&retbool);
}
void runtime·Caller(intgo skip, uintptr retpc, String retfile, intgo retline, bool retbool) { retpc = 0; FLUSH(&retpc); retfile.str = 0; retfile.len = 0; FLUSH(&retfile); retline = 0; FLUSH(&retline); retbool = 0; FLUSH(&retbool); #line 73 "C:\Users\ADMINI~1\AppData\Local\Temp\2\makerelease686069423\go\src\pkg\runtime\runtime1.goc" Func *f, *g; uintptr pc; uintptr rpc[2]; /* * Ask for two PCs: the one we were asked for * and what it called, so that we can see if it * "called" sigpanic. */ retpc = 0; if(runtime·callers(1+skip-1, rpc, 2) < 2) { retfile = runtime·emptystring; retline = 0; retbool = false; } else if((f = runtime·findfunc(rpc[1])) == nil) { retfile = runtime·emptystring; retline = 0; retbool = true; // have retpc at least } else { retpc = rpc[1]; pc = retpc; g = runtime·findfunc(rpc[0]); if(pc > f->entry && (g == nil || g->entry != (uintptr)runtime·sigpanic)) pc--; retline = runtime·funcline(f, pc, &retfile); retbool = true; } FLUSH(&retpc); FLUSH(&retfile); FLUSH(&retline); FLUSH(&retbool); }
// Approximate the number of distinct nmethods that send to this one.
// Slow; only an estimate is needed, so the scan is clipped at MaxCallers.
fint nmethod::ncallers() {
  const fint MaxCallers = 10;
  AddressList senders(MaxCallers);

  // Collect up to MaxCallers entries from the linked-sends list.
  fint collected = 0;
  for (nmln* link = linkedSends.next;
       collected < MaxCallers && link != &linkedSends;
       link = link->next, collected++) {
    senders.append((char*)link);
  }
  if (collected >= MaxCallers) return MaxCallers;

  // Sort the entries so duplicates become adjacent, then count runs
  // of entries whose asSender() is the same nmethod.
  qsort(senders.data_addr(), senders.length(), sizeof(char*), cmp_addrs);
  fint distinct = 0;
  fint len = senders.length();
  for (fint j = 0; j < len; ) {
    distinct++;
    nmethod* nm = ((nmln*)senders.nth(j))->asSender();
    while (++j < len && nm == ((nmln*)senders.nth(j))->asSender())
      ;
  }
  return distinct;
}
// mcommoninit initializes fields common to all Ms and publishes mp on
// runtime·allm so the garbage collector always sees it.
static void
mcommoninit(M *mp)
{
	mp->id = runtime·sched.mcount++;
	mp->fastrand = 0x49f6428aUL + mp->id + runtime·cputicks();

	if(mp->mcache == nil)
		mp->mcache = runtime·allocmcache();

	// Record the creating call stack.
	runtime·callers(1, mp->createstack, nelem(mp->createstack));

	// Add to runtime·allm so garbage collector doesn't free m
	// when it is just in a register or thread-local storage.
	mp->alllink = runtime·allm;
	// runtime·NumCgoCall() iterates over allm w/o schedlock,
	// so we need to publish it safely.
	runtime·atomicstorep(&runtime·allm, mp);
}
// mcommoninit initializes fields common to all Ms and publishes m on
// runtime·allm so the garbage collector always sees it.
static void
mcommoninit(M *m)
{
	m->id = runtime·sched.mcount++;
	m->fastrand = 0x49f6428aUL + m->id + runtime·cputicks();

	// Per-M allocator for fixed-size stack segments.
	m->stackalloc = runtime·malloc(sizeof(*m->stackalloc));
	runtime·FixAlloc_Init(m->stackalloc, FixedStack, runtime·SysAlloc, nil, nil);

	if(m->mcache == nil)
		m->mcache = runtime·allocmcache();

	// Record the creating call stack.
	runtime·callers(1, m->createstack, nelem(m->createstack));

	// Add to runtime·allm so garbage collector doesn't free m
	// when it is just in a register or thread-local storage.
	m->alllink = runtime·allm;
	// runtime·NumCgoCall() iterates over allm w/o schedlock,
	// so we need to publish it safely.
	runtime·atomicstorep(&runtime·allm, m);
}
// rangeaccess reports a read or write of the memory range starting at
// addr (size bytes, stride step) to the race detector, optionally
// bracketed by a synthetic function entry/exit for callpc.
// Addresses on the current stack are not instrumented.
static void
rangeaccess(void *addr, uintptr size, uintptr step, uintptr callpc, uintptr pc, bool write)
{
	uintptr racectx;

	if(onstack((uintptr)addr))
		return;

	m->racecall = true;
	racectx = g->racectx;
	if(callpc) {
		// lessstack hides the real caller across a stack split;
		// walk the stack to recover it.
		if(callpc == (uintptr)runtime·lessstack)
			runtime·callers(3, &callpc, 1);
		runtime∕race·FuncEnter(racectx, (void*)callpc);
	}
	if(write)
		runtime∕race·WriteRange(racectx, addr, size, step, (void*)pc);
	else
		runtime∕race·ReadRange(racectx, addr, size, step, (void*)pc);
	if(callpc)
		runtime∕race·FuncExit(racectx);
	m->racecall = false;
}
// memoryaccess reports a single read or write of addr to the race
// detector, optionally bracketed by a synthetic function entry/exit
// for callpc. Addresses on the current stack are not instrumented.
static void
memoryaccess(void *addr, uintptr callpc, uintptr pc, bool write)
{
	int64 goid;

	if(onstack((uintptr)addr))
		return;

	m->racecall = true;
	goid = g->goid-1;
	if(callpc) {
		// lessstack, or a PC on the heap (presumably a closure
		// trampoline), hides the real caller; walk the stack to
		// recover it.
		if(callpc == (uintptr)runtime·lessstack
		|| (callpc >= (uintptr)runtime·mheap.arena_start && callpc < (uintptr)runtime·mheap.arena_used))
			runtime·callers(3, &callpc, 1);
		runtime∕race·FuncEnter(goid, (void*)callpc);
	}
	if(write)
		runtime∕race·Write(goid, addr, (void*)pc);
	else
		runtime∕race·Read(goid, addr, (void*)pc);
	if(callpc)
		runtime∕race·FuncExit(goid);
	m->racecall = false;
}
// Convenience overload: resolve the target function to its graph
// vertex, then delegate to the vertex-based callers().
std::vector<Function::Ptr>
FunctionCallGraph::callers(const Function::Ptr &target) const
{
    return callers(findFunction(target));
}