/*
 * Mark (and clobber) every live value on the VM stack.
 *
 * |trc| may be null: the |if (trc)| guards below show that in that case only
 * markAndClobberFrame() runs, i.e. this becomes a clobber-only pass with no
 * GC tracing — presumably used for poisoning dead stack values; confirm with
 * markAndClobberFrame's definition.
 */
void StackSpace::markAndClobber(JSTracer *trc) {
    /* NB: this depends on the continuity of segments in memory. */
    Value *nextSegEnd = firstUnused();
    for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
        /*
         * A segment describes a linear region of memory that contains a stack
         * of native and interpreted calls. For marking purposes, though, we
         * only need to distinguish between frames and values and mark
         * accordingly. Since native calls only push values on the stack, we
         * can effectively lump them together and just iterate over interpreted
         * calls. Thus, marking can view the stack as the regex:
         *   (segment slots (frame slots)*)*
         * which gets marked in reverse order.
         */
        Value *slotsEnd = nextSegEnd;
        /*
         * |pc| tracks the bytecode location associated with the span of
         * values currently being marked; it starts at the segment's resume
         * pc and is threaded backwards through the frame chain below.
         */
        jsbytecode *pc = seg->maybepc();
        /*
         * Frames live inside the segment's memory, so any frame pointer at
         * or below the segment header means the chain is exhausted.
         */
        for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
            /* Mark from fp->slots() to slotsEnd. */
            markAndClobberFrame(trc, fp, slotsEnd, pc);
            if (trc)
                fp->mark(trc);
            /* The next (older) span of values ends where this frame begins. */
            slotsEnd = (Value *)fp;
            InlinedSite *site;
            pc = fp->prevpc(&site);
            /* An inlined call site is only expected on the youngest frame. */
            JS_ASSERT_IF(fp->prev(), !site);
        }
        /* Mark the segment's own slots, below the oldest frame. */
        if (trc)
            gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
        nextSegEnd = (Value *)seg;
    }
}
/*
 * Conservatively mark every live value on the VM stack.
 *
 * JIT code can leave values in an incoherent (i.e., unsafe for precise
 * marking) state, hence every range is scanned with
 * MarkStackRangeConservatively.
 *
 * NB: relies on segments being laid out contiguously in memory.
 */
void StackSpace::mark(JSTracer *trc)
{
    Value *end = firstUnused();
    StackSegment *seg = seg_;
    while (seg) {
        /*
         * A segment is a linear region of memory holding a stack of native
         * and interpreted calls. For marking we only distinguish frames from
         * values; native calls only push values, so the stack matches the
         * regex (segment slots (frame slots)*)* and is marked in reverse
         * order, youngest frame first.
         */
        Value *spanEnd = end;
        StackFrame *fp = seg->maybefp();
        while ((Value *)fp > (Value *)seg) {
            /* Mark the value span from this frame's slots up to |spanEnd|. */
            MarkStackRangeConservatively(trc, fp->slots(), spanEnd);
            js_TraceStackFrame(trc, fp);
            spanEnd = (Value *)fp;
            fp = fp->prev();
        }
        /* Finally, the segment's own slots below the oldest frame. */
        MarkStackRangeConservatively(trc, seg->slotsBegin(), spanEnd);
        end = (Value *)seg;
        seg = seg->prevInMemory();
    }
}
void StackSpace::markActiveCompartments() { for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) { for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) MarkCompartmentActive(fp); } }
bool ContextStack::containsSlow(const StackFrame *target) const { for (StackSegment *s = seg_; s; s = s->prevInContext()) { if (s->contains(target)) return true; } return false; }
/*
 * Return the segment containing |target|. The caller guarantees the frame
 * lives somewhere in this stack space; failing to find it is a bug.
 */
StackSegment &
StackSpace::containingSegment(const StackFrame *target) const
{
    StackSegment *seg = seg_;
    while (seg) {
        if (seg->contains(target))
            return *seg;
        seg = seg->prevInMemory();
    }
    JS_NOT_REACHED("frame not in stack space");
    /* Unreachable; only here to silence missing-return warnings. */
    return *(StackSegment *)NULL;
}