/*
 * Expand a suspended generator's floating (heap-resident) frame onto the VM
 * stack so it can resume execution. On success the guard |gfg| owns the
 * pushed state and popGeneratorFrame copies it back on suspend/return.
 */
bool ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg) {
    /* The generator's saved frame and value stack live in floating heap storage. */
    StackFrame *genfp = gen->floatingFrame();
    HeapValue *genvp = gen->floatingStack;
    /* Number of values (callee/this/args) that precede the frame header. */
    unsigned vplen = (HeapValue *)genfp - genvp;
    /* Total stack space needed: leading values + frame header + local slots. */
    unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
    if (!firstUnused)
        return false;

    /* Mirror the floating layout on the stack: values first, then the frame. */
    StackFrame *stackfp = reinterpret_cast<StackFrame *>(firstUnused + vplen);
    Value *stackvp = (Value *)stackfp - vplen;

    /* Save this for popGeneratorFrame. */
    gfg->gen_ = gen;
    gfg->stackvp_ = stackvp;

    /*
     * Trigger incremental barrier on the floating frame's generator object.
     * This is normally traced through only by associated arguments/call
     * objects, but only when the generator is not actually on the stack.
     * We don't need to worry about generational barriers as the generator
     * object has a trace hook and cannot be nursery allocated.
     */
    JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
    JS_ASSERT(genobj->getClass()->trace);
    JSObject::writeBarrierPre(genobj);

    /* Copy from the generator's floating frame to the stack. */
    stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
        cx, stackfp, stackvp, genfp, genvp, gen->regs.sp);
    stackfp->resetGeneratorPrev(cx);
    stackfp->unsetFloatingGenerator();
    gfg->regs_.rebaseFromTo(gen->regs, *stackfp);

    gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
    JS_ASSERT(space().firstUnused() == gfg->regs_.sp);
    gfg->setPushed(*this);
    return true;
}
JS_BrokenFrameIterator(JSContext *cx, JSStackFrame **iteratorp)
{
    // Advance the iterator: a null cursor means "start at the innermost
    // frame"; otherwise step to the previous (older) frame.
    StackFrame *frame = Valueify(*iteratorp);
    if (frame) {
        frame = frame->prev();
    } else {
#ifdef JS_METHODJIT
        js::mjit::ExpandInlineFrames(cx->compartment);
#endif
        frame = cx->maybefp();
    }

    // settle on the next non-ion frame as it is not considered safe to inspect
    // Ion's activation StackFrame.
    while (frame && frame->runningInIon())
        frame = frame->prev();

    *iteratorp = Jsvalify(frame);
    return *iteratorp;
}
// Push an interpreter frame for a function invocation described by |args|.
// Returns the new frame, or nullptr on failure (over-recursion/OOM reported
// by getCallFrame). The guard |fg| takes ownership of the pushed frame.
StackFrame * InterpreterStack::pushInvokeFrame(JSContext *cx, const CallArgs &args, InitialFrameFlags initial, FrameGuard *fg) {
    // Remember the allocator position so popping the frame can release it.
    LifoAlloc::Mark mark = allocator_.mark();

    RootedFunction fun(cx, &args.callee().as<JSFunction>());
    RootedScript script(cx, fun->nonLazyScript());

    StackFrame::Flags flags = ToFrameFlags(initial);
    Value *argv;
    StackFrame *fp = getCallFrame(cx, args, script, &flags, &argv);
    if (!fp)
        return nullptr;

    fp->mark_ = mark;
    fp->initCallFrame(cx, nullptr, nullptr, nullptr, *fun, script, argv, args.length(), flags);
    fg->setPushed(*this, fp);
    return fp;
}
// Push an interpreter frame for execute-style evaluation (eval/global/module)
// of |script| with the given |thisv| and scope chain. Returns nullptr on
// allocation failure. Layout of the allocation: [callee][this][StackFrame][slots].
StackFrame * InterpreterStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &thisv, HandleObject scopeChain, ExecuteType type, AbstractFramePtr evalInFrame, FrameGuard *fg) {
    // Remember the allocator position so popping the frame can release it.
    LifoAlloc::Mark mark = allocator_.mark();

    unsigned nvars = 2 /* callee, this */ + script->nslots;
    uint8_t *buffer = allocateFrame(cx, sizeof(StackFrame) + nvars * sizeof(Value));
    if (!buffer)
        return nullptr;

    // The frame header sits just after the two leading Values (callee, this).
    StackFrame *fp = reinterpret_cast<StackFrame *>(buffer + 2 * sizeof(Value));
    fp->mark_ = mark;
    fp->initExecuteFrame(cx, script, evalInFrame, thisv, *scopeChain, type);
    fp->initVarsToUndefined();

    fg->setPushed(*this, fp);
    return fp;
}
// Attach |annotation| to the top stack frame and disable JIT compilation of
// its script (all JIT code is released so the annotation stays observable).
JS_SetTopFrameAnnotation(JSContext *cx, void *annotation)
{
    StackFrame *fp = cx->fp();
    JS_ASSERT_IF(fp->beginsIonActivation(), !fp->annotation());

    // Note that if this frame is running in Ion, the actual calling frame
    // could be inlined or a callee and thus we won't have a correct |fp|.
    // To account for this, ion::InvalidationBailout will transfer an
    // annotation from the old cx->fp() to the new top frame. This works
    // because we will never EnterIon on a frame with an annotation.
    fp->setAnnotation(annotation);

    JSScript *script = fp->script();

    ReleaseAllJITCode(cx->runtime->defaultFreeOp());

    // Ensure that we'll never try to compile this again.
    JS_ASSERT(!script->hasIonScript());
    script->ion = ION_DISABLED_SCRIPT;
}
/*
 * Push a dummy (scriptless) frame whose scope chain lives in compartment
 * |dest|, entering that compartment. Used as a placeholder activation;
 * the guard |dfg| owns the pushed state.
 */
bool ContextStack::pushDummyFrame(JSContext *cx, JSCompartment *dest, JSObject &scopeChain, DummyFrameGuard *dfg) {
    JS_ASSERT(dest == scopeChain.compartment());

    unsigned nvars = VALUES_PER_STACK_FRAME;
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &dfg->pushedSeg_, dest);
    if (!firstUnused)
        return false;

    StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused);
    fp->initDummyFrame(cx, scopeChain);
    dfg->regs_.initDummyFrame(*fp);

    /* Switch compartments before publishing the new regs. */
    cx->setCompartment(dest);

    dfg->prevRegs_ = seg_->pushRegs(dfg->regs_);
    JS_ASSERT(space().firstUnused() == dfg->regs_.sp);
    dfg->setPushed(*this);
    return true;
}
/*
 * Inverse of pushGeneratorFrame: copy the generator's live stack frame back
 * into its floating heap storage so the generator can be suspended. The
 * actual unwinding of the stack is left to the guard's destructor.
 */
void ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
{
    JSGenerator *gen = gfg.gen_;
    StackFrame *genfp = gen->floatingFrame();
    HeapValue *genvp = gen->floatingStack;

    /* The on-stack copy that was produced by pushGeneratorFrame. */
    const FrameRegs &stackRegs = gfg.regs_;
    StackFrame *stackfp = stackRegs.fp();
    Value *stackvp = gfg.stackvp_;

    /* Copy from the stack to the generator's floating frame. */
    gen->regs.rebaseFromTo(stackRegs, *genfp);
    genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
        genfp, genvp, stackfp, stackvp, stackRegs.sp);
    genfp->setFloatingGenerator();

    /* ~FrameGuard/popFrame will finish the popping. */
    JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
}
// Suspend execution at this continuation: detach every frame above the
// continuation's calling frame from the VM stack (keeping them alive in
// m_bottom..m_top for a later resume), restore the calling frame as the
// current one, and deliver |retval| to the caller through regA.
// Throws CodeError if the VM is in atomic mode, where suspension is illegal.
void Continuation::suspend( const Item& retval )
{
    if ( m_vm->currentContext()->atomicMode() )
    {
        throw new CodeError( ErrorParam( e_cont_atomic, __LINE__ )
            .origin( e_orig_vm ) );
    }

    // find the calling frame.
    StackFrame* frame = m_vm->currentFrame();
    while( frame->prev() != m_callingFrame )
    {
        frame = frame->prev();
    }

    // save the original parameters
    m_params.clear();
    for( uint32 i = 0; i < frame->m_param_count; i++ )
    {
        m_params.append( frame->m_params[i] );
    }

    // disengage the stack: the detached chain runs from m_bottom up to m_top.
    frame->prev(0);
    m_bottom = frame;
    m_top = m_vm->currentFrame();

    // and remove the parameters
    m_callingFrame->pop( frame->m_param_count );
    m_context->setFrames( m_callingFrame );

    // prepare the resume values (symbol/module/PC to jump back to on resume)
    m_tgtSymbol = m_top->m_symbol;
    m_tgtLModule = m_top->m_module;
    m_tgtPC = m_top->m_ret_pc;

    m_vm->regA() = retval;

    // for sure, we need more call
    m_bComplete = false;

    // PC, module and symbol are in our return frame, which is invoked as we return.
}
// Populate |obj| (an error object) with line, column, sourceURL and stack
// properties derived from the current JS stack trace, and optionally report
// the bytecode offset / call frame of the first frame with a code block.
// Returns false when no stack trace is available (nothing is attached).
bool addErrorInfoAndGetBytecodeOffset(ExecState* exec, VM& vm, JSObject* obj, bool useCurrentFrame, CallFrame*& callFrame, unsigned* bytecodeOffset)
{
    Vector<StackFrame> stackTrace = Vector<StackFrame>();

    // Skip the current frame when the error should be attributed to the caller.
    size_t framesToSkip = useCurrentFrame ? 0 : 1;
    vm.interpreter->getStackTrace(stackTrace, framesToSkip);
    if (!stackTrace.isEmpty()) {

        ASSERT(exec == vm.topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

        // Find the first non-native frame. If every frame is native, this
        // ends up pointing at the last frame in the trace.
        StackFrame* firstNonNativeFrame;
        for (unsigned i = 0 ; i < stackTrace.size(); ++i) {
            firstNonNativeFrame = &stackTrace.at(i);
            if (!firstNonNativeFrame->isNative())
                break;
        }

        if (bytecodeOffset) {
            FindFirstCallerFrameWithCodeblockFunctor functor(exec);
            vm.topCallFrame->iterate(functor);
            callFrame = functor.foundCallFrame();
            unsigned stackIndex = functor.index();
            *bytecodeOffset = stackTrace.at(stackIndex).bytecodeOffset;
        }

        unsigned line;
        unsigned column;
        firstNonNativeFrame->computeLineAndColumn(line, column);
        obj->putDirect(vm, vm.propertyNames->line, jsNumber(line), ReadOnly | DontDelete);
        obj->putDirect(vm, vm.propertyNames->column, jsNumber(column), ReadOnly | DontDelete);

        String frameSourceURL = firstNonNativeFrame->sourceURL();
        if (!frameSourceURL.isEmpty())
            obj->putDirect(vm, vm.propertyNames->sourceURL, jsString(&vm, frameSourceURL), ReadOnly | DontDelete);

        obj->putDirect(vm, vm.propertyNames->stack, Interpreter::stackTraceAsString(vm, stackTrace), DontEnum);

        return true;
    }
    return false;
}
bool ThreadPlanStepInRange::FrameMatchesAvoidRegexp () { StackFrame *frame = GetThread().GetStackFrameAtIndex(0).get(); const RegularExpression *avoid_regexp_to_use = m_avoid_regexp_ap.get(); if (avoid_regexp_to_use == NULL) avoid_regexp_to_use = GetThread().GetSymbolsToAvoidRegexp(); if (avoid_regexp_to_use != NULL) { SymbolContext sc = frame->GetSymbolContext(eSymbolContextFunction|eSymbolContextBlock|eSymbolContextSymbol); if (sc.symbol != NULL) { const char *frame_function_name = sc.GetFunctionName().GetCString(); if (frame_function_name) return avoid_regexp_to_use->Execute(frame_function_name); } } return false; }
bool
ValueObjectRegisterContext::UpdateValue ()
{
    // Re-fetch the register context from the frame in our execution context;
    // without a frame the context is dropped and the value becomes invalid.
    m_error.Clear();
    ExecutionContext exe_ctx (GetExecutionContextRef());
    StackFrame *frame = exe_ctx.GetFramePtr();
    if (frame)
        m_reg_ctx_sp = frame->GetRegisterContext();
    else
        m_reg_ctx_sp.reset();

    const bool have_reg_ctx = (m_reg_ctx_sp.get() != NULL);
    SetValueIsValid (have_reg_ctx);
    if (!have_reg_ctx)
        m_error.SetErrorToGenericError();

    return m_error.Success();
}
size_t
EmulateInstruction::ReadMemoryFrame (EmulateInstruction *instruction,
                                     void *baton,
                                     const Context &context,
                                     lldb::addr_t addr,
                                     void *dst,
                                     size_t dst_len)
{
    // Memory-read callback: |baton| carries the StackFrame used to locate
    // the process to read from. Returns the number of bytes read (0 on
    // bad arguments or when no process is available).
    if (baton == NULL || dst == NULL || dst_len == 0)
        return 0;

    StackFrame *frame = static_cast<StackFrame *>(baton);
    ProcessSP process_sp (frame->CalculateProcess());
    if (!process_sp)
        return 0;

    Error error;
    return process_sp->ReadMemory (addr, dst, dst_len, error);
}
// Set the program counter of this frame to |new_pc|.
// Returns true on success; false if the frame/process is unavailable, the
// process is running, or the frame has no register context.
bool
SBFrame::SetPC (addr_t new_pc)
{
    LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API));
    bool ret_val = false;
    Mutex::Locker api_locker;
    ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker);

    StackFrame *frame = NULL;
    Target *target = exe_ctx.GetTargetPtr();
    Process *process = exe_ctx.GetProcessPtr();
    if (target && process)
    {
        Process::StopLocker stop_locker;
        if (stop_locker.TryLock(&process->GetRunLock()))
        {
            frame = exe_ctx.GetFramePtr();
            if (frame)
            {
                // Hold the shared pointer and null-check it: a frame may have
                // no register context, and dereferencing the raw result of
                // GetRegisterContext() directly would crash in that case.
                RegisterContextSP reg_ctx_sp (frame->GetRegisterContext());
                if (reg_ctx_sp)
                    ret_val = reg_ctx_sp->SetPC (new_pc);
            }
            else
            {
                if (log)
                    log->Printf ("SBFrame::SetPC () => error: could not reconstruct frame object for this SBFrame.");
            }
        }
        else
        {
            if (log)
                log->Printf ("SBFrame::SetPC () => error: process is running");
        }
    }

    if (log)
        log->Printf ("SBFrame(%p)::SetPC (new_pc=0x%" PRIx64 ") => %i",
                     frame, new_pc, ret_val);

    return ret_val;
}
bool
SBFrame::GetDescription (SBStream &description)
{
    // Render this frame using the settings format into |description|;
    // prints "No value" when there is no target/process. Always returns true.
    LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API));
    Stream &strm = description.ref();

    Mutex::Locker api_locker;
    ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker);
    Target *target = exe_ctx.GetTargetPtr();
    Process *process = exe_ctx.GetProcessPtr();

    if (target == NULL || process == NULL)
    {
        strm.PutCString ("No value");
        return true;
    }

    Process::StopLocker stop_locker;
    if (!stop_locker.TryLock(&process->GetRunLock()))
    {
        if (log)
            log->Printf ("SBFrame::GetDescription () => error: process is running");
        return true;
    }

    StackFrame *frame = exe_ctx.GetFramePtr();
    if (frame)
        frame->DumpUsingSettingsFormat (&strm);
    else if (log)
        log->Printf ("SBFrame::GetDescription () => error: could not reconstruct frame object for this SBFrame.");

    return true;
}
// Resolve and return the symbol context of this frame for |resolve_scope|.
// Returns an empty SBSymbolContext when the frame cannot be reconstructed
// or the process is running.
SBSymbolContext
SBFrame::GetSymbolContext (uint32_t resolve_scope) const
{
    LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API));
    SBSymbolContext sb_sym_ctx;
    Mutex::Locker api_locker;
    ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker);

    StackFrame *frame = NULL;
    Target *target = exe_ctx.GetTargetPtr();
    Process *process = exe_ctx.GetProcessPtr();
    if (target && process)
    {
        Process::StopLocker stop_locker;
        if (stop_locker.TryLock(&process->GetRunLock()))
        {
            frame = exe_ctx.GetFramePtr();
            if (frame)
            {
                sb_sym_ctx.SetSymbolContext(&frame->GetSymbolContext (resolve_scope));
            }
            else
            {
                // Fixed copy-paste bug: this message previously said
                // "SBFrame::GetVariables ()".
                if (log)
                    log->Printf ("SBFrame::GetSymbolContext () => error: could not reconstruct frame object for this SBFrame.");
            }
        }
        else
        {
            if (log)
                log->Printf ("SBFrame::GetSymbolContext () => error: process is running");
        }
    }

    if (log)
        log->Printf ("SBFrame(%p)::GetSymbolContext (resolve_scope=0x%8.8x) => SBSymbolContext(%p)",
                     frame, resolve_scope, sb_sym_ctx.get());

    return sb_sym_ctx;
}
/*
 * This function must only be called after the early prologue, since it depends
 * on fp->exec.fun.
 */
void * JS_FASTCALL stubs::FixupArity(VMFrame &f, uint32_t nactual)
{
    AssertCanGC();
    JSContext *cx = f.cx;
    StackFrame *oldfp = f.fp();

    JS_ASSERT(nactual != oldfp->numFormalArgs());

    /*
     * Grossssss! *move* the stack frame. If this ends up being perf-critical,
     * we can figure out how to spot-optimize it. Be careful to touch only the
     * members that have been initialized by the caller and early prologue.
     */
    InitialFrameFlags initial = oldfp->initialFlags();
    RootedFunction fun(cx, oldfp->fun());
    RootedScript script(cx, fun->nonLazyScript());
    void *ncode = oldfp->nativeReturnAddress();

    /* Pop the inline frame. */
    f.regs.popPartialFrame((Value *)oldfp);

    /* Reserve enough space for a callee frame. */
    CallArgs args = CallArgsFromSp(nactual, f.regs.sp);
    if (fun->isCallsiteClone()) {
        /* The stack holds the original function; install the clone as callee. */
        JS_ASSERT(args.callee().toFunction() == fun->getExtendedSlot(0).toObject().toFunction());
        args.setCallee(ObjectValue(*fun));
    }
    StackFrame *fp = cx->stack.getFixupFrame(cx, DONT_REPORT_ERROR, args, fun,
                                             script, ncode, initial, &f.stackLimit);

    if (!fp) {
        /* Restore a coherent ncode/PC before reporting over-recursion. */
        f.regs.updateForNcode(f.jit(), ncode);
        js_ReportOverRecursed(cx);
        THROWV(NULL);
    }

    /* The caller takes care of assigning fp to regs. */
    return fp;
}
// JVMTI GetLocalObject support for the interpreter: fetch the object-typed
// local variable |slot| from the frame at |depth| in |thread| and return it
// in *value_ptr as a newly allocated local handle (NULL for a null local).
jvmtiError interpreter_ti_getObject(jvmtiEnv* env, VM_thread *thread, jint depth, jint slot, jobject* value_ptr)
{
    StackFrame *frame;

    // check error condition: JVMTI_ERROR_NULL_POINTER
    if( value_ptr == NULL ) return JVMTI_ERROR_NULL_POINTER;

    // check error condition: JVMTI_ERROR_NO_MORE_FRAMES
    // check error condition: JVMTI_ERROR_OPAQUE_FRAME
    // check error condition: JVMTI_ERROR_INVALID_SLOT
    jvmtiError err = interpreter_ti_getLocalCommon(env, thread, depth, slot, &frame);
    if (err != JVMTI_ERROR_NONE) return err;

    // TODO: check error condition: JVMTI_ERROR_TYPE_MISMATCH
    // partial check error condition: JVMTI_ERROR_TYPE_MISMATCH
    // (a zero ref tag means the slot does not currently hold a reference)
    if (frame->locals.ref(slot) == 0) {
        return JVMTI_ERROR_TYPE_MISMATCH;
    }

    assert(hythread_is_suspend_enabled());
    // Enter the suspend-disabled region while holding a raw ManagedObject*.
    // NOTE(review): presumably this keeps GC from moving the object until the
    // handle is published — confirm against hythread semantics.
    hythread_suspend_disable();

    ManagedObject *obj = UNCOMPRESS_INTERP(frame->locals(slot).ref);
    if (NULL == obj) {
        *value_ptr = NULL;
    } else {
        ObjectHandle handle = oh_allocate_local_handle();
        handle->object = obj;
        *value_ptr = (jobject) handle;
    }
    hythread_suspend_enable();
    return JVMTI_ERROR_NONE;
}
/*
 * Expand a suspended generator's floating frame onto the VM stack so it can
 * resume execution (variant where the floating stack stores plain Values).
 * On success the guard |gfg| owns the pushed state.
 */
bool ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg) {
    StackFrame *genfp = gen->floatingFrame();
    Value *genvp = gen->floatingStack;
    /* Number of values (callee/this/args) preceding the frame header. */
    uintN vplen = (Value *)genfp - genvp;

    /* Total stack space needed: leading values + frame header + local slots. */
    uintN nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
    if (!firstUnused)
        return false;

    /* Mirror the floating layout on the stack: values first, then the frame. */
    StackFrame *stackfp = reinterpret_cast<StackFrame *>(firstUnused + vplen);
    Value *stackvp = (Value *)stackfp - vplen;

    /* Save this for popGeneratorFrame. */
    gfg->gen_ = gen;
    gfg->stackvp_ = stackvp;

    /* Copy from the generator's floating frame to the stack. */
    stackfp->stealFrameAndSlots(stackvp, genfp, genvp, gen->regs.sp);
    stackfp->resetGeneratorPrev(cx);
    stackfp->unsetFloatingGenerator();
    gfg->regs_.rebaseFromTo(gen->regs, *stackfp);

    gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
    JS_ASSERT(space().firstUnused() == gfg->regs_.sp);
    gfg->setPushed(*this);
    return true;
}
// Copy the block's local variable values from the stack into the object's
// reserved slots (done when leaving the block's scope), then detach the
// object from the frame and pop it off the scope chain.
void ClonedBlockObject::put(JSContext *cx)
{
    StackFrame *fp = cx->fp();
    JS_ASSERT(maybeStackFrame() == js_FloatingFrameIfGenerator(cx, fp));

    uint32_t count = slotCount();
    uint32_t depth = stackDepth();

    /* The block and its locals must be on the current stack for GC safety. */
    JS_ASSERT(depth <= uint32_t(cx->regs().sp - fp->base()));
    JS_ASSERT(count <= uint32_t(cx->regs().sp - fp->base() - depth));

    /* See comments in CheckDestructuring in frontend/Parser.cpp. */
    JS_ASSERT(count >= 1);

    copySlotRange(RESERVED_SLOTS, fp->base() + depth, count);

    /* We must clear the private slot even with errors. */
    setPrivate(NULL);
    fp->setScopeChainNoCallObj(enclosingScope());
}
size_t
EmulateInstruction::WriteMemoryFrame (EmulateInstruction *instruction,
                                      void *baton,
                                      const Context &context,
                                      lldb::addr_t addr,
                                      const void *src,
                                      size_t src_len)
{
    // Memory-write callback: |baton| carries the StackFrame used to locate
    // the process to write to. Returns the number of bytes written (0 on
    // bad arguments or when no process is available).
    if (baton == NULL || src == NULL || src_len == 0)
        return 0;

    StackFrame *frame = static_cast<StackFrame *>(baton);
    ProcessSP process_sp (frame->CalculateProcess());
    if (!process_sp)
        return 0;

    Error error;
    return process_sp->WriteMemory (addr, src, src_len, error);
}
bool ThreadPlanShouldStopHere::DefaultShouldStopHereCallback (ThreadPlan *current_plan, Flags &flags, FrameComparison operation, void *baton) { bool should_stop_here = true; StackFrame *frame = current_plan->GetThread().GetStackFrameAtIndex(0).get(); if (!frame) return true; Log *log(lldb_private::GetLogIfAllCategoriesSet (LIBLLDB_LOG_STEP)); if ((operation == eFrameCompareOlder && flags.Test(eStepOutAvoidNoDebug)) || (operation == eFrameCompareYounger && flags.Test(eStepInAvoidNoDebug)) || (operation == eFrameCompareSameParent && flags.Test(eStepInAvoidNoDebug))) { if (!frame->HasDebugInformation()) { if (log) log->Printf ("Stepping out of frame with no debug info"); should_stop_here = false; } } // Always avoid code with line number 0. // FIXME: At present the ShouldStop and the StepFromHere calculate this independently. If this ever // becomes expensive (this one isn't) we can try to have this set a state that the StepFromHere can use. if (frame) { SymbolContext sc; sc = frame->GetSymbolContext (eSymbolContextLineEntry); if (sc.line_entry.line == 0) should_stop_here = false; } return should_stop_here; }
const char * SBFrame::Disassemble () const { LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API)); const char *disassembly = NULL; Mutex::Locker api_locker; ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker); StackFrame *frame = NULL; Target *target = exe_ctx.GetTargetPtr(); Process *process = exe_ctx.GetProcessPtr(); if (target && process) { Process::StopLocker stop_locker; if (stop_locker.TryLock(&process->GetRunLock())) { frame = exe_ctx.GetFramePtr(); if (frame) { disassembly = frame->Disassemble(); } else { if (log) log->Printf ("SBFrame::Disassemble () => error: could not reconstruct frame object for this SBFrame."); } } else { if (log) log->Printf ("SBFrame::Disassemble () => error: process is running"); } } if (log) log->Printf ("SBFrame(%p)::Disassemble () => %s", frame, disassembly); return disassembly; }
SBFunction
SBFrame::GetFunction () const
{
    // Return the Function of this frame's symbol context; an empty SBFunction
    // when the frame cannot be reconstructed or the process is running.
    LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API));
    SBFunction sb_function;
    Mutex::Locker api_locker;
    ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker);

    StackFrame *frame = NULL;
    Target *target = exe_ctx.GetTargetPtr();
    Process *process = exe_ctx.GetProcessPtr();
    if (target != NULL && process != NULL)
    {
        Process::StopLocker stop_locker;
        if (stop_locker.TryLock(&process->GetRunLock()))
        {
            frame = exe_ctx.GetFramePtr();
            if (frame == NULL)
            {
                if (log)
                    log->Printf ("SBFrame::GetFunction () => error: could not reconstruct frame object for this SBFrame.");
            }
            else
            {
                sb_function.reset(frame->GetSymbolContext (eSymbolContextFunction).function);
            }
        }
        else if (log)
        {
            log->Printf ("SBFrame::GetFunction () => error: process is running");
        }
    }

    if (log)
        log->Printf ("SBFrame(%p)::GetFunction () => SBFunction(%p)", frame, sb_function.get());

    return sb_function;
}
// Debugger API: evaluate the source |chars| in the scope of the given stack
// frame, storing the completion value in *rval. Requires debug mode.
JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fpArg, const jschar *chars, unsigned length, const char *filename, unsigned lineno, jsval *rval)
{
    if (!CheckDebugMode(cx))
        return false;

    Rooted<Env*> env(cx, JS_GetFrameScopeChain(cx, fpArg));
    if (!env)
        return false;

    StackFrame *fp = Valueify(fpArg);

    // Materialize |this| for the frame before evaluating in its environment.
    if (!ComputeThis(cx, fp))
        return false;
    RootedValue thisv(cx, fp->thisValue());

    // Enter the environment's compartment for the evaluation.
    js::AutoCompartment ac(cx, env);
    return EvaluateInEnv(cx, env, thisv, fp, StableCharPtr(chars, length), length,
                         filename, lineno, rval);
}
addr_t
SBFrame::GetPC () const
{
    // Return this frame's PC as an opcode load address in the target;
    // LLDB_INVALID_ADDRESS when the frame cannot be reconstructed or the
    // process is running.
    LogSP log(GetLogIfAllCategoriesSet (LIBLLDB_LOG_API));
    addr_t addr = LLDB_INVALID_ADDRESS;
    Mutex::Locker api_locker;
    ExecutionContext exe_ctx (m_opaque_sp.get(), api_locker);

    StackFrame *frame = NULL;
    Target *target = exe_ctx.GetTargetPtr();
    Process *process = exe_ctx.GetProcessPtr();
    if (target != NULL && process != NULL)
    {
        Process::StopLocker stop_locker;
        if (stop_locker.TryLock(&process->GetRunLock()))
        {
            frame = exe_ctx.GetFramePtr();
            if (frame == NULL)
            {
                if (log)
                    log->Printf ("SBFrame::GetPC () => error: could not reconstruct frame object for this SBFrame.");
            }
            else
            {
                addr = frame->GetFrameCodeAddress().GetOpcodeLoadAddress (target);
            }
        }
        else if (log)
        {
            log->Printf ("SBFrame::GetPC () => error: process is running");
        }
    }

    if (log)
        log->Printf ("SBFrame(%p)::GetPC () => 0x%" PRIx64, frame, addr);

    return addr;
}
// Upper bound, in words, of the stack space used by the top-most interpreter
// activation for |method|: the call stub frame plus the interpreter frame.
int AbstractInterpreter::size_top_interpreter_activation(methodOop method) {
#ifdef PPC
  StackFrame frame;

  // Call stub: base size plus the method's local variables, stack-aligned.
  int call_stub_frame = round_to(
    StubRoutines::call_stub_base_size() +
    method->max_locals() * wordSize, StackAlignmentInBytes);

  // Interpreter frame: frame header, slop, expression stack, an initial
  // monitor when the method is synchronized, and the BytecodeInterpreter
  // state, stack-aligned.
  int interpreter_frame = round_to(
    frame.unaligned_size() +
    slop_factor +
    method->max_stack() * wordSize +
    (method->is_synchronized() ?
     frame::interpreter_frame_monitor_size() * wordSize : 0) +
    sizeof(BytecodeInterpreter), StackAlignmentInBytes);

  return (call_stub_frame + interpreter_frame) / wordSize;
#else
  // NOTE(review): this branch falls off the end of a non-void function;
  // presumably Unimplemented() aborts and never returns — confirm.
  Unimplemented();
#endif // PPC
}
/*
 * This function must only be called after the early prologue, since it depends
 * on fp->exec.fun.
 */
void * JS_FASTCALL stubs::FixupArity(VMFrame &f, uint32 nactual)
{
    JSContext *cx = f.cx;
    StackFrame *oldfp = f.fp();

    JS_ASSERT(nactual != oldfp->numFormalArgs());

    /*
     * Grossssss! *move* the stack frame. If this ends up being perf-critical,
     * we can figure out how to spot-optimize it. Be careful to touch only the
     * members that have been initialized by initJitFrameCallerHalf and the
     * early prologue.
     */
    MaybeConstruct construct = oldfp->isConstructing();
    JSFunction *fun = oldfp->fun();
    JSScript *script = fun->script();
    void *ncode = oldfp->nativeReturnAddress();

    /* Pop the inline frame. */
    f.regs.popPartialFrame((Value *)oldfp);

    /* Reserve enough space for a callee frame. */
    CallArgs args = CallArgsFromSp(nactual, f.regs.sp);
    StackFrame *fp = cx->stack.getFixupFrame(cx, DONT_REPORT_ERROR, args, fun, script,
                                             ncode, construct, &f.stackLimit);

    if (!fp) {
        /*
         * The PC is not coherent with the current frame, so fix it up for
         * exception handling.
         */
        f.regs.pc = f.jit()->nativeToPC(ncode);
        js_ReportOverRecursed(cx);
        THROWV(NULL);
    }

    /* The caller takes care of assigning fp to regs. */
    return fp;
}
// GC trace hook for the VM stack: mark every frame and value slot in every
// segment, walking segments and frames from newest to oldest.
void StackSpace::mark(JSTracer *trc)
{
    /*
     * JIT code can leave values in an incoherent (i.e., unsafe for precise
     * marking) state, hence MarkStackRangeConservatively.
     */

    /* NB: this depends on the continuity of segments in memory. */
    Value *nextSegEnd = firstUnused();
    for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
        /*
         * A segment describes a linear region of memory that contains a stack
         * of native and interpreted calls. For marking purposes, though, we
         * only need to distinguish between frames and values and mark
         * accordingly. Since native calls only push values on the stack, we
         * can effectively lump them together and just iterate over interpreted
         * calls. Thus, marking can view the stack as the regex:
         *   (segment slots (frame slots)*)*
         * which gets marked in reverse order.
         */
        Value *slotsEnd = nextSegEnd;
        jsbytecode *pc = seg->maybepc();
        for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
            /* Mark from fp->slots() to slotsEnd. */
            markFrameValues(trc, fp, slotsEnd, pc);

            fp->mark(trc);

            slotsEnd = (Value *)fp;

            /* Advance pc to the caller's resume point for the next frame. */
            InlinedSite *site;
            pc = fp->prevpc(&site);
            JS_ASSERT_IF(fp->prev(), !site);
        }
        /* Mark the segment's own leading value slots. */
        gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
        nextSegEnd = (Value *)seg;
    }
}
bool
StringSummaryFormat::FormatObject (ValueObject *valobj,
                                   std::string& retval)
{
    // Produce the summary string for |valobj| into |retval|; returns false
    // on a null object or a summary-string parsing error.
    if (valobj == NULL)
    {
        retval.assign("NULL ValueObject");
        return false;
    }

    StreamString s;
    ExecutionContext exe_ctx (valobj->GetExecutionContextRef());
    SymbolContext sc;
    StackFrame *frame = exe_ctx.GetFramePtr();
    if (frame)
        sc = frame->GetSymbolContext(lldb::eSymbolContextEverything);

    if (IsOneLiner())
    {
        // One-line mode prints the children inline instead of the format string.
        ValueObjectPrinter printer(valobj, &s, DumpValueObjectOptions());
        printer.PrintChildrenOneLiner(HideNames());
        retval.assign(s.GetData());
        return true;
    }

    if (!Debugger::FormatPrompt(m_format.c_str(), &sc, &exe_ctx,
                                &sc.line_entry.range.GetBaseAddress(), s, valobj))
    {
        retval.assign("error: summary string parsing error");
        return false;
    }

    retval.assign(s.GetString());
    return true;
}
void
StackFrameList::Dump (Stream *s)
{
    // Dump every frame in the list to |s|: pointer, stack ID, and the
    // settings-format description (or just the index for a null frame).
    if (s == NULL)
        return;

    Mutex::Locker locker (m_mutex);

    const_iterator begin = m_frames.begin();
    const_iterator end = m_frames.end();
    for (const_iterator pos = begin; pos != end; ++pos)
    {
        StackFrame *frame = (*pos).get();
        s->Printf("%p: ", static_cast<void*>(frame));
        if (frame != NULL)
        {
            frame->GetStackID().Dump (s);
            frame->DumpUsingSettingsFormat (s);
        }
        else
        {
            s->Printf("frame #%u", (uint32_t)std::distance (begin, pos));
        }
        s->EOL();
    }
    s->EOL();
}