/*
 * Finish an increment operation on a LOCAL or ARG. These do not involve
 * property accesses.
 *
 * ov is the operand's original value; nv is the new value (already computed
 * when rejoining at REJOIN_BINARY). *vp receives the value the op leaves on
 * the stack.
 */
static void
FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
{
    JS_ASSERT(rejoin == REJOIN_POS || rejoin == REJOIN_BINARY);

    JSContext *cx = f.cx;

    /* Identify which local/arg inc/dec opcode is being finished. */
    JSOp op = JSOp(*f.pc());
    JS_ASSERT(op == JSOP_LOCALINC || op == JSOP_INCLOCAL ||
              op == JSOP_LOCALDEC || op == JSOP_DECLOCAL ||
              op == JSOP_ARGINC || op == JSOP_INCARG ||
              op == JSOP_ARGDEC || op == JSOP_DECARG);
    const JSCodeSpec *cs = &js_CodeSpec[op];

    /* Locate the slot being updated: a frame local or a formal argument. */
    unsigned i = GET_SLOTNO(f.pc());
    Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL)
                 ? f.fp()->slots() + i
                 : &f.fp()->formalArg(i);

    if (rejoin == REJOIN_POS) {
        /* Rejoined before the add happened: compute the new value from ov. */
        double d = ov.toNumber();
        double N = (cs->format & JOF_INC) ? 1 : -1;
        /*
         * NOTE(review): presumably setNumber reports whether the result fit
         * an int32; on failure, type inference is told about the overflow.
         * Confirm against Value::setNumber.
         */
        if (!nv.setNumber(d + N))
            types::TypeScript::MonitorOverflow(cx, f.script(), f.pc());
    }

    *var = nv;
    /* Postfix forms leave the original value on the stack, prefix the new. */
    *vp = (cs->format & JOF_POST) ? ov : nv;
}
/*
 * Ensure the current heavyweight function frame gets its call object,
 * throwing from the stub on failure.
 */
void JS_FASTCALL
stubs::CreateFunCallObject(VMFrame &f)
{
    JSContext *cx = f.cx;

    JS_ASSERT(f.fp()->fun()->isHeavyweight());

    bool ok = js::CreateFunCallObject(cx, f.fp());
    if (!ok)
        THROW();
}
/*
 * Run the debugger epilogue for the current frame, notifying probes that the
 * (possibly function) frame is being exited. Throws if the epilogue fails.
 */
void JS_FASTCALL
stubs::ScriptDebugEpilogue(VMFrame &f)
{
    JSContext *cx = f.cx;

    Probes::exitJSFun(cx, f.fp()->maybeFun(), f.fp()->script());

    bool ok = js::ScriptDebugEpilogue(cx, f.fp(), JS_TRUE);
    if (!ok)
        THROW();
}
/*
 * Pop the current inline frame, returning control to its caller.
 * Must not be called on the entry frame.
 */
static void
InlineReturn(VMFrame &f)
{
    JSContext *cx = f.cx;

    JS_ASSERT(f.fp() != f.entryfp);
    JS_ASSERT(!js_IsActiveWithOrBlock(cx, &f.fp()->scopeChain(), 0));

    cx->stack.popInlineFrame(f.regs);
}
/*
 * Finish an increment operation on a LOCAL or ARG. These do not involve
 * property accesses.
 *
 * ov is the operand's original value; nv is the new value (already computed
 * when rejoining at REJOIN_BINARY). *vp receives the value the op leaves on
 * the stack.
 */
static void
FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
{
    JS_ASSERT(rejoin == REJOIN_POS || rejoin == REJOIN_BINARY);

    JSContext *cx = f.cx;

    /* Identify which local/arg inc/dec opcode is being finished. */
    JSOp op = JSOp(*f.pc());
    JS_ASSERT(op == JSOP_LOCALINC || op == JSOP_INCLOCAL ||
              op == JSOP_LOCALDEC || op == JSOP_DECLOCAL ||
              op == JSOP_ARGINC || op == JSOP_INCARG ||
              op == JSOP_ARGDEC || op == JSOP_DECARG);
    const JSCodeSpec *cs = &js_CodeSpec[op];

    if (rejoin == REJOIN_POS) {
        /* Rejoined before the add happened: compute the new value from ov. */
        double d = ov.toNumber();
        double N = (cs->format & JOF_INC) ? 1 : -1;
        if (!nv.setNumber(d + N)) {
            /*
             * NOTE(review): presumably the result no longer fit an int32;
             * inform type inference of the overflow. Confirm setNumber's
             * return convention.
             */
            RootedScript fscript(cx, f.script());
            types::TypeScript::MonitorOverflow(cx, fscript, f.pc());
        }
    }

    /*
     * Write the new value back. If the frame's formals are aliased by an
     * arguments object, the write must go through it rather than the frame.
     */
    unsigned i = GET_SLOTNO(f.pc());
    if (JOF_TYPE(cs->format) == JOF_LOCAL)
        f.fp()->unaliasedLocal(i) = nv;
    else if (f.fp()->script()->argsObjAliasesFormals())
        f.fp()->argsObj().setArg(i, nv);
    else
        f.fp()->unaliasedFormal(i) = nv;

    /* Postfix forms leave the original value on the stack, prefix the new. */
    *vp = (cs->format & JOF_POST) ? ov : nv;
}
/*
 * Tear down the call object of a strict-mode eval frame on exit.
 */
void JS_FASTCALL
stubs::PutStrictEvalCallObject(VMFrame &f)
{
    JSContext *cx = f.cx;

    JS_ASSERT(f.fp()->isEvalFrame());
    JS_ASSERT(f.fp()->script()->strictModeCode);
    JS_ASSERT(f.fp()->hasCallObj());

    js_PutCallObject(cx, f.fp());
}
/*
 * Pop the current inline frame and advance the caller's pc past the call op
 * that pushed it. Must not be called on the entry frame.
 */
static void
InlineReturn(VMFrame &f)
{
    JS_ASSERT(f.fp() != f.entryfp);
    JS_ASSERT(!js_IsActiveWithOrBlock(f.cx, &f.fp()->scopeChain(), 0));

    f.cx->stack.popInlineFrame(f.regs);

    /* The caller must be stopped at some flavor of call opcode. */
    JS_ASSERT(*f.regs.pc == JSOP_CALL ||
              *f.regs.pc == JSOP_NEW ||
              *f.regs.pc == JSOP_EVAL ||
              *f.regs.pc == JSOP_FUNCALL ||
              *f.regs.pc == JSOP_FUNAPPLY);
    f.regs.pc += JSOP_CALL_LENGTH;
}
/*
 * HitStackQuota is called after the early prologue pushing the new frame would
 * overflow f.stackLimit. Tries to grow the committed stack; if that fails,
 * unwinds the partial frame and throws an over-recursion error.
 */
void JS_FASTCALL
stubs::HitStackQuota(VMFrame &f)
{
    JS_ASSERT(f.regs.sp == f.fp()->base());

    /* Request room for this script's slots plus one more frame. */
    uintN nvals = f.fp()->script()->nslots + VALUES_PER_STACK_FRAME;
    bool grown = f.cx->stack().bumpCommitAndLimit(f.entryfp, f.regs.sp, nvals, &f.stackLimit);
    if (grown)
        return;

    /* Remove the current partially-constructed frame before throwing. */
    RemovePartialFrame(f.cx, f.fp());
    js_ReportOverRecursed(f.cx);
    THROW();
}
/*
 * Slow path for a global-name read IC: look the name up on the global object
 * and, when the property is a plain data slot, patch the IC's inline fast
 * path (shape guard and slot load) so future reads skip this stub.
 */
void JS_FASTCALL
ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    JSObject *obj = f.fp()->scopeChain().getGlobal();
    JSAtom *atom = f.script()->getAtom(GET_INDEX(f.pc()));
    jsid id = ATOM_TO_JSID(atom);

    const Shape *shape = obj->nativeLookup(f.cx, id);
    if (!shape || !shape->hasDefaultGetterOrIsMethod() || !shape->hasSlot()) {
        /*
         * Missing, scripted-getter, or slotless property: the IC cannot be
         * specialized. If a shape exists, point the IC at the fallback stub.
         */
        if (shape)
            PatchGetFallback(f, ic);
        stubs::GetGlobalName(f);
        return;
    }
    uint32 slot = shape->slot;

    /* Patch shape guard. */
    Repatcher repatcher(f.jit());
    repatcher.repatch(ic->fastPathStart.dataLabel32AtOffset(ic->shapeOffset), obj->shape());

    /* Patch loads. */
    uint32 index = obj->dynamicSlotIndex(slot);
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueLoad(label, index * sizeof(Value));

    /* Do load anyway... this time. */
    stubs::GetGlobalName(f);
}
/*
 * Pop the current inline frame and advance the caller's pc past the call op
 * that pushed it. Must not be called on the entry frame.
 */
static void
InlineReturn(VMFrame &f)
{
    JS_ASSERT(f.fp() != f.entryfp);
    JS_ASSERT(!IsActiveWithOrBlock(f.cx, f.fp()->scopeChain(), 0));
    JS_ASSERT(!f.fp()->hasBlockChain());

    f.cx->stack.popInlineFrame(f.regs);

    /* The caller must be stopped at some flavor of call opcode. */
    DebugOnly<JSOp> op = JSOp(*f.regs.pc);
    JS_ASSERT(op == JSOP_CALL || op == JSOP_NEW || op == JSOP_EVAL ||
              op == JSOP_FUNCALL || op == JSOP_FUNAPPLY);
    f.regs.pc += JSOP_CALL_LENGTH;
}
/*
 * This function must only be called after the early prologue, since it depends
 * on fp->exec.fun.
 *
 * Rebuilds the just-pushed frame with the correct argument count when the
 * number of actuals does not match the callee's formals. Returns the new
 * frame, or throws (returning NULL) on over-recursion.
 */
void * JS_FASTCALL
stubs::FixupArity(VMFrame &f, uint32_t nactual)
{
    JSContext *cx = f.cx;
    StackFrame *oldfp = f.fp();

    JS_ASSERT(nactual != oldfp->numFormalArgs());

    /*
     * Grossssss! *move* the stack frame. If this ends up being perf-critical,
     * we can figure out how to spot-optimize it. Be careful to touch only the
     * members that have been initialized by the caller and early prologue.
     */
    InitialFrameFlags initial = oldfp->initialFlags();
    JSFunction *fun = oldfp->fun();
    JSScript *script = fun->script();
    void *ncode = oldfp->nativeReturnAddress();

    /* Pop the inline frame. */
    f.regs.popPartialFrame((Value *)oldfp);

    /* Reserve enough space for a callee frame. */
    CallArgs args = CallArgsFromSp(nactual, f.regs.sp);
    StackFrame *fp = cx->stack.getFixupFrame(cx, DONT_REPORT_ERROR, args, fun,
                                             script, ncode, initial, &f.stackLimit);
    if (!fp) {
        /* Restore a pc coherent with ncode before reporting and throwing. */
        f.regs.updateForNcode(f.jit(), ncode);
        js_ReportOverRecursed(cx);
        THROWV(NULL);
    }

    /* The caller takes care of assigning fp to regs. */
    return fp;
}
/*
 * Pop the current inline frame and advance the caller's pc past the call op
 * that pushed it. Must not be called on the entry frame.
 */
static void
InlineReturn(VMFrame &f)
{
    JS_ASSERT(f.fp() != f.entryfp);
    AssertValidFunctionScopeChainAtExit(f.fp());

    f.cx->stack.popInlineFrame(f.regs);

    /* The caller must be stopped at some flavor of call opcode. */
    DebugOnly<JSOp> op = JSOp(*f.regs.pc);
    JS_ASSERT(op == JSOP_CALL || op == JSOP_NEW || op == JSOP_EVAL ||
              op == JSOP_FUNCALL || op == JSOP_FUNAPPLY);
    f.regs.pc += JSOP_CALL_LENGTH;
}
/*
 * Resolve the object on the scope chain that binds |name| and push it on the
 * operand stack; throws if resolution fails.
 */
void JS_FASTCALL
stubs::BindName(VMFrame &f, PropertyName *name)
{
    JSObject *base = FindIdentifierBase(f.cx, &f.fp()->scopeChain(), name);
    if (!base)
        THROW();
    f.regs.sp[0].setObject(*base);
}
/*
 * Invoke eval. A direct call to the builtin eval for this scope takes the
 * DirectEval path; anything else is an ordinary invocation.
 */
void JS_FASTCALL
stubs::Eval(VMFrame &f, uint32 argc)
{
    CallArgs args = CallArgsFromSp(argc, f.regs.sp);

    bool isBuiltinEval = IsBuiltinEvalForScope(&f.fp()->scopeChain(), args.calleev());
    if (!isBuiltinEval) {
        /* Not really eval: perform a normal call. */
        if (!Invoke(f.cx, args))
            THROW();
        return;
    }

    JS_ASSERT(f.fp() == f.cx->fp());
    if (!DirectEval(f.cx, args))
        THROW();

    f.regs.sp = args.spAfterCall();
}
/*
 * Run the debugger prologue for the frame being entered, notifying probes,
 * and dispatch on the debugger's verdict: continue normally, force an early
 * return from the fast call, or throw.
 */
void JS_FASTCALL
stubs::ScriptDebugPrologue(VMFrame &f)
{
    Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
    JSTrapStatus status = js::ScriptDebugPrologue(f.cx, f.fp());
    switch (status) {
      case JSTRAP_CONTINUE:
        break;
      case JSTRAP_RETURN:
        /* Redirect the stub's return address so the fast call returns now. */
        *f.returnAddressLocation() = f.cx->jaegerRuntime().forceReturnFromFastCall();
        return;
      case JSTRAP_ERROR:
      case JSTRAP_THROW:
        /* NOTE(review): THROW() is expected to return from this stub, so no
           fallthrough into default occurs — confirm against the macro. */
        THROW();
      default:
        JS_NOT_REACHED("bad ScriptDebugPrologue status");
    }
}
/*
 * Resolve the scope object binding |name_| (defaulting to the global object)
 * and push it on the operand stack; throws on lookup failure.
 */
void JS_FASTCALL
stubs::BindName(VMFrame &f, PropertyName *name_)
{
    JSContext *cx = f.cx;

    RootedPropertyName name(cx, name_);
    RootedObject scope(cx);
    if (!LookupNameWithGlobalDefault(cx, name, f.fp()->scopeChain(), &scope))
        THROW();

    f.regs.sp[0].setObject(*scope);
}
/*
 * This function must only be called after the early prologue, since it depends
 * on fp->exec.fun.
 *
 * Rebuilds the just-pushed frame with the correct argument count when the
 * number of actuals does not match the callee's formals. Returns the new
 * frame, or throws (returning NULL) on failure.
 */
void * JS_FASTCALL
stubs::FixupArity(VMFrame &f, uint32 nactual)
{
    JSContext *cx = f.cx;
    JSStackFrame *oldfp = f.fp();

    JS_ASSERT(nactual != oldfp->numFormalArgs());

    /*
     * Grossssss! *move* the stack frame. If this ends up being perf-critical,
     * we can figure out how to spot-optimize it. Be careful to touch only the
     * members that have been initialized by initCallFrameCallerHalf and the
     * early prologue.
     */
    uint32 flags = oldfp->isConstructingFlag();
    JSFunction *fun = oldfp->fun();
    void *ncode = oldfp->nativeReturnAddress();

    /* Pop the inline frame. */
    f.fp() = oldfp->prev();
    f.regs.sp = (Value*) oldfp;

    /* Reserve enough space for a callee frame. */
    JSStackFrame *newfp = cx->stack().getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
                                                                fun, fun->script(), &flags,
                                                                f.entryfp, &f.stackLimit);
    if (!newfp) {
        /*
         * The PC is not coherent with the current frame, so fix it up for
         * exception handling.
         */
        f.regs.pc = f.jit()->nativeToPC(ncode);
        THROWV(NULL);
    }

    /* Reset the part of the stack frame set by the caller. */
    newfp->initCallFrameCallerHalf(cx, flags, ncode);

    /* Reset the part of the stack frame set by the prologue up to now. */
    newfp->initCallFrameEarlyPrologue(fun, nactual);

    /* The caller takes care of assigning fp to regs. */
    return newfp;
}
/*
 * Invoke eval. A direct call to the builtin eval for this scope takes the
 * DirectEval path; anything else is an ordinary invocation. Either way the
 * result value is fed to type inference.
 */
void JS_FASTCALL
stubs::Eval(VMFrame &f, uint32_t argc)
{
    JSContext *cx = f.cx;
    CallArgs args = CallArgsFromSp(argc, f.regs.sp);

    bool isBuiltinEval = IsBuiltinEvalForScope(f.fp()->scopeChain(), args.calleev());
    if (!isBuiltinEval) {
        /* Not really eval: perform a normal call. */
        if (!InvokeKernel(cx, args))
            THROW();
        types::TypeScript::Monitor(cx, f.script(), f.pc(), args.rval());
        return;
    }

    JS_ASSERT(f.fp() == cx->fp());
    if (!DirectEval(cx, args))
        THROW();

    types::TypeScript::Monitor(cx, f.script(), f.pc(), args.rval());
}
/*
 * Push an inline frame for a scripted call without going through the call IC.
 * On success, *pret holds the JIT entry to jump to (or NULL when the callee
 * was fully run in the interpreter here). *unjittable is set when compilation
 * aborted, so callers can stop trying to JIT this callee.
 */
static inline bool
UncachedInlineCall(VMFrame &f, MaybeConstruct construct, void **pret, bool *unjittable,
                   uint32 argc)
{
    JSContext *cx = f.cx;
    CallArgs args = CallArgsFromSp(argc, f.regs.sp);
    JSObject &callee = args.callee();
    JSFunction *newfun = callee.getFunctionPrivate();
    JSScript *newscript = newfun->script();

    /* Get pointer to new frame/slots, prepare arguments. */
    if (!cx->stack.pushInlineFrame(cx, f.regs, args, callee, newfun, newscript,
                                   construct, &f.stackLimit))
        return false;

    /* Scope with a call object parented by callee's parent. */
    if (newfun->isHeavyweight() && !js::CreateFunCallObject(cx, f.fp()))
        return false;

    /* Try to compile if not already compiled. */
    if (newscript->getJITStatus(f.fp()->isConstructing()) == JITScript_None) {
        CompileStatus status = CanMethodJIT(cx, newscript, f.fp(), CompileRequest_Interpreter);
        if (status == Compile_Error) {
            /* A runtime exception was thrown, get out. */
            InlineReturn(f);
            return false;
        }
        if (status == Compile_Abort)
            *unjittable = true;
    }

    /* If newscript was successfully compiled, run it. */
    if (JITScript *jit = newscript->getJIT(f.fp()->isConstructing())) {
        *pret = jit->invokeEntry;
        return true;
    }

    /* Otherwise, run newscript in the interpreter. */
    bool ok = !!Interpret(cx, cx->fp());
    InlineReturn(f);

    *pret = NULL;
    return ok;
}
/*
 * Create the |this| object for a constructing call, using the given
 * prototype, and store it in the frame's this-slot. Throws on OOM/failure.
 */
void JS_FASTCALL
stubs::CreateThis(VMFrame &f, JSObject *proto)
{
    JSContext *cx = f.cx;
    StackFrame *fp = f.fp();

    RootedObject callee(cx, &fp->callee());
    JSObject *thisObj = js_CreateThisForFunctionWithProto(cx, callee, proto);
    if (!thisObj)
        THROW();

    fp->thisValue() = ObjectValue(*thisObj);
}
/*
 * Create the |this| object for a constructing call, using the given
 * prototype, and store it in the slot before the formals. Throws on failure.
 */
void JS_FASTCALL
stubs::CreateThis(VMFrame &f, JSObject *proto)
{
    JSContext *cx = f.cx;
    StackFrame *fp = f.fp();

    RootedVarObject callee(cx, &fp->callee());
    JSObject *thisObj = js_CreateThisForFunctionWithProto(cx, callee, proto);
    if (!thisObj)
        THROW();

    /* The this-value lives immediately before the formal arguments. */
    fp->formalArgs()[-1].setObject(*thisObj);
}
/*
 * Complete the partially-pushed frame for an uncompiled callee, try to
 * method-JIT it, and either return its JIT entry point or interpret it here
 * (returning NULL). Throws (returning NULL) on error.
 */
void * JS_FASTCALL
stubs::CompileFunction(VMFrame &f, uint32 nactual)
{
    /*
     * We have a partially constructed frame. That's not really good enough to
     * compile though because we could throw, so get a full, adjusted frame.
     */
    JSContext *cx = f.cx;
    JSStackFrame *fp = f.fp();

    /*
     * Since we can only use members set by initCallFrameCallerHalf,
     * we must carefully extract the callee from the nactual.
     */
    JSObject &callee = fp->formalArgsEnd()[-(int(nactual) + 2)].toObject();
    JSFunction *fun = callee.getFunctionPrivate();
    JSScript *script = fun->script();

    /*
     * FixupArity/RemovePartialFrame expect to be called after the early
     * prologue.
     */
    fp->initCallFrameEarlyPrologue(fun, nactual);

    /* Mismatched arity: move/rebuild the frame with the right argc. */
    if (nactual != fp->numFormalArgs()) {
        fp = (JSStackFrame *)FixupArity(f, nactual);
        if (!fp)
            return NULL;
    }

    /* Finish frame initialization. */
    fp->initCallFrameLatePrologue();

    /* These would have been initialized by the prologue. */
    f.regs.fp = fp;
    f.regs.sp = fp->base();
    f.regs.pc = script->code;

    if (fun->isHeavyweight() && !js::CreateFunCallObject(cx, fp))
        THROWV(NULL);

    CompileStatus status = CanMethodJIT(cx, script, fp, CompileRequest_JIT);
    if (status == Compile_Okay)
        return script->getJIT(fp->isConstructing())->invokeEntry;

    /* Function did not compile... interpret it. */
    JSBool ok = Interpret(cx, fp);
    InlineReturn(f);

    if (!ok)
        THROWV(NULL);

    return NULL;
}
/*
 * Abandon the partially-pushed frame and redo the call through the uncached
 * path matching how the frame was being pushed (new / lowered / plain call).
 */
void * JS_FASTCALL
stubs::CompileFunction(VMFrame &f, uint32_t argc)
{
    /*
     * Note: the stubRejoin kind for the frame was written before the call, and
     * needs to be cleared out on all return paths (doing this directly in the
     * IC stub will not handle cases where we recompiled or threw).
     */
    JS_ASSERT_IF(f.cx->typeInferenceEnabled(), f.stubRejoin);
    ResetStubRejoin reset(f);

    InitialFrameFlags initial = f.fp()->initialFlags();
    f.regs.popPartialFrame((Value *)f.fp());

    /* Dispatch to whichever uncached entry matches the original push. */
    if (InitialFrameFlagsAreConstructing(initial))
        return UncachedNew(f, argc);
    if (InitialFrameFlagsAreLowered(initial))
        return UncachedLoweredCall(f, argc);
    return UncachedCall(f, argc);
}
/*
 * Called when execution crosses into a script chunk that has not been
 * compiled yet. Tries to compile the target chunk, then redirects this stub's
 * return address to the interpoline so execution resumes via the rejoin path.
 */
void JS_FASTCALL
stubs::CrossChunkShim(VMFrame &f, void *edge_)
{
    DebugOnly<CrossChunkEdge*> edge = (CrossChunkEdge *) edge_;

    mjit::ExpandInlineFrames(f.cx->compartment);

    JSScript *script = f.script();
    JS_ASSERT(edge->target < script->length);
    JS_ASSERT(script->code + edge->target == f.pc());

    CompileStatus status = CanMethodJIT(f.cx, script, f.pc(), f.fp()->isConstructing(),
                                        CompileRequest_Interpreter);
    if (status == Compile_Error)
        THROW();

    /* Return through the interpoline; resume at the current pc. */
    void **addr = f.returnAddressLocation();
    *addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);

    f.fp()->setRejoin(StubRejoin(REJOIN_RESUME));
}
/*
 * Clean up a frame and return.
 *
 * Stores the callee frame's return value just below the caller's new stack
 * pointer, then pops the inline frame. Must not be called on the entry frame.
 */
static void
InlineReturn(VMFrame &f)
{
    JSContext *cx = f.cx;
    JSStackFrame *fp = f.regs.fp;

    JS_ASSERT(f.fp() != f.entryfp);
    JS_ASSERT(!js_IsActiveWithOrBlock(cx, &fp->scopeChain(), 0));

    /* After the pop, sp == newsp, so the rval ends up at sp[-1]. */
    Value *newsp = fp->actualArgs() - 1;
    newsp[-1] = fp->returnValue();
    cx->stack().popInlineFrame(cx, fp->prev(), newsp);
}
/*
 * Refresh f.stackLimit for the frame being pushed. On failure, unwinds the
 * partially-constructed frame, reports over-recursion, and returns false.
 */
static inline bool
CheckStackQuota(VMFrame &f)
{
    JS_ASSERT(f.regs.sp == f.fp()->base());

    f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR);
    if (!f.stackLimit) {
        /* Remove the current partially-constructed frame before throwing. */
        f.cx->stack.popFrameAfterOverflow();
        js_ReportOverRecursed(f.cx);
        return false;
    }

    return true;
}
/*
 * Slow path for a global-name write IC: look the property up on the global,
 * update the IC's fast path, then perform the store through the stub.
 */
void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSObject *global = f.fp()->scopeChain().getGlobal();
    JSScript *script = f.script();
    JSAtom *atom = script->getAtom(GET_INDEX(f.pc()));
    const Shape *shape = global->nativeLookup(f.cx, ATOM_TO_JSID(atom));

    LookupStatus status = UpdateSetGlobalName(f, ic, global, shape);
    if (status == Lookup_Error)
        THROW();

    /* Perform the store this time through the generic stub. */
    if (ic->usePropertyCache)
        STRICT_VARIANT(stubs::SetGlobalName)(f, atom);
    else
        STRICT_VARIANT(stubs::SetGlobalNameNoCache)(f, atom);
}
/*
 * Slow path for a global-name write IC: look the property up on the global,
 * update the IC's fast path (unless a recompilation occurred underneath us),
 * then perform the store through the stub.
 */
void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSContext *cx = f.cx;

    RootedObject obj(cx, &f.fp()->global());
    RootedPropertyName name(cx, f.script()->getName(GET_UINT32_INDEX(f.pc())));

    RecompilationMonitor monitor(cx);

    Shape *shape = obj->nativeLookup(cx, NameToId(name));

    if (!monitor.recompiled()) {
        LookupStatus status = UpdateSetGlobalName(f, ic, obj, shape);
        if (status == Lookup_Error)
            THROW();
    }

    /* Perform the store this time through the generic stub. */
    STRICT_VARIANT(f.script(), stubs::SetGlobalName)(f, name);
}
/*
 * Slow path for a global-name read IC: look the name up on the global object
 * and, when the property is a plain data slot, patch the IC's inline fast
 * path (shape guard and slot load) so future reads skip this stub. Falls
 * back to the generic Name stub when the IC cannot be specialized or when a
 * recompilation occurred during the lookup.
 */
void JS_FASTCALL
ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    AssertCanGC();
    RootedObject obj(f.cx, &f.fp()->global());
    PropertyName *name = f.script()->getName(GET_UINT32_INDEX(f.pc()));

    RecompilationMonitor monitor(f.cx);

    uint32_t slot;
    {
        RootedShape shape(f.cx, obj->nativeLookup(f.cx, NameToId(name)));

        /* The IC may no longer exist after a recompilation; do not patch. */
        if (monitor.recompiled()) {
            stubs::Name(f);
            return;
        }

        if (!shape || !shape->hasDefaultGetter() || !shape->hasSlot()) {
            /* Not a plain data property: point the IC at the fallback. */
            if (shape)
                PatchGetFallback(f, ic);
            stubs::Name(f);
            return;
        }
        slot = shape->slot();

        /* Patch shape guard. */
        Repatcher repatcher(f.chunk());
        repatcher.repatch(ic->fastPathStart.dataLabelPtrAtOffset(ic->shapeOffset), obj->lastProperty());

        /* Patch loads. */
        uint32_t index = obj->dynamicSlotIndex(slot);
        JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
        repatcher.patchAddressOffsetForValueLoad(label, index * sizeof(Value));
    }

    /* Do load anyway... this time. */
    stubs::Name(f);
}
/*
 * Slow path for a global-name write IC: look the property up on the global,
 * update the IC's fast path (unless a recompilation occurred underneath us),
 * then perform the store through the stub.
 */
void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSObject &global = f.fp()->global();
    JSScript *script = f.script();
    PropertyName *name = script->getName(GET_UINT32_INDEX(f.pc()));

    RecompilationMonitor monitor(f.cx);

    const Shape *shape = global.nativeLookup(f.cx, NameToId(name));

    if (!monitor.recompiled()) {
        LookupStatus status = UpdateSetGlobalName(f, ic, &global, shape);
        if (status == Lookup_Error)
            THROW();
    }

    /* Perform the store this time through the generic stub. */
    STRICT_VARIANT(stubs::SetGlobalName)(f, name);
}