// Recover the result of an MNewObject by re-running the object allocation
// on the interpreter side, mirroring CodeGenerator::visitNewObjectVMCall.
// Returns false on allocation failure (OOM).
bool RNewObject::recover(JSContext *cx, SnapshotIterator &iter) const {
    RootedObject templateObject(cx, &iter.read().toObject());
    RootedValue result(cx);

    // Use AutoEnterAnalysis to avoid invoking the object metadata callback
    // while bailing out, which could try to walk the stack.
    types::AutoEnterAnalysis enter(cx);

    // See CodeGenerator::visitNewObjectVMCall
    JSObject *obj = templateObjectIsClassPrototype_
                        ? NewInitObjectWithClassPrototype(cx, templateObject)
                        : NewInitObject(cx, templateObject);
    if (!obj)
        return false;

    result.setObject(*obj);
    iter.storeInstructionResult(result);
    return true;
}
// Recover the result of an MMul by evaluating the multiplication with the
// interpreter helpers.  mode_ selects between a normal JS multiply and the
// Math.imul (integer) form.  Returns false if evaluation fails.
bool RMul::recover(JSContext* cx, SnapshotIterator& iter) const {
    RootedValue lhs(cx, iter.read());
    RootedValue rhs(cx, iter.read());
    RootedValue result(cx);

    const bool normalMode = MMul::Mode(mode_) == MMul::Normal;
    if (normalMode) {
        if (!js::MulValues(cx, &lhs, &rhs, &result))
            return false;
        // MIRType::Float32 is a specialization embedding the fact that the
        // result is rounded to a Float32.
        if (isFloatOperation_) {
            if (!RoundFloat32(cx, result, &result))
                return false;
        }
    } else {
        MOZ_ASSERT(MMul::Mode(mode_) == MMul::Integer);
        if (!js::math_imul_handle(cx, lhs, rhs, &result))
            return false;
    }

    iter.storeInstructionResult(result);
    return true;
}
// Recover the result of an MNewObject by dispatching on the allocation mode,
// matching the VM calls emitted by CodeGenerator::visitNewObjectVMCall.
// Returns false on allocation failure (OOM).
bool RNewObject::recover(JSContext* cx, SnapshotIterator& iter) const {
    RootedObject templateObject(cx, &iter.read().toObject());
    RootedValue result(cx);
    JSObject* obj = nullptr;

    // See CodeGenerator::visitNewObjectVMCall
    switch (mode_) {
      case MNewObject::ObjectLiteral:
        obj = NewObjectOperationWithTemplate(cx, templateObject);
        break;

      case MNewObject::ObjectCreate:
        obj = ObjectCreateWithTemplate(cx, templateObject.as<PlainObject>());
        break;
    }

    if (!obj)
        return false;

    result.setObject(*obj);
    iter.storeInstructionResult(result);
    return true;
}
void StackFrame::initFromBailout(JSContext *cx, SnapshotIterator &iter) { AutoAssertNoGC nogc; uint32 exprStackSlots = iter.slots() - script()->nfixed; #ifdef TRACK_SNAPSHOTS iter.spewBailingFrom(); #endif IonSpew(IonSpew_Bailouts, " expr stack slots %u, is function frame %u", exprStackSlots, isFunctionFrame()); if (iter.bailoutKind() == Bailout_ArgumentCheck) { // Temporary hack -- skip the (unused) scopeChain, because it could be // bogus (we can fail before the scope chain slot is set). Strip the // hasScopeChain flag and we'll check this later to run prologue(). iter.skip(); flags_ &= ~StackFrame::HAS_SCOPECHAIN; } else { Value v = iter.read(); if (v.isObject()) { scopeChain_ = &v.toObject(); flags_ |= StackFrame::HAS_SCOPECHAIN; if (isFunctionFrame() && fun()->isHeavyweight()) flags_ |= StackFrame::HAS_CALL_OBJ; } else { JS_ASSERT(v.isUndefined()); } } // Assume that all new stack frames have had their entry flag set if // profiling has been turned on. This will be corrected if necessary // elsewhere. if (cx->runtime->spsProfiler.enabled()) setPushedSPSFrame(); if (isFunctionFrame()) { Value thisv = iter.read(); formals()[-1] = thisv; // The new |this| must have already been constructed prior to an Ion // constructor running. 
if (isConstructing()) JS_ASSERT(!thisv.isPrimitive()); JS_ASSERT(iter.slots() >= CountArgSlots(fun())); IonSpew(IonSpew_Bailouts, " frame slots %u, nargs %u, nfixed %u", iter.slots(), fun()->nargs, script()->nfixed); for (uint32 i = 0; i < fun()->nargs; i++) { Value arg = iter.read(); formals()[i] = arg; } } exprStackSlots -= CountArgSlots(maybeFun()); for (uint32 i = 0; i < script()->nfixed; i++) { Value slot = iter.read(); slots()[i] = slot; } IonSpew(IonSpew_Bailouts, " pushing %u expression stack slots", exprStackSlots); FrameRegs ®s = cx->regs(); for (uint32 i = 0; i < exprStackSlots; i++) { Value v; // If coming from an invalidation bailout, and this is the topmost // value, and a value override has been specified, don't read from the // iterator. Otherwise, we risk using a garbage value. if (!iter.moreFrames() && i == exprStackSlots - 1 && cx->runtime->hasIonReturnOverride()) v = iter.skip(); else v = iter.read(); *regs.sp++ = v; } unsigned pcOff = iter.pcOffset(); regs.pc = script()->code + pcOff; if (iter.resumeAfter()) regs.pc = GetNextPc(regs.pc); IonSpew(IonSpew_Bailouts, " new PC is offset %u within script %p (line %d)", pcOff, (void *)script(), PCToLineNumber(script(), regs.pc)); JS_ASSERT(exprStackSlots == js_ReconstructStackDepth(cx, script(), regs.pc)); }
void InlineFrameIterator::dump() const { AutoAssertNoGC nogc; if (more()) fprintf(stderr, " JS frame (inlined)\n"); else fprintf(stderr, " JS frame\n"); bool isFunction = false; if (isFunctionFrame()) { isFunction = true; fprintf(stderr, " callee fun: "); #ifdef DEBUG js_DumpObject(callee()); #else fprintf(stderr, "?\n"); #endif } else { fprintf(stderr, " global frame, no callee\n"); } fprintf(stderr, " file %s line %u\n", script()->filename, (unsigned) script()->lineno); fprintf(stderr, " script = %p, pc = %p\n", (void*) script(), pc()); fprintf(stderr, " current op: %s\n", js_CodeName[*pc()]); if (!more()) { numActualArgs(); } SnapshotIterator si = snapshotIterator(); fprintf(stderr, " slots: %u\n", si.slots() - 1); for (unsigned i = 0; i < si.slots() - 1; i++) { if (isFunction) { if (i == 0) fprintf(stderr, " scope chain: "); else if (i == 1) fprintf(stderr, " this: "); else if (i - 2 < callee()->nargs) fprintf(stderr, " formal (arg %d): ", i - 2); else { if (i - 2 == callee()->nargs && numActualArgs() > callee()->nargs) { DumpOp d(callee()->nargs); forEachCanonicalActualArg(d, d.i_, numActualArgs()); } fprintf(stderr, " slot %d: ", i - 2 - callee()->nargs); } } else fprintf(stderr, " slot %u: ", i); #ifdef DEBUG js_DumpValue(si.maybeRead()); #else fprintf(stderr, "?\n"); #endif } fputc('\n', stderr); }
// Recover the result of an MRandom by drawing a fresh sample from the
// runtime's RNG.  Cannot fail, so it always returns true.
bool RRandom::recover(JSContext* cx, SnapshotIterator& iter) const {
    double sample = math_random_impl(cx);
    iter.storeInstructionResult(DoubleValue(sample));
    return true;
}
void StackFrame::initFromBailout(JSContext *cx, SnapshotIterator &iter) { uint32_t exprStackSlots = iter.slots() - script()->nfixed; #ifdef TRACK_SNAPSHOTS iter.spewBailingFrom(); #endif IonSpew(IonSpew_Bailouts, " expr stack slots %u, is function frame %u", exprStackSlots, isFunctionFrame()); if (iter.bailoutKind() == Bailout_ArgumentCheck) { // Temporary hack -- skip the (unused) scopeChain, because it could be // bogus (we can fail before the scope chain slot is set). Strip the // hasScopeChain flag. If a call object is needed, it will get handled later // by |ThunkToInterpreter| which call |EnsureHasScopeObjects|. iter.skip(); flags_ &= ~StackFrame::HAS_SCOPECHAIN; // If the script binds arguments, then skip the snapshot slot reserved to hold // its value. if (script()->argumentsHasVarBinding()) iter.skip(); flags_ &= ~StackFrame::HAS_ARGS_OBJ; } else { Value scopeChain = iter.read(); JS_ASSERT(scopeChain.isObject() || scopeChain.isUndefined()); if (scopeChain.isObject()) { scopeChain_ = &scopeChain.toObject(); flags_ |= StackFrame::HAS_SCOPECHAIN; if (isFunctionFrame() && fun()->isHeavyweight()) flags_ |= StackFrame::HAS_CALL_OBJ; } // The second slot will be an arguments object if the script needs one. if (script()->argumentsHasVarBinding()) { Value argsObj = iter.read(); JS_ASSERT(argsObj.isObject() || argsObj.isUndefined()); if (argsObj.isObject()) initArgsObj(argsObj.toObject().asArguments()); } } // Assume that all new stack frames have had their entry flag set if // profiling has been turned on. This will be corrected if necessary // elsewhere. if (cx->runtime->spsProfiler.enabled()) setPushedSPSFrame(); if (isFunctionFrame()) { Value thisv = iter.read(); formals()[-1] = thisv; // The new |this| must have already been constructed prior to an Ion // constructor running. 
if (isConstructing()) JS_ASSERT(!thisv.isPrimitive()); JS_ASSERT(iter.slots() >= CountArgSlots(script(), fun())); IonSpew(IonSpew_Bailouts, " frame slots %u, nargs %u, nfixed %u", iter.slots(), fun()->nargs, script()->nfixed); for (uint32_t i = 0; i < fun()->nargs; i++) { Value arg = iter.read(); formals()[i] = arg; } } exprStackSlots -= CountArgSlots(script(), maybeFun()); for (uint32_t i = 0; i < script()->nfixed; i++) { Value slot = iter.read(); slots()[i] = slot; } IonSpew(IonSpew_Bailouts, " pushing %u expression stack slots", exprStackSlots); FrameRegs ®s = cx->regs(); for (uint32_t i = 0; i < exprStackSlots; i++) { Value v; // If coming from an invalidation bailout, and this is the topmost // value, and a value override has been specified, don't read from the // iterator. Otherwise, we risk using a garbage value. if (!iter.moreFrames() && i == exprStackSlots - 1 && cx->runtime->hasIonReturnOverride()) v = iter.skip(); else v = iter.read(); *regs.sp++ = v; } unsigned pcOff = iter.pcOffset(); regs.pc = script()->code + pcOff; if (iter.resumeAfter()) regs.pc = GetNextPc(regs.pc); IonSpew(IonSpew_Bailouts, " new PC is offset %u within script %p (line %d)", pcOff, (void *)script(), PCToLineNumber(script(), regs.pc)); // For fun.apply({}, arguments) the reconstructStackDepth will be atleast 4, // but it could be that we inlined the funapply. In that case exprStackSlots, // will have the real arguments in the slots and not always be equal. JS_ASSERT_IF(JSOp(*regs.pc) != JSOP_FUNAPPLY, exprStackSlots == js_ReconstructStackDepth(cx, script(), regs.pc)); }