/*
 * Lua __gc metamethod for a hop-loop userdata.
 *
 * checkLoop() extracts the snHopLoop* from the Lua stack; closeLoop()
 * is called first so the loop is shut down before its members are
 * released. Only the api/state members are freed here — the snHopLoop
 * struct itself is userdata memory owned and reclaimed by Lua's GC,
 * so freeing it here would be a double free.
 *
 * NOTE(review): assumes closeLoop() does not itself free hloop->api or
 * hloop->state — confirm against its definition.
 */
static int hop_gc(lua_State *L) {
    snHopLoop *hloop = checkLoop(L);
    closeLoop(hloop);
    /* free only those members; hloop itself is freed by Lua */
    free(hloop->api);
    free(hloop->state);
    return 0;
}
/**
 * Advance the constraint set by one time step.
 *
 * Integrates the constraint coordinates (m_chi) with the current rates
 * (m_chidot) over the elapsed real time, stores the new externally
 * measured pose, refreshes the feature Jacobian, and then iterates
 * closeLoop() to re-close the kinematic loop if the pose update left
 * it open.
 *
 * @param _external_pose  externally measured pose for this cycle
 * @param timestamp       carries realTimestep, the elapsed time [s]
 */
void ConstraintSet::modelUpdate(Frame& _external_pose, const Timestamp& timestamp)
{
    // Integrate constraint coordinates: chi += chidot * dt.
    m_chi += m_chidot * timestamp.realTimestep;

    // Update the internal pose and Jf.
    m_externalPose = _external_pose;
    updateJacobian();

    // Check if the loop is already closed; if not, update the pose and Jf.
    // Use the configured m_maxIter bound (as initialise() does) rather than
    // a hard-coded iteration count, so both convergence loops stay
    // consistent with the class's configuration.
    unsigned int iter = 0;
    while (iter < m_maxIter && !closeLoop())
        iter++;
}
/**
 * Initialise the constraint set from an externally supplied pose.
 *
 * Stores the pose, computes the current feature Jacobian Jf, then
 * repeatedly attempts to close the kinematic loop, giving up after
 * m_maxIter attempts.
 *
 * @param init_pose  initial external pose
 * @return true when closeLoop() converged within the iteration budget,
 *         false otherwise.
 */
bool ConstraintSet::initialise(Frame& init_pose)
{
    m_externalPose = init_pose;

    // Get the current Jf before trying to close the loop.
    updateJacobian();

    unsigned int attempts = 0;
    while (attempts < m_maxIter) {
        if (closeLoop())
            break;
        ++attempts;
    }

    // Converged iff we stopped before exhausting the budget.
    return attempts < m_maxIter;
}
/*
 * Record a down-recursive call: the function being traced calls itself, so
 * the trace must push a new JIT frame and close the loop back to the tree
 * entry. Emits stack/call-stack overflow guards, imports the new frame's
 * slots, adjusts the native sp/rp, and finishes with closeLoop().
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::downRecursion()
{
    JSStackFrame* fp = cx->fp;

    // The recursive callee must already have been compiled: the fragment's
    // ip has to lie inside the current (callee) script's bytecode.
    if ((jsbytecode*)fragment->ip < fp->script->code ||
        (jsbytecode*)fragment->ip >= fp->script->code + fp->script->length) {
        RETURN_STOP_A("inner recursive call must compile first");
    }

    /* Adjust the stack by the budget the down-frame needs. */
    int slots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
    // Sanity: the delta equals one full frame minus callee/this and the
    // caller's fixed slots (see NativeStackSlots for the layout).
    JS_ASSERT(unsigned(slots) == NativeStackSlots(cx, 1) - fp->argc - 2 - fp->script->nfixed - 2);

    /* Guard that there is enough stack space. */
    JS_ASSERT(tree->maxNativeStackSlots >= tree->nativeStackBase / sizeof(double));
    int guardSlots = slots + tree->maxNativeStackSlots -
                     tree->nativeStackBase / sizeof(double);
    LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double)));
    guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), OOM_EXIT);

    /* Guard that there is enough call stack space. */
    LIns* rp_top = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
    guard(true, lir->ins2(LIR_plt, rp_top, eor_ins), OOM_EXIT);

    /*
     * For every slot in the new frame that is not in the tracker, create a load
     * in the tracker. This is necessary because otherwise snapshot() will see
     * missing imports and use the down frame, rather than the new frame.
     * This won't affect performance because the loads will be killed if not
     * used.
     */
    ImportFrameSlotsVisitor visitor(*this);
    VisitStackSlots(visitor, cx, callDepth);

    /* Add space for a new JIT frame: bump sp/rp and publish them to state. */
    lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(slots * sizeof(double)));
    lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
    lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
    lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));

    // The recorder now tracks the callee frame, not the caller.
    --callDepth;
    clearCurrentFrameSlotsFromTracker(nativeFrameTracker);

    /*
     * If the callee and caller have identical call sites, this is a down-
     * recursive loop. Otherwise something special happened. For example, a
     * recursive call that is unwinding could nest back down recursively again.
     * In this case, we build a fragment that ideally we'll never invoke
     * directly, but link from a down-recursive branch. The UNLINKED_EXIT tells
     * closeLoop() that the peer trees should match the recursive pc, not the
     * tree pc.
     */
    VMSideExit* exit;
    if ((jsbytecode*)fragment->root->ip == fp->script->code)
        exit = snapshot(UNSTABLE_LOOP_EXIT);
    else
        exit = snapshot(RECURSIVE_UNLINKED_EXIT);
    exit->recursive_pc = fp->script->code;
    debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n");
    JS_ASSERT(tree->recursion != Recursion_Disallowed);
    tree->recursion = Recursion_Detected;
    return closeLoop(exit);
}
/*
 * Record the "slurp" path of up-recursion: pop one interpreter frame and
 * copy ("slurp") every slot of the frame below it — callee, this, args,
 * argsobj, scope chain, fixed vars and stack values — from interpreter
 * memory onto the native trace stack, guarding on each unbox. Ends by
 * closing the loop against the down frame.
 *
 * @param return_pc  pc of the JSOP_CALL in the down frame that this
 *                   recursion level will return to.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
{
    /* Missing - no go: only exact argc == nargs frames can be slurped. */
    if (cx->fp->argc != cx->fp->fun->nargs)
        RETURN_STOP_A("argc != nargs");

    LIns* argv_ins;
    unsigned frameDepth;
    unsigned downPostSlots;

    JSStackFrame* fp = cx->fp;
    LIns* fp_ins = addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)), "fp");

    /*
     * When first emitting slurp code, do so against the down frame. After
     * popping the interpreter frame, it is illegal to resume here, as the
     * down frame has been moved up. So all this code should be skipped if
     * anchoring off such an exit.
     */
    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
        fp_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, down)), "downFp");
        fp = fp->down;

        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");

        /* If recovering from a SLURP_MISMATCH, all of this is unnecessary. */
        if (!anchor || anchor->exitType != RECURSIVE_SLURP_MISMATCH_EXIT) {
            /* fp->down should not be NULL. */
            guard(false, lir->ins_peq0(fp_ins), RECURSIVE_LOOP_EXIT);

            /* fp->down->argv should not be NULL. */
            guard(false, lir->ins_peq0(argv_ins), RECURSIVE_LOOP_EXIT);

            /*
             * Guard on the script being the same. This might seem unnecessary,
             * but it lets the recursive loop end cleanly if it doesn't match.
             * With only the pc check, it is harder to differentiate between
             * end-of-recursion and recursion-returns-to-different-pc.
             */
            guard(true,
                  lir->ins2(LIR_peq,
                            addName(lir->insLoad(LIR_ldp, fp_ins,
                                                 offsetof(JSStackFrame, script)),
                                    "script"),
                            INS_CONSTPTR(cx->fp->down->script)),
                  RECURSIVE_LOOP_EXIT);
        }

        /* fp->down->regs->pc should be == pc. */
        guard(true,
              lir->ins2(LIR_peq,
                        lir->insLoad(LIR_ldp,
                                     addName(lir->insLoad(LIR_ldp, fp_ins,
                                                          offsetof(JSStackFrame, regs)),
                                             "regs"),
                                     offsetof(JSFrameRegs, pc)),
                        INS_CONSTPTR(return_pc)),
              RECURSIVE_SLURP_MISMATCH_EXIT);

        /* fp->down->argc should be == argc. */
        guard(true,
              lir->ins2(LIR_eq,
                        addName(lir->insLoad(LIR_ld, fp_ins,
                                             offsetof(JSStackFrame, argc)),
                                "argc"),
                        INS_CONST(cx->fp->argc)),
              MISMATCH_EXIT);

        /* Pop the interpreter frame. */
        LIns* args[] = { lirbuf->state, cx_ins };
        guard(false, lir->ins_eq0(lir->insCall(&js_PopInterpFrame_ci, args)), MISMATCH_EXIT);

        /* Compute slots for the down frame. */
        downPostSlots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
        frameDepth = 1;
    } else {
        /* Note: loading argv from fp, not fp->down. */
        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");

        /* Slots for this frame, minus the return value. */
        downPostSlots = NativeStackSlots(cx, 0) - 1;
        frameDepth = 0;
    }

    /*
     * This is a special exit used as a template for the stack-slurping code.
     * LeaveTree will ignore all but the final slot, which contains the return
     * value. The slurpSlot variable keeps track of the last slot that has been
     * unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit.
     */
    unsigned numGlobalSlots = tree->globalSlots->length();
    unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots;
    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
    VMSideExit* exit = (VMSideExit*)
        traceMonitor->traceAlloc->alloc(sizeof(VMSideExit) + sizeof(TraceType) * safeSlots);
    memset(exit, 0, sizeof(VMSideExit));
    exit->pc = (jsbytecode*)recursive_pc;
    exit->from = fragment;
    exit->exitType = RECURSIVE_SLURP_FAIL_EXIT;
    exit->numStackSlots = downPostSlots + 1;
    exit->numGlobalSlots = numGlobalSlots;
    exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
    exit->recursive_pc = recursive_pc;

    /*
     * Build the exit typemap. This may capture extra types, but they are
     * thrown away. Temporarily swing regs->pc to the exit pc so the capture
     * sees the post-return stack, then restore it.
     */
    TraceType* typeMap = exit->stackTypeMap();
    jsbytecode* oldpc = cx->fp->regs->pc;

    cx->fp->regs->pc = exit->pc;
    CaptureStackTypes(cx, frameDepth, typeMap);
    cx->fp->regs->pc = oldpc;
    // The return-value slot: take its type from the live stack value, or —
    // when resuming from a slurp failure — from the anchor's last slot,
    // since the value has already been moved.
    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT)
        typeMap[downPostSlots] = determineSlotType(&stackval(-1));
    else
        typeMap[downPostSlots] = anchor->stackTypeMap()[anchor->numStackSlots - 1];
    determineGlobalTypes(&typeMap[exit->numStackSlots]);
#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif

    /*
     * Return values are tricky because there are two cases. Anchoring off a
     * slurp failure (the second case) means the return value has already been
     * moved. However it can still be promoted to link trees together, so we
     * load it from the new location.
     *
     * In all other cases, the return value lives in the tracker and it can be
     * grabbed safely.
     */
    LIns* rval_ins;
    TraceType returnType = exit->stackTypeMap()[downPostSlots];
    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
        rval_ins = get(&stackval(-1));
        if (returnType == TT_INT32) {
            JS_ASSERT(determineSlotType(&stackval(-1)) == TT_INT32);
            JS_ASSERT(isPromoteInt(rval_ins));
            rval_ins = demote(lir, rval_ins);
        }
        /*
         * The return value must be written out early, before slurping can fail,
         * otherwise it will not be available when there's a type mismatch.
         */
        lir->insStorei(rval_ins, lirbuf->sp, exit->sp_adj - sizeof(double));
    } else {
        // Reload the already-moved return value with a load sized by its type.
        switch (returnType) {
          case TT_PSEUDOBOOLEAN:
          case TT_INT32:
            rval_ins = lir->insLoad(LIR_ld, lirbuf->sp, exit->sp_adj - sizeof(double));
            break;
          case TT_DOUBLE:
            rval_ins = lir->insLoad(LIR_ldf, lirbuf->sp, exit->sp_adj - sizeof(double));
            break;
          case TT_FUNCTION:
          case TT_OBJECT:
          case TT_STRING:
          case TT_NULL:
            rval_ins = lir->insLoad(LIR_ldp, lirbuf->sp, exit->sp_adj - sizeof(double));
            break;
          default:
            JS_NOT_REACHED("unknown type");
            RETURN_STOP_A("unknown type");
        }
    }

    /* Slurp: unbox each slot of the down frame onto the native stack. */
    SlurpInfo info;
    info.curSlot = 0;
    info.exit = exit;
    info.typeMap = typeMap;
    // Resume after the last successfully-unboxed slot when re-entering from
    // a SLURP_FAIL exit; otherwise start from slot 0.
    info.slurpFailSlot = (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) ?
                         anchor->slurpFailSlot : 0;

    /* callee */
    slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -2 * ptrdiff_t(sizeof(jsval))),
              &fp->argv[-2], &info);
    /* this */
    slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -1 * ptrdiff_t(sizeof(jsval))),
              &fp->argv[-1], &info);
    /* args[0..n] */
    for (unsigned i = 0; i < JS_MAX(fp->argc, fp->fun->nargs); i++)
        slurpSlot(lir->insLoad(LIR_ldp, argv_ins, i * sizeof(jsval)), &fp->argv[i], &info);
    /* argsobj */
    slurpSlot(addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argsobj)), "argsobj"),
              &fp->argsobj, &info);
    /* scopeChain */
    slurpSlot(addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, scopeChain)), "scopeChain"),
              (jsval*) &fp->scopeChain, &info);
    /* vars */
    LIns* slots_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, slots)), "slots");
    for (unsigned i = 0; i < fp->script->nfixed; i++)
        slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval)), &fp->slots[i], &info);
    /* stack vals */
    unsigned nfixed = fp->script->nfixed;
    jsval* stack = StackBase(fp);
    LIns* stack_ins = addName(lir->ins2(LIR_piadd, slots_ins, INS_CONSTWORD(nfixed * sizeof(jsval))),
                              "stackBase");
    size_t limit = size_t(fp->regs->sp - StackBase(fp));
    // Exclude slots that were already consumed: just the return value when
    // anchoring off a slurp failure, otherwise callee/this/args of the call.
    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)
        limit--;
    else
        limit -= fp->fun->nargs + 2;
    for (size_t i = 0; i < limit; i++)
        slurpSlot(lir->insLoad(LIR_ldp, stack_ins, i * sizeof(jsval)), &stack[i], &info);

    JS_ASSERT(info.curSlot == downPostSlots);

    /* Jump back to the start */
    exit = copy(exit);
    exit->exitType = UNSTABLE_LOOP_EXIT;
#if defined JS_JIT_SPEW
    TreevisLogExit(cx, exit);
#endif

    // Build the slot map used to type-check the loop edge: all down-frame
    // slots plus the return value, then the globals.
    RecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
    for (unsigned i = 0; i < downPostSlots; i++)
        slotMap.addSlot(typeMap[i]);
    slotMap.addSlot(&stackval(-1), typeMap[downPostSlots]);
    VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
    debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n");
    exit = copy(exit);
    // If the recursion target is this tree's own entry pc, the loop can
    // close on itself; otherwise leave an unlinked exit for peer matching.
    if (exit->recursive_pc == fragment->root->ip)
        exit->exitType = UNSTABLE_LOOP_EXIT;
    else
        exit->exitType = RECURSIVE_UNLINKED_EXIT;
    debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
                      getExitName(exit->exitType));
    JS_ASSERT(tree->recursion >= Recursion_Unwinds);
    return closeLoop(slotMap, exit);
}
/*
 * Record an up-recursive return: the traced function is returning to a
 * JSOP_CALL of itself in the down frame. Builds/guards the FrameInfo for
 * the down frame, guards that more recursive frames remain on the call
 * stack, and closes the loop with the return value as the sole new stack
 * type. Dispatches to slurpDownFrames() when anchoring off an exit that
 * already did this work.
 */
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::upRecursion()
{
    JS_ASSERT((JSOp)*cx->fp->down->regs->pc == JSOP_CALL);
    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->down->script,
              cx->fp->down->regs->pc)].length == JSOP_CALL_LENGTH);

    JS_ASSERT(callDepth == 0);

    /*
     * If some operation involving interpreter frame slurping failed, go to
     * that code right away, and don't bother with emitting the up-recursive
     * guards again.
     */
    if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT ||
        anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT ||
        anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) {
        return slurpDownFrames(cx->fp->down->regs->pc);
    }

    jsbytecode* return_pc = cx->fp->down->regs->pc;
    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;

    /*
     * It is possible that the down frame isn't the same at runtime. It's not
     * enough to guard on the PC, since the typemap could be different as well.
     * To deal with this, guard that the FrameInfo on the callstack is 100%
     * identical.
     *
     * Note that though the counted slots is called "downPostSlots", this is
     * the number of slots after the CALL instruction has theoretically popped
     * callee/this/argv, but before the return value is pushed. This is
     * intended since the FrameInfo pushed by down recursion would not have
     * the return value yet. Instead, when closing the loop, the return value
     * becomes the sole stack type that deduces type stability.
     */
    unsigned totalSlots = NativeStackSlots(cx, 1);
    unsigned downPostSlots = totalSlots - NativeStackSlots(cx, 0);
    FrameInfo* fi = (FrameInfo*)alloca(sizeof(FrameInfo) + totalSlots * sizeof(TraceType));
    fi->block = NULL;
    fi->pc = (jsbytecode*)return_pc;
    fi->imacpc = NULL;

    /*
     * Need to compute this from the down frame, since the stack could have
     * moved on this one.
     */
    fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots;
    JS_ASSERT(cx->fp->argc == cx->fp->down->argc);
    fi->set_argc(cx->fp->argc, false);
    fi->callerHeight = downPostSlots;
    fi->callerArgc = cx->fp->down->argc;

    if (anchor && anchor->exitType == RECURSIVE_MISMATCH_EXIT) {
        /*
         * Case 0: Anchoring off a RECURSIVE_MISMATCH guard. Guard on this FrameInfo.
         * This is always safe because this point is only reached on simple "call myself"
         * recursive functions.
         */
#if defined DEBUG
        AssertDownFrameIsConsistent(cx, anchor, fi);
#endif
        fi = anchor->recursive_down;
    } else if (recursive_pc != fragment->root->ip) {
        /*
         * Case 1: Guess that down-recursion has to started back out, infer types
         * from the down frame.
         */
        CaptureStackTypes(cx, 1, fi->get_typemap());
    } else {
        /* Case 2: Guess that up-recursion is backing out, infer types from our Tree. */
        JS_ASSERT(tree->nStackTypes == downPostSlots + 1);
        TraceType* typeMap = fi->get_typemap();
        for (unsigned i = 0; i < downPostSlots; i++)
            typeMap[i] = tree->typeMap[i];
    }

    // Intern the stack-allocated FrameInfo so the guard below can compare
    // against a stable pointer.
    fi = traceMonitor->frameCache->memoize(fi);

    /*
     * Guard that there are more recursive frames. If coming from an anchor
     * where this was already computed, don't bother doing it again.
     */
    if (!anchor || anchor->exitType != RECURSIVE_MISMATCH_EXIT) {
        VMSideExit* exit = snapshot(RECURSIVE_EMPTY_RP_EXIT);

        /* Guard that rp >= sr + 1 */
        guard(true,
              lir->ins2(LIR_pge, lirbuf->rp,
                        lir->ins2(LIR_piadd,
                                  lir->insLoad(LIR_ldp, lirbuf->state,
                                               offsetof(InterpState, sor)),
                                  INS_CONSTWORD(sizeof(FrameInfo*)))),
              exit);
    }

    debug_only_printf(LC_TMRecorder, "guardUpRecursive fragment->root=%p fi=%p\n",
                      (void*)fragment->root, (void*)fi);

    /* Guard that the FrameInfo above is the same FrameInfo pointer. */
    VMSideExit* exit = snapshot(RECURSIVE_MISMATCH_EXIT);
    LIns* prev_rp = lir->insLoad(LIR_ldp, lirbuf->rp, -int32_t(sizeof(FrameInfo*)));
    guard(true, lir->ins2(LIR_peq, prev_rp, INS_CONSTPTR(fi)), exit);

    /*
     * Now it's time to try and close the loop. Get a special exit that points
     * at the down frame, after the return has been propagated up.
     */
    exit = downSnapshot(fi);

    // NOTE: the NULL arm of this ternary is unreachable here — a
    // RECURSIVE_SLURP_FAIL_EXIT anchor was dispatched to slurpDownFrames()
    // at the top of this function — which is why the assert below is safe.
    LIns* rval_ins = (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) ?
                     get(&stackval(-1)) :
                     NULL;
    JS_ASSERT(rval_ins != NULL);
    TraceType returnType = exit->stackTypeMap()[downPostSlots];
    if (returnType == TT_INT32) {
        JS_ASSERT(determineSlotType(&stackval(-1)) == TT_INT32);
        JS_ASSERT(isPromoteInt(rval_ins));
        rval_ins = demote(lir, rval_ins);
    }

    // Map all down-frame slots plus the return value, then the globals.
    UpRecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
    for (unsigned i = 0; i < downPostSlots; i++)
        slotMap.addSlot(exit->stackType(i));
    slotMap.addSlot(&stackval(-1));
    VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
    if (recursive_pc == (jsbytecode*)fragment->root->ip) {
        debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n");
    } else {
        debug_only_print0(LC_TMTracer, "Compiling up-recursive branch...\n");
        exit->exitType = RECURSIVE_UNLINKED_EXIT;
        exit->recursive_pc = recursive_pc;
    }
    JS_ASSERT(tree->recursion != Recursion_Disallowed);
    // Don't downgrade a stronger Recursion_Detected state.
    if (tree->recursion != Recursion_Detected)
        tree->recursion = Recursion_Unwinds;
    return closeLoop(slotMap, exit);
}