/**
 * Greedy "string pulling" smoothing of a grid path.
 *
 * Keeps extending a straight line from the last committed waypoint; when
 * line of sight (traceLine) breaks at some input point, the previous input
 * point is committed as a new waypoint.
 *
 * @param inputPath  Dense path (e.g. from A*); assumed collision-free
 *                   between consecutive points.
 * @return           Sparse path whose every segment is either verified by
 *                   traceLine() or a single step of the original path.
 */
std::vector<Eigen::Vector2i> PathPlanner::smoothPath(std::vector<Eigen::Vector2i> & inputPath)
{
    // Paths of two or fewer points cannot be smoothed further.
    if(inputPath.size() <= 2)
    {
        return inputPath;
    }

    std::vector<Eigen::Vector2i> outputPath;
    outputPath.push_back(inputPath.at(0));

    // BUG FIX: the loop previously ran while inputIndex < size() - 1, so the
    // segment from the last committed waypoint to the final point was never
    // checked with traceLine() and could cut through an obstacle. Iterating
    // through the last index closes that gap: if the line to the final point
    // is blocked, the second-to-last input point is committed first, and the
    // remaining step is an original-path segment (assumed collision-free).
    // Also switched the index to size_t to avoid a signed/unsigned mismatch.
    for(size_t inputIndex = 2; inputIndex < inputPath.size(); inputIndex++)
    {
        if(!traceLine(outputPath.back(), inputPath[inputIndex]))
        {
            outputPath.push_back(inputPath[inputIndex - 1]);
        }
    }

    outputPath.push_back(inputPath.back());

    return outputPath;
}
/* Push a new stack frame for a call to the function at functionOffset.
 * On return, *ip points at the first instruction after the OP_FUNCTION
 * header and *bp is the frame base (first parameter slot). The caller has
 * already pushed parameterCount argument values onto vm->stack; this
 * function appends zero-initialized slots for the callee's locals. */
static void initStackFrame(VM *vm, const int **ip, int *bp, int functionOffset, uint parameterCount)
{
    const int *pc = vmBytecode + functionOffset;
    int header = *pc++;

    if (DEBUG_TRACE)
    {
        traceLine(vm, functionOffset);
    }
    /* The first word at a function offset must be an OP_FUNCTION header;
     * its high bits encode the number of local-variable slots. */
    assert((header & 0xff) == OP_FUNCTION);

    *ip = pc;
    *bp = (int)(IVSize(&vm->stack) - parameterCount);
    IVGrowZero(&vm->stack, (size_t)(header >> 8));
}
/**
 * Plan a path in world coordinates from start to end.
 *
 * Strategy: if the goal is outside the free configuration space, first move
 * it to the closest reachable point; then try a direct straight-line path;
 * otherwise fall back to A* on the image grid followed by smoothing.
 *
 * @param start  Start pose in world coordinates.
 * @param end    Goal pose in world coordinates.
 * @param image  Optional debug image; if non-null, the result is drawn into it.
 * @return       Waypoints in world coordinates; empty if no path was found.
 */
std::vector<Eigen::Vector3f> PathPlanner::getPath(Eigen::Vector3f start, Eigen::Vector3f end, cv::Mat3b * image)
{
    std::vector<Eigen::Vector3f> path;
    std::vector<Eigen::Vector2i> pathImg;

    //Objects are outside the configuration space, so get the closest point
    // (Removed unused local `Eigen::Vector2i lastPoint;` — it was never read.)
    if(!validPoint(worldToImg(end)))
    {
        if(!findClosestPoint(start, end, bot_in_pixels_))
        {
            // No reachable point near the goal; give up with an empty path.
            return path;
        }
    }

    //Check if we can just go straight there unobstructed
    if(traceLine(worldToImg(start), worldToImg(end)))
    {
        path.push_back(start);
        path.push_back(end);
    }
    else if(aStar(worldToImg(start), worldToImg(end), pathImg))
    {
        // A* succeeded: prune redundant waypoints, then map back to world.
        pathImg = smoothPath(pathImg);
        for(const Eigen::Vector2i & point : pathImg)
        {
            path.push_back(imgToWorld(point));
        }
    }

    if(image)
    {
        drawPath(path, *image);
    }

    return path;
}
// Called by the interpreter on every source-line transition (and on function
// exit, signalled by a previous line number of -1). Updates the recorded line,
// forwards line timestamps to the profiler, emits trace output, and decides
// whether to enter the debugger because of a step operation, a breakpoint, or
// a watchpoint.
void Debugger::debugLine(int linenum)
{
    AvmAssert( core->callStack !=0 );
    if (!core->callStack) return;   // defensive: release builds have no assert
    AvmAssert(linenum > 0);

    int prev = core->callStack->linenum();
    core->callStack->set_linenum(linenum);
    int line = linenum;

    // line number has changed
    bool changed = (prev == line) ? false : true;
    bool exited = (prev == -1) ? true : false;  // are we being called as a result of function exit?
    if (!changed && !exited)
        return;  // still on the same line in the same function?

    // Report the line transition to the profiler, unless the sampler is
    // currently taking samples.
    Profiler* profiler = core->profiler();
    Sampler* s = core->get_sampler();
    if (profiler && profiler->profilingDataWanted && profiler->profileSwitch && !(s && s->sampling()))
    {
        profiler->sendLineTimestamp(line);
    }

    // tracing information
    if (!exited)
        traceLine(line);

    // check if we should stop due to breakpoint or step
    bool stop = false;
    if (stepState.flag)
    {
        if (stepState.startingDepth != -1 && core->callStack->depth() < stepState.startingDepth)
        {
            // We stepped out of whatever function was executing when the
            // stepInto/stepOver/stepOut command was executed. We may be
            // in the middle of a line of code, but we still want to stop
            // immediately. See bug 126633.
            stop = true;
        }
        else if (!exited && (stepState.depth == -1 || core->callStack->depth() <= stepState.depth) )
        {
            // We reached the beginning of a new line of code.
            stop = true;
        }
    }

    // we didn't decide to stop due to a step, but check if we hit a breakpoint
    if (!stop && !exited)
    {
        MethodInfo* f = core->callStack->info();
#ifdef VMCFG_AOT
        if (f && (f->hasMethodBody() || f->isCompiledMethod()))
#else
        if (f && f->hasMethodBody())
#endif
        {
            AbcFile* abc = f->file();
            if (abc)
            {
                SourceFile* source = abc->sourceNamed( core->callStack->filename() );
                if (source && source->hasBreakpoint(line))
                {
                    stop = true;
                }
            }
        }
    }

    // we still haven't decided to stop; check our watchpoints
    if (!stop && !exited)
    {
        if (hitWatchpoint())
            stop = true;
    }

    if (stop)
    {
        // Terminate whatever step operation may have been happening. But first,
        // save the state of the step, so that if someone calls stepContinue(),
        // then we can restore it.
        StepState oldOldStepState = oldStepState;  // save oldStepState in case of reentrancy
        oldStepState = stepState;                  // save stepState so that stepContinue() can find it
        stepState.clear();                         // turn off stepping
        enterDebugger();
        oldStepState = oldOldStepState;            // restore oldStepState
    }
}
/* Bytecode dispatch loop. Executes up to 100 instructions on this VM, then
 * returns it (the caller presumably reschedules — TODO confirm against the
 * scheduler). Also returns early when an operation yields a null result,
 * when the VM goes idle, halts, or records a failure message.
 *
 * Instruction encoding (as used here): the low byte of each word is the
 * opcode, the high bits are an inline argument; additional operands follow
 * as separate words consumed via *vm->ip++.
 *
 * NOTE(review): the VFuture / clonePoints / child-branch machinery looks
 * like bookkeeping for speculative execution of not-yet-computed values —
 * hedged, since the cloning implementation is outside this view. */
static VM *execute(VM *vm)
{
    int maxInstructions = 100;  /* time slice: at most 100 instructions per call */
    while (maxInstructions--)
    {
        int i = *vm->ip;
        int arg = i >> 8;       /* inline argument from the high bits */
        if (DEBUG_TRACE)
        {
            traceLine(vm, (int)(vm->ip - vmBytecode));
            fflush(stdout);
        }
        vm->ip++;
        switch ((Instruction)(i & 0xff))
        {
        /* Constants: store a fixed value into slot `arg`. */
        case OP_NULL:
            storeValue(vm, vm->bp, arg, VNull);
            break;
        case OP_TRUE:
            storeValue(vm, vm->bp, arg, VTrue);
            break;
        case OP_FALSE:
            storeValue(vm, vm->bp, arg, VFalse);
            break;
        case OP_EMPTY_LIST:
            storeValue(vm, vm->bp, arg, VEmptyList);
            break;

        /* Build a list of `arg` elements; operands are the element slots,
         * followed by the destination slot. If any element is still a
         * future, the whole list becomes VFuture and the remaining operands
         * are skipped. */
        case OP_LIST:
        {
            vref result;
            vref *array;
            vref *write;
            assert(arg);
            array = VCreateArray((size_t)arg);
            for (write = array; arg--; write++)
            {
                vref value = loadValue(vm, vm->bp, *vm->ip++);
                if (value == VFuture)
                {
                    vm->ip += arg;  /* skip the unread element operands */
                    VAbortArray(array);
                    result = VFuture;
                    goto storeList;
                }
                *write = value;
                assert(HeapGetObjectType(*write) != TYPE_FUTURE);
            }
            result = VFinishArray(array);
        storeList:
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        /* Expand a glob pattern (constant ref in `arg`) into a file list. */
        case OP_FILELIST:
        {
            vref string = refFromInt(arg);
            vref result;
            if (string == VFuture)
            {
                result = VFuture;
            }
            else
            {
                result = VCreateFilelistGlob(VGetString(string), VStringLength(string));
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        case OP_STORE_CONSTANT:
            storeValue(vm, vm->bp, arg, refFromInt(*vm->ip++));
            break;
        case OP_COPY:
            storeValue(vm, vm->bp, *vm->ip++, loadValue(vm, vm->bp, arg));
            break;

        /* Unary operations. VNeg/VInv return null on failure, which aborts
         * the slice and returns control to the caller. */
        case OP_NOT:
            storeValue(vm, vm->bp, *vm->ip++, VNot(loadValue(vm, vm->bp, arg)));
            break;
        case OP_NEG:
        {
            vref result = VNeg(vm, loadValue(vm, vm->bp, arg));
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_INV:
        {
            vref result = VInv(vm, loadValue(vm, vm->bp, arg));
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        /* Loop iteration step: index += step; if the new index is valid for
         * the collection, store the element and continue, otherwise jump
         * past the loop body (arg is the relative jump distance). A future
         * index/step/collection is currently unhandled (assert(0)). */
        case OP_ITER_NEXT:
        {
            vref collection = loadValue(vm, vm->bp, *vm->ip++);
            int indexVariable = *vm->ip++;
            vref index = loadValue(vm, vm->bp, indexVariable);
            vref step = loadValue(vm, vm->bp, *vm->ip++);
            if (index == VFuture || step == VFuture)
            {
                index = VFuture;
                storeValue(vm, vm->bp, indexVariable, index);
                goto iterNextFuture;
            }
            else
            {
                index = VAdd(vm, index, step);
                storeValue(vm, vm->bp, indexVariable, index);
            }
            if (collection == VFuture)
            {
            iterNextFuture:
                assert(0);
            }
            switch (VGetBool(VValidIndex(vm, collection, index)))
            {
            case TRUTHY:
                storeValue(vm, vm->bp, *vm->ip++, VIndexedAccess(vm, collection, index));
                break;
            case FALSY:
                vm->ip += arg - 2;  /* skip stores and leave the loop */
                break;
            case FUTURE:
                unreachable;
            }
            break;
        }

        /* Binary comparisons: operands are slot `arg`, one operand word,
         * and a destination word. A null result aborts the slice. */
        case OP_EQUALS:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VEquals(value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_NOT_EQUALS:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VEquals(value1, value2);
            if (!result)
            {
                return vm;
            }
            /* Invert the equality result; FUTURE stays FUTURE. */
            switch (VGetBool(result))
            {
            case TRUTHY:
                result = VFalse;
                break;
            case FALSY:
                result = VTrue;
                break;
            case FUTURE:
                break;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_LESS_EQUALS:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VLessEquals(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_GREATER_EQUALS:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            /* a >= b  implemented as  b <= a */
            vref result = VLessEquals(vm, value2, value1);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_LESS:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VLess(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_GREATER:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            /* a > b  implemented as  b < a */
            vref result = VLess(vm, value2, value1);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        /* Binary arithmetic / list operations; same operand layout and
         * null-result handling as the comparisons above. */
        case OP_ADD:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VAdd(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_SUB:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VSub(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_MUL:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VMul(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_DIV:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VDiv(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_REM:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VRem(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_CONCAT_LIST:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VConcat(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        /* Concatenate `arg` string operands, staged through the shared
         * scratch vector `temp` (must be empty on entry; reset afterwards). */
        case OP_CONCAT_STRING:
        {
            vref result;
            assert(!IVSize(&temp));
            for (i = 0; i < arg; i++)
            {
                IVAdd(&temp, intFromRef(loadValue(vm, vm->bp, *vm->ip++)));
            }
            result = VConcatString((size_t)arg, (vref*)IVGetWritePointer(&temp, 0));
            storeValue(vm, vm->bp, *vm->ip++, result);
            IVSetSize(&temp, 0);
            break;
        }

        case OP_INDEXED_ACCESS:
        {
            vref collection = loadValue(vm, vm->bp, arg);
            vref index = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VIndexedAccess(vm, collection, index);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }
        case OP_RANGE:
        {
            vref value1 = loadValue(vm, vm->bp, arg);
            vref value2 = loadValue(vm, vm->bp, *vm->ip++);
            vref result = VRange(vm, value1, value2);
            if (!result)
            {
                return vm;
            }
            storeValue(vm, vm->bp, *vm->ip++, result);
            break;
        }

        /* Unconditional relative jump (+1 accounts for no operand word read). */
        case OP_JUMP:
            vm->ip += arg + 1;
            break;

        /* Conditional branch, taken when the condition is true. Each branch
         * is a "clone point"; when a speculative child VM exists and has
         * caught up, the child is reconciled or disposed here depending on
         * whether the condition is still a future. */
        case OP_BRANCH_TRUE:
        {
            vref value = loadValue(vm, vm->bp, *vm->ip++);
            VBool b = VGetBool(value);
            vm->base.clonePoints++;
            if (vm->child && vm->base.clonePoints >= vm->child->clonePoints)
            {
                if (vm->base.clonePoints > vm->child->clonePoints)
                {
                    /* Child is behind this clone point: it is stale. */
                    VMDispose(vm->child);
                    vm->child = null;
                    goto branchTrueNoChild;
                }
                assert(!vm->child->fullVM);
                if (b == FUTURE)
                {
                    VMReplaceCloneBranch(vm, vm->ip + arg);
                }
                else
                {
                    uint keepChild;
                    if (b == FALSY)
                    {
                        keepChild = 0;
                    }
                    else
                    {
                        vm->ip += arg;
                        keepChild = 1;
                    }
                    vm->child = VMDisposeBranch((VMBranch*)vm->child, keepChild);
                }
            }
            else
            {
            branchTrueNoChild:
                switch (b)
                {
                case FALSY:
                    break;
                case FUTURE:
                    /* Condition unknown: clone a branch for the not-taken
                     * path, then proceed as if taken. */
                    assert(!vm->child);
                    VMCloneBranch(vm, vm->ip);
                    /* fallthrough */
                case TRUTHY:
                    vm->ip += arg;
                    break;
                }
            }
            break;
        }

        /* Conditional branch, taken when the condition is false; mirror
         * image of OP_BRANCH_TRUE. */
        case OP_BRANCH_FALSE:
        {
            vref value = loadValue(vm, vm->bp, *vm->ip++);
            VBool b = VGetBool(value);
            vm->base.clonePoints++;
            if (vm->child && vm->base.clonePoints >= vm->child->clonePoints)
            {
                if (vm->base.clonePoints > vm->child->clonePoints)
                {
                    VMDispose(vm->child);
                    vm->child = null;
                    goto branchFalseNoChild;
                }
                assert(!vm->child->fullVM);
                if (b == FUTURE)
                {
                    VMReplaceCloneBranch(vm, vm->ip);
                    vm->ip += arg;
                }
                else
                {
                    uint keepChild;
                    if (b == FALSY)
                    {
                        vm->ip += arg;
                        keepChild = 0;
                    }
                    else
                    {
                        keepChild = 1;
                    }
                    vm->child = VMDisposeBranch((VMBranch*)vm->child, keepChild);
                }
            }
            else
            {
            branchFalseNoChild:
                switch (b)
                {
                case TRUTHY:
                    break;
                case FUTURE:
                    assert(!vm->child);
                    VMCloneBranch(vm, vm->ip);
                    /* fallthrough */
                case FALSY:
                    vm->ip += arg;
                    break;
                }
            }
            break;
        }

        /* Return with a value in slot `arg`; frame restore is done by
         * popStackFrame. */
        case OP_RETURN:
            assert(IVSize(&vm->callStack));
            popStackFrame(vm, &vm->ip, &vm->bp, (uint)arg);
            break;

        /* Void return; returning from the outermost frame halts the VM. */
        case OP_RETURN_VOID:
            if (!IVSize(&vm->callStack))
            {
                vm->base.clonePoints++;
                VMHalt(vm, 0);
                return vm;
            }
            popStackFrame(vm, &vm->ip, &vm->bp, 0);
            break;

        /* Call a bytecode function: copy `arg` argument values onto the
         * stack, push the return address and old bp, then enter the callee. */
        case OP_INVOKE:
        {
            vref *values;
            int function = *vm->ip++;
            values = (vref*)IVGetAppendPointer(&vm->stack, (size_t)arg);
            for (i = 0; i < arg; i++)
            {
                *values++ = loadValue(vm, vm->bp, *vm->ip++);
            }
            IVAdd(&vm->callStack, (int)(vm->ip - vmBytecode));
            IVAdd(&vm->callStack, vm->bp);
            initStackFrame(vm, &vm->ip, &vm->bp, function, (uint)arg);
            break;
        }

        /* Call a native function (ref encoded in `arg`). May leave the VM
         * idle or start an asynchronous job whose result lands in storeAt. */
        case OP_INVOKE_NATIVE:
        {
            nativefunctionref nativeFunction = refFromInt(arg);
            vref value;
            int storeAt;
            assert(!vm->job);
            vm->base.clonePoints++;
            if (vm->child && vm->base.clonePoints >= vm->child->clonePoints)
            {
                VM *child = (VM*)vm->child;
                assert(vm->child->fullVM);
                VMReplaceChild(vm, child);
            }
            value = NativeInvoke(vm, nativeFunction);
            if (vm->idle)
            {
                return vm;
            }
            storeAt = *vm->ip++;
            storeValue(vm, vm->bp, storeAt, value);
            if (vm->job)
            {
                vm->job->storeAt = storeAt;
                vm->idle = true;
                /* TODO: Activate speculative execution */
                JobExecute(vm->job);
                if (unlikely(vm->failMessage))
                {
                    return vm;
                }
                /* vm = VMClone(vm, vm->ip); */
            }
            break;
        }

        /* Opcodes that must have been rewritten/linked away before
         * execution, plus anything unknown. */
        case OP_FUNCTION:
        case OP_FUNCTION_UNLINKED:
        case OP_LOAD_FIELD:
        case OP_STORE_FIELD:
        case OP_ITER_NEXT_INDEXED:
        case OP_JUMPTARGET:
        case OP_JUMP_INDEXED:
        case OP_BRANCH_TRUE_INDEXED:
        case OP_BRANCH_FALSE_INDEXED:
        case OP_INVOKE_UNLINKED:
        case OP_UNKNOWN_VALUE:
        case OP_FILE:
        case OP_LINE:
        case OP_ERROR:
        default:
            unreachable;
        }
    }
    return vm;
}