void MoreAccurateMotionWobbleSuppressorGpu::suppress(int idx, const gpu::GpuMat &frame, gpu::GpuMat &result)
{
    CV_Assert(motions_ && stabilizationMotions_);

    if (idx % period_ == 0)
    {
        result = frame;
        return;
    }

    int k1 = idx / period_ * period_;
    int k2 = std::min(k1 + period_, frameCount_ - 1);

    Mat S1 = (*stabilizationMotions_)[idx];

    Mat ML = S1 * getMotion(k1, idx, *motions2_) * getMotion(k1, idx, *motions_).inv() * S1.inv();
    Mat MR = S1 * getMotion(idx, k2, *motions2_).inv() * getMotion(idx, k2, *motions_) * S1.inv();

    gpu::calcWobbleSuppressionMaps(k1, idx, k2, frame.size(), ML, MR, mapx_, mapy_);

    if (result.data == frame.data)
        result = gpu::GpuMat(frame.size(), frame.type());

    gpu::remap(frame, result, mapx_, mapy_, INTER_LINEAR, BORDER_REPLICATE);
}
static void saveMotions(
        int frameCount, const std::vector<Mat> &motions, const std::vector<Mat> &stabilizationMotions)
{
    std::ofstream fm("log_motions.csv");
    for (int i = 0; i < frameCount - 1; ++i)
    {
        Mat_<float> M = at(i, motions);
        fm << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
           << M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
           << M(2,0) << " " << M(2,1) << " " << M(2,2) << std::endl;
    }

    std::ofstream fo("log_orig.csv");
    for (int i = 0; i < frameCount; ++i)
    {
        Mat_<float> M = getMotion(0, i, motions);
        fo << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
           << M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
           << M(2,0) << " " << M(2,1) << " " << M(2,2) << std::endl;
    }

    std::ofstream fs("log_stab.csv");
    for (int i = 0; i < frameCount; ++i)
    {
        Mat_<float> M = stabilizationMotions[i] * getMotion(0, i, motions);
        fs << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
           << M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
           << M(2,0) << " " << M(2,1) << " " << M(2,2) << std::endl;
    }
}
void MoreAccurateMotionWobbleSuppressor::suppress(int idx, const Mat &frame, Mat &result)
{
    CV_Assert(motions_ && stabilizationMotions_);

    if (idx % period_ == 0)
    {
        result = frame;
        return;
    }

    int k1 = idx / period_ * period_;
    int k2 = std::min(k1 + period_, frameCount_ - 1);

    Mat S1 = (*stabilizationMotions_)[idx];

    Mat_<float> ML = S1 * getMotion(k1, idx, *motions2_) * getMotion(k1, idx, *motions_).inv() * S1.inv();
    Mat_<float> MR = S1 * getMotion(idx, k2, *motions2_).inv() * getMotion(idx, k2, *motions_) * S1.inv();

    mapx_.create(frame.size());
    mapy_.create(frame.size());

    float xl, yl, zl, wl;
    float xr, yr, zr, wr;

    for (int y = 0; y < frame.rows; ++y)
    {
        for (int x = 0; x < frame.cols; ++x)
        {
            xl = ML(0,0)*x + ML(0,1)*y + ML(0,2);
            yl = ML(1,0)*x + ML(1,1)*y + ML(1,2);
            zl = ML(2,0)*x + ML(2,1)*y + ML(2,2);
            xl /= zl; yl /= zl;
            wl = float(idx - k1);

            xr = MR(0,0)*x + MR(0,1)*y + MR(0,2);
            yr = MR(1,0)*x + MR(1,1)*y + MR(1,2);
            zr = MR(2,0)*x + MR(2,1)*y + MR(2,2);
            xr /= zr; yr /= zr;
            wr = float(k2 - idx);

            mapx_(y,x) = (wr * xl + wl * xr) / (wl + wr);
            mapy_(y,x) = (wr * yl + wl * yr) / (wl + wr);
        }
    }

    if (result.data == frame.data)
        result = Mat(frame.size(), frame.type());

    remap(frame, result, mapx_, mapy_, INTER_LINEAR, BORDER_REPLICATE);
}
int main(void)
{
    ConfigPins();
    initUART();
    initSysTick();
    RValue = initAccel();
    printInteger(RValue);
    if (RValue == 0)
    {
        printString("Boldly going :)\r\n");
    }
    else
    {
        printString("Inertia sensors offline :(\r\n");
    }
    while (1)
    {
        Temperature = getTemperature();
        printShort(Temperature);
        printString(" ");
        getMotion(&m);
        printShort(m.x_a);
        printString(" ");
        printShort(m.y_a);
        printString(" ");
        printShort(m.z_a);
        printString(" ");
        printString("\r\n");
        GPIO0DATA ^= BIT2;
        delay(10000);
    }
    return 0;
}
bool QICMotionCapture::OnReadEvent()
{
    //INFO("head of on OnreadEvent");
    struct input_event ev[64];
    int rd;

    rd = read(GetDeviceFD(), ev, sizeof(struct input_event) * 64);
    if (rd < (int) sizeof(struct input_event))
    {
        printf("expected %d bytes, got %d\n", (int) sizeof(struct input_event), rd);
        perror("\nevtest: error reading");
        return false;
    }

    rect_t mymotion[3];
    unsigned long ts;
    int numofmotion = getMotion(mymotion, ts);
    // INFO("numofmotion %d", numofmotion);

    // Pack each motion rectangle as four consecutive bytes: x, y, width, height.
    uint8_t motionbuffer[4 * numofmotion];
    for (int i = 0; i < numofmotion; i++)
    {
        motionbuffer[4*i]     = mymotion[i].x;
        motionbuffer[4*i + 1] = mymotion[i].y;
        motionbuffer[4*i + 2] = mymotion[i].width;
        motionbuffer[4*i + 3] = mymotion[i].height;
    }

    INFO("ts: %lu last ts:%lu", ts, _CtrlTs);
    if (_CtrlTs == 0)
        _CtrlTs = ts - (1000 / _FPSControl + 1);

    // Rate-limit delivery to the configured FPS before forwarding to the carriers.
    if ((ts - _CtrlTs) > (1000 / _FPSControl))
    {
        _CtrlTs = ts;
        SendDataToCarriers(motionbuffer, 4 * numofmotion, ts, HW_DATA_MOTION);
    }
    return true;
}
void getCommand()
{
    packageCounter++;
    if (packageCounter == 4)
    {
        readType();
    }
    if (packageCounter == packageSize)
    {
        packageCounter = 0;
        packageSize = 4;
        getMotion(package);
    }
}
WeightedAnimation::WeightedAnimation(BVH* newBVH, const QString& bvhFile)
    : Animation(newBVH, bvhFile)
{
    int frames_count = newBVH->lastLoadedNumberOfFrames;
    _tPosed = checkTPosed(frames);

    if (frames_count < 2*MIX_IN_OUT)
        _mixIn = _mixOut = frames_count/2;
    else
        _mixIn = _mixOut = MIX_IN_OUT;

    frameWeights = new int[frames_count];
    for (int i = 0; i < frames_count; i++)
        frameWeights[i] = 50;

    linearBones = new QMap<QString, BVHNode*>();
    linearBones->insert(positionNode->name(), positionNode);
    initializeLinearBonesHelper(getMotion());
}
void SonicXNFile::setFileMode(XNFileMode target_file_mode)
{
    SonicXNObject  *object  = getObject();
    SonicXNTexture *texture = getTexture();
    SonicXNEffect  *effect  = getEffect();
    SonicXNBones   *bones   = getBones();
    SonicXNMotion  *motion  = getMotion();

    file_mode = target_file_mode;
    setHeaders();

    if (info)
    {
        info->setHeader(header_info);
        info->setFileMode(file_mode);
    }

    if (object)
    {
        object->setFileMode(file_mode);
        object->setHeader(header_object);
    }

    if (texture)
    {
        texture->setFileMode(file_mode);
        texture->setHeader(header_texture);
    }

    if (effect)
    {
        effect->setFileMode(file_mode);
        effect->setHeader(header_effect);
    }

    if (bones)
    {
        bones->setFileMode(file_mode);
        bones->setHeader(header_bones);
    }

    if (motion)
    {
        motion->setFileMode(file_mode);
        motion->setHeader(header_motion);
    }

    if (offset_table) offset_table->setFileMode(file_mode);
    if (footer) footer->setFileMode(file_mode);
    if (end) end->setFileMode(file_mode);
}
bool QICMotionCapture2::OnReadEvent()
{
    /* call getMotion and deliver to carrier */
    rect_t mymotion[3];
    unsigned long ts;
    int numofmotion = getMotion(mymotion, ts);
    // INFO("numofmotion %d", numofmotion);

    // Pack each motion rectangle as four consecutive bytes: x, y, width, height.
    uint8_t motionbuffer[4 * numofmotion];
    for (int i = 0; i < numofmotion; i++)
    {
        motionbuffer[4*i]     = mymotion[i].x;
        motionbuffer[4*i + 1] = mymotion[i].y;
        motionbuffer[4*i + 2] = mymotion[i].width;
        motionbuffer[4*i + 3] = mymotion[i].height;
    }

    /*INFO("ts: %lu last ts:%lu", ts, _CtrlTs);
    if(_CtrlTs==0)
        _CtrlTs=ts-(1000/_FPSControl+1);
    if((ts-_CtrlTs)>(1000/_FPSControl))
    {
        _CtrlTs=ts;
        SendDataToCarriers(motionbuffer,4*numofmotion, ts, HW_DATA_MOTION);
    }*/

    // Record this event's timestamp in the circular buffer.
    _motionBox.tsCache[_motionBox.currentIndex] = ts;
    _motionBox.currentIndex++;
    _motionBox.currentIndex %= _eventBufferSize;

    // Count how many buffered events fall inside the detection window and
    // forward the motion data only once the count reaches the threshold.
    uint8_t boxCounter;
    uint8_t hitCounter = 0;
    for (boxCounter = 0; boxCounter < _eventBufferSize; boxCounter++)
    {
        if ((ts - _motionBox.tsCache[boxCounter]) < _eventInMiliSec)
            hitCounter++;
    }
    if (hitCounter >= _eventCountThreshold)
        SendDataToCarriers(motionbuffer, 4 * numofmotion, ts, HW_DATA_MOTION);

    return true;
}
void WeightingDeblurer::deblur(int idx, Mat &frame)
{
    CV_Assert(frame.type() == CV_8UC3);

    bSum_.create(frame.size());
    gSum_.create(frame.size());
    rSum_.create(frame.size());
    wSum_.create(frame.size());

    for (int y = 0; y < frame.rows; ++y)
    {
        for (int x = 0; x < frame.cols; ++x)
        {
            Point3_<uchar> p = frame.at<Point3_<uchar> >(y,x);
            bSum_(y,x) = p.x;
            gSum_(y,x) = p.y;
            rSum_(y,x) = p.z;
            wSum_(y,x) = 1.f;
        }
    }

    for (int k = idx - radius_; k <= idx + radius_; ++k)
    {
        const Mat &neighbor = at(k, *frames_);
        float bRatio = at(idx, *blurrinessRates_) / at(k, *blurrinessRates_);
        Mat_<float> M = getMotion(idx, k, *motions_);

        if (bRatio > 1.f)
        {
            for (int y = 0; y < frame.rows; ++y)
            {
                for (int x = 0; x < frame.cols; ++x)
                {
                    int x1 = cvRound(M(0,0)*x + M(0,1)*y + M(0,2));
                    int y1 = cvRound(M(1,0)*x + M(1,1)*y + M(1,2));

                    if (x1 >= 0 && x1 < neighbor.cols && y1 >= 0 && y1 < neighbor.rows)
                    {
                        const Point3_<uchar> &p = frame.at<Point3_<uchar> >(y,x);
                        const Point3_<uchar> &p1 = neighbor.at<Point3_<uchar> >(y1,x1);
                        float w = bRatio * sensitivity_ /
                                  (sensitivity_ + std::abs(intensity(p1) - intensity(p)));
                        bSum_(y,x) += w * p1.x;
                        gSum_(y,x) += w * p1.y;
                        rSum_(y,x) += w * p1.z;
                        wSum_(y,x) += w;
                    }
                }
            }
        }
    }

    for (int y = 0; y < frame.rows; ++y)
    {
        for (int x = 0; x < frame.cols; ++x)
        {
            float wSumInv = 1.f / wSum_(y,x);
            frame.at<Point3_<uchar> >(y,x) = Point3_<uchar>(
                    static_cast<uchar>(bSum_(y,x)*wSumInv),
                    static_cast<uchar>(gSum_(y,x)*wSumInv),
                    static_cast<uchar>(rSum_(y,x)*wSumInv));
        }
    }
}
void ompl::geometric::LBTRRT::considerEdge(Motion *parent, Motion *child, double c)
{
    // Optimization - check if the bounded approximation invariant
    // will be violated after the edge insertion (at least for the child node).
    // If this is the case, perform the local planning; this prevents the update
    // of the graph due to the edge insertion and then the re-update as it is removed.
    double potential_cost = parent->costLb_ + c;
    if (child->costApx_ > (1 + epsilon_) * potential_cost)
        if (!checkMotion(parent, child))
            return;

    // update lowerBoundGraph_
    std::list<std::size_t> affected;
    lowerBoundGraph_.addEdge(parent->id_, child->id_, c, true, affected);

    // Now check if the bounded approximation invariant has been violated for each affected vertex
    // and insert them into a priority queue ordered according to the lb cost.
    std::list<std::size_t>::iterator iter;
    IsLessThanLB isLessThanLB(this);
    Lb_queue queue(isLessThanLB);

    for (iter = affected.begin(); iter != affected.end(); ++iter)
    {
        Motion *m = getMotion(*iter);
        m->costLb_ = lowerBoundGraph_.getShortestPathCost(*iter);
        if (m->costApx_ > (1 + epsilon_) * m->costLb_)
            queue.insert(m);
    }

    while (queue.empty() == false)
    {
        Motion *motion = *(queue.begin());
        queue.erase(queue.begin());

        if (motion->costApx_ > (1 + epsilon_) * motion->costLb_)
        {
            Motion *potential_parent = getMotion(lowerBoundGraph_.getShortestPathParent(motion->id_));
            if (checkMotion(potential_parent, motion))
            {
                double delta = lazilyUpdateApxParent(motion, potential_parent);
                updateChildCostsApx(motion, delta);
            }
            else
            {
                affected.clear();
                lowerBoundGraph_.removeEdge(potential_parent->id_, motion->id_, true, affected);

                for (iter = affected.begin(); iter != affected.end(); ++iter)
                {
                    Motion *affectedMotion = getMotion(*iter);
                    Lb_queue_iter lb_queue_iter = queue.find(affectedMotion);
                    if (lb_queue_iter != queue.end())
                    {
                        queue.erase(lb_queue_iter);
                        affectedMotion->costLb_ = lowerBoundGraph_.getShortestPathCost(affectedMotion->id_);
                        if (affectedMotion->costApx_ > (1 + epsilon_) * affectedMotion->costLb_)
                            queue.insert(affectedMotion);
                    }
                    else
                    {
                        affectedMotion->costLb_ = lowerBoundGraph_.getShortestPathCost(affectedMotion->id_);
                    }
                }

                motion->costLb_ = lowerBoundGraph_.getShortestPathCost(motion->id_);
                if (motion->costApx_ > (1 + epsilon_) * motion->costLb_)
                    queue.insert(motion);

                // optimization - we can remove the opposite edge
                lowerBoundGraph_.removeEdge(motion->id_, potential_parent->id_, false, affected);
            }
        }
    }
}
//**************************************************************************************************************************pointCloudcallback()
void PSMpositionNode::pointCloudcallback(const sensor_msgs::PointCloud2& pcloud)
{
    ROS_DEBUG("Received PointCloud");
    pclCount_++;

    struct timeval start, end;
    gettimeofday(&start, NULL);

    // **** if this is the first scan, initialize and leave the function here
    if (!init_)
    {
        init_ = initializeLasermsgs(pcloud);
        if (init_) ROS_INFO("Lasermsgs initialized");
        return;
    }

    hline.header = pcloud.header;
    hline.header.frame_id = "hkinectline";

    pcl::fromROSMsg(pcloud, cloud);

    std::vector<depthPoint> verticalLine;

    // get the horizontal central line
    for (int i = 0; i < width_; i++)
    {
        if (isnan(cloud.points[begin_+i].x) || isnan(cloud.points[begin_+i].y) || isnan(cloud.points[begin_+i].z))
        {
            hline.ranges[width_-1-i] = 0;
        }
        else
        {
            hline.ranges[width_-1-i] = sqrt(pow(cloud.points[begin_+i].x,2) +
                                            pow(cloud.points[begin_+i].y,2) +
                                            pow(cloud.points[begin_+i].z,2));
        }
    }
    //ROS_INFO("Distance to wall: %f",hline.ranges[width_/2]);

    // get the vertical central line
    for (int i = 0; i < height_; i++)
    {
        // for altitude estimation
        XYpoint.x = cloud.points[(i+0.5)*width_].z;
        XYpoint.y = cloud.points[(i+0.5)*width_].y;

        if (isnan(cloud.points[(i+0.5)*width_].x) || isnan(cloud.points[(i+0.5)*width_].y) || isnan(cloud.points[(i+0.5)*width_].z))
        {
            vline.ranges[i] = 0;
        }
        else
        {
            verticalLine.push_back(XYpoint);
            //vline.ranges[i] = sqrt(pow(cloud.points[(i+0.5)*width_].x,2)+pow(cloud.points[(i+0.5)*width_].y,2)+pow(cloud.points[(i+0.5)*width_].z,2));
        }
    }

    if (smMethod == 1) getMotion(hline, &verticalLine);
    if (smMethod == 2) getMotion_can(hline, &verticalLine);

    gettimeofday(&end, NULL);
    double dur = ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000.0;
    totalDuration_ += dur;
    double ave = totalDuration_ / pclCount_;
    ROS_INFO("dur:\t %.3f ms \t ave:\t %.3f ms", dur, ave);
}
//get the action target point in the images reference frame
bool VisuoThread::getTarget(Value &type, Bottle &options)
{
    bool ok=false;

    Bottle &bNewTarget=options.addList();
    bNewTarget.addString("target");

    Bottle &bTarget=bNewTarget.addList();
    Bottle &bNewStereo=bTarget.addList();
    bNewStereo.addString("stereo");

    Bottle &bStereo=bNewStereo.addList();

    if(type.isList())
    {
        Bottle *list=type.asList();

        if(list->size()>2)
        {
            if(list->get(0).asString()=="right")
            {
                bStereo.addDouble(0.0);
                bStereo.addDouble(0.0);
                bStereo.addDouble(list->get(1).asDouble());
                bStereo.addDouble(list->get(2).asDouble());
            }
            else if(list->get(0).asString()=="left")
            {
                bStereo.addDouble(list->get(1).asDouble());
                bStereo.addDouble(list->get(2).asDouble());
                bStereo.addDouble(0.0);
                bStereo.addDouble(0.0);
            }
            else if(list->get(0).asString()=="cartesian")
            {
                Bottle &bNewCartesian=bTarget.addList();
                bNewCartesian.addString("cartesian");

                Bottle &bCartesian=bNewCartesian.addList();
                for(int i=1; i<list->size(); i++)
                    bCartesian.addDouble(list->get(i).asDouble());
            }
            else
            {
                Bottle &bNewCartesian=bTarget.addList();
                bNewCartesian.addString("cartesian");

                Bottle &bCartesian=bNewCartesian.addList();
                for(int i=0; i<list->size(); i++)
                    bCartesian.addDouble(list->get(i).asDouble());
            }
            ok=true;
        }
        else if(list->size()==2)
        {
            bStereo.addDouble(list->get(0).asDouble());
            bStereo.addDouble(list->get(1).asDouble());
            bStereo.addDouble(0.0);
            bStereo.addDouble(0.0);
            ok=true;
        }
    }
    else
        switch(type.asVocab())
        {
            case PARAM_FIXATION:
            {
                ok=getFixation(bStereo);
                break;
            }
            case PARAM_MOTION:
            {
                ok=getMotion(bStereo);
                break;
            }
            case PARAM_TRACK:
            {
                ok=getTrack(bStereo);
                break;
            }
            case PARAM_RAW:
            {
                ok=getRaw(bStereo);
                break;
            }
            default:
            {
                Bottle &bName=bTarget.addList();
                bName.addString("name");
                bName.addString(type.asString().c_str());
                getObject(type.asString().c_str(),bStereo);
                break;
            }
        }

    return ok;
}