bool calculate_heading(double& heading)
{
  bool ret = false;
  if (gps_points_buffer.capacity() == gps_points_buffer.size()) {
    gps_points_t p_comp = gps_points_buffer[0];
    for (size_t i = 1; i < gps_points_buffer.size(); i++) {
      gps_points_t p_cur = gps_points_buffer[i];
      double dist = sqrt(pow(p_comp.x - p_cur.x, 2) + pow(p_comp.y - p_cur.y, 2));
      if (dist > heading_threshold) {
        heading = atan2(p_comp.y - p_cur.y, p_comp.x - p_cur.x);
        ret = true;
        break;
      }
    }
  }
  return ret;
}
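/* A minimal driver sketch for calculate_heading() above. gps_points_buffer,
   gps_points_t and heading_threshold are the snippet's own names; the fill
   direction (newest fix at index 0 via push_front), the aggregate
   initialization of gps_points_t and the on_gps_fix() helper are assumptions
   for illustration only. */
void on_gps_fix(double x, double y)
{
  gps_points_buffer.push_front(gps_points_t{x, y});

  double heading = 0.0;
  if (calculate_heading(heading)) {
    // heading is the atan2 bearing (radians) from the first sufficiently
    // distant older point towards the newest point; use it here.
  }
}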
/** Reserve some part of the pipe for reading

    \param[in] s is the number of elements to reserve

    \param[out] rid is an iterator to a description of the reservation
    that has been done if successful

    \param[in] blocking specifies whether the call waits for the
    operation to succeed

    \return true if the reservation was successful
*/
bool reserve_read(std::size_t s, rid_iterator &rid, bool blocking = false) {
  // Lock the pipe to avoid being disturbed
  std::unique_lock<std::mutex> ul { cb_mutex };
  TRISYCL_DUMP_T("Before read reservation cb.size() = " << cb.size()
                 << " size() = " << size());
  if (s == 0)
    // Empty reservation requested, so nothing to do
    return false;

  if (blocking)
    /* If in blocking mode, wait for enough elements to read in the
       pipe for the reservation. This condition can change when a
       write is done */
    write_done.wait(ul, [&] { return s <= size(); });
  else if (s > size())
    // Not enough elements to read in the pipe for the reservation
    return false;

  // Compute the location of the first element of the reservation
  auto first = cb.begin() + read_reserved_frozen;
  // Increment the number of frozen elements
  read_reserved_frozen += s;
  /* Add a description of the reservation at the end of the
     reservation queue */
  r_rid_q.emplace_back(first, s);
  // Return the iterator to the last reservation descriptor
  rid = r_rid_q.end() - 1;
  TRISYCL_DUMP_T("After reservation cb.size() = " << cb.size()
                 << " size() = " << size());
  return true;
}
void render(sf::RenderTarget& target)
{
  float dx = 1.0f / static_cast<float>(line_data_.capacity());
  float x = static_cast<float>(line_data_.capacity() - line_data_.size()) * dx;

  line_data_.push_back(std::make_pair(tick_data_, tick_tag_));
  tick_tag_ = false;

  glBegin(GL_LINE_STRIP);
  auto c = color(color_);
  glColor4f(std::get<0>(c), std::get<1>(c), std::get<2>(c), 0.8f);
  for (size_t n = 0; n < line_data_.size(); ++n)
    if (line_data_[n].first > -0.5)
      glVertex3d(x + n * dx,
                 std::max(0.05, std::min(0.95, (1.0f - line_data_[n].first) * 0.8 + 0.1f)),
                 0.0);
  glEnd();

  glEnable(GL_LINE_STIPPLE);
  glLineStipple(3, 0xAAAA);
  for (size_t n = 0; n < line_data_.size(); ++n) {
    if (line_data_[n].second) {
      glBegin(GL_LINE_STRIP);
      glVertex3f(x + n * dx, 0.0f, 0.0f);
      glVertex3f(x + n * dx, 1.0f, 0.0f);
      glEnd();
    }
  }
  glDisable(GL_LINE_STIPPLE);
}
void image_obj_ranged_callback(const cv_tracker::image_obj_ranged::ConstPtr& image_obj_ranged_msg)
{
  pthread_mutex_lock(&mutex);
  image_obj_ranged_ringbuf.push_front(*image_obj_ranged_msg);

  //image_raw is empty
  if (image_raw_ringbuf.begin() == image_raw_ringbuf.end()) {
    pthread_mutex_unlock(&mutex);
    ROS_INFO("image_raw ring buffer is empty");
    return;
  }

  buf_flag = true;

  // image_obj_ranged > image_raw
  if (get_time(&(image_obj_ranged_ringbuf.front().header)) >= get_time(&(image_raw_ringbuf.front().header))) {
    image_raw_buf = image_raw_ringbuf.front();
    boost::circular_buffer<cv_tracker::image_obj_ranged>::iterator it = image_obj_ranged_ringbuf.begin();
    if (image_obj_ranged_ringbuf.size() == 1) {
      image_obj_ranged_buf = *it;
      pthread_mutex_unlock(&mutex);
      return;
    } else {
      for (it++; it != image_obj_ranged_ringbuf.end(); it++) {
        if (fabs_time_diff(&(image_raw_ringbuf.front().header), &((it-1)->header))
            < fabs_time_diff(&(image_raw_ringbuf.front().header), &(it->header))) {
          image_obj_ranged_buf = *(it-1);
          break;
        }
      }
      if (it == image_obj_ranged_ringbuf.end()) {
        image_obj_ranged_buf = image_obj_ranged_ringbuf.back();
      }
    }
  } else {
    image_obj_ranged_buf = image_obj_ranged_ringbuf.front();
    boost::circular_buffer<sensor_msgs::Image>::iterator it = image_raw_ringbuf.begin();
    if (image_raw_ringbuf.size() == 1) {
      image_raw_buf = *it;
      pthread_mutex_unlock(&mutex);
      return;
    }
    for (it++; it != image_raw_ringbuf.end(); it++) {
      if (fabs_time_diff(&(image_obj_ranged_ringbuf.front().header), &((it-1)->header))
          < fabs_time_diff(&(image_obj_ranged_ringbuf.front().header), &(it->header))) {
        image_raw_buf = *(it-1);
        break;
      }
    }
    if (it == image_raw_ringbuf.end()) {
      image_raw_buf = image_raw_ringbuf.back();
    }
  }
  pthread_mutex_unlock(&mutex);
}
void image_obj_callback(const autoware_msgs::image_obj::ConstPtr& image_obj_msg)
{
  pthread_mutex_lock(&mutex);
  image_obj_ringbuf.push_front(*image_obj_msg);

  //vscan_image is empty
  if (vscan_image_ringbuf.begin() == vscan_image_ringbuf.end()) {
    pthread_mutex_unlock(&mutex);
    ROS_INFO("vscan_image ring buffer is empty");
    return;
  }

  buf_flag = true;

  // image_obj > vscan_image
  if (get_time(&(image_obj_ringbuf.front().header)) >= get_time(&(vscan_image_ringbuf.front().header))) {
    vscan_image_buf = vscan_image_ringbuf.front();
    boost::circular_buffer<autoware_msgs::image_obj>::iterator it = image_obj_ringbuf.begin();
    if (image_obj_ringbuf.size() == 1) {
      image_obj_buf = *it;
      pthread_mutex_unlock(&mutex);
      return;
    } else {
      for (it++; it != image_obj_ringbuf.end(); it++) {
        if (fabs_time_diff(&(vscan_image_ringbuf.front().header), &((it-1)->header))
            < fabs_time_diff(&(vscan_image_ringbuf.front().header), &(it->header))) {
          image_obj_buf = *(it-1);
          break;
        }
      }
      if (it == image_obj_ringbuf.end()) {
        image_obj_buf = image_obj_ringbuf.back();
      }
    }
  } else {
    image_obj_buf = image_obj_ringbuf.front();
    boost::circular_buffer<autoware_msgs::PointsImage>::iterator it = vscan_image_ringbuf.begin();
    if (vscan_image_ringbuf.size() == 1) {
      vscan_image_buf = *it;
      pthread_mutex_unlock(&mutex);
      return;
    }
    for (it++; it != vscan_image_ringbuf.end(); it++) {
      if (fabs_time_diff(&(image_obj_ringbuf.front().header), &((it-1)->header))
          < fabs_time_diff(&(image_obj_ringbuf.front().header), &(it->header))) {
        vscan_image_buf = *(it-1);
        break;
      }
    }
    if (it == vscan_image_ringbuf.end()) {
      vscan_image_buf = vscan_image_ringbuf.back();
    }
  }
  pthread_mutex_unlock(&mutex);
}
void emit_block(size_t line_number, std::function<void (const std::string &, int, size_t)> fun)
{
  std::string content;
  for (auto x : buffer) {
    content += x;
  }
  fun(content, line_number - buffer.size(), buffer.size());
}
/** Get the current number of elements in the pipe that can be read

    This is obviously a volatile value which is constrained by
    special relativity.

    Note that on some devices it may be costly to implement (for
    example on FPGA).
*/
std::size_t size() const {
  TRISYCL_DUMP_T("size() cb.size() = " << cb.size()
                 << " cb.end() = " << (void *)&*cb.end()
                 << " reserved_for_reading() = " << reserved_for_reading()
                 << " reserved_for_writing() = " << reserved_for_writing());
  /* The actual number of available elements depends on the elements
     blocked by some reservations. This prevents a consumer from
     reading into a reserved area. */
  return cb.size() - reserved_for_reading() - reserved_for_writing();
}
/** Try to write a value to the pipe

    \param[in] value is what we want to write

    \param[in] blocking specifies whether the call waits for the
    operation to succeed

    \return true on success

    \todo provide a && version
*/
bool write(const T &value, bool blocking = false) {
  // Lock the pipe to avoid being disturbed
  std::unique_lock<std::mutex> ul { cb_mutex };
  TRISYCL_DUMP_T("Write pipe full = " << full() << " value = " << value);

  if (blocking)
    /* If in blocking mode, wait for the not-full condition, that may
       be changed when a read is done */
    read_done.wait(ul, [&] { return !full(); });
  else if (full())
    return false;

  cb.push_back(value);
  TRISYCL_DUMP_T("Write pipe front = " << cb.front() << " back = " << cb.back()
                 << " cb.begin() = " << (void *)&*cb.begin()
                 << " cb.size() = " << cb.size()
                 << " cb.end() = " << (void *)&*cb.end()
                 << " reserved_for_reading() = " << reserved_for_reading()
                 << " reserved_for_writing() = " << reserved_for_writing());
  // Notify the clients waiting to read something from the pipe
  write_done.notify_all();
  return true;
}
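/* Standalone sketch of the same blocking-write pattern outside the pipe
   class: a bounded queue built from boost::circular_buffer, std::mutex and
   std::condition_variable. Every name here is illustrative and not part of
   the triSYCL implementation above. */
#include <boost/circular_buffer.hpp>
#include <condition_variable>
#include <mutex>

template <typename T>
class bounded_queue {
  boost::circular_buffer<T> cb;
  std::mutex m;
  std::condition_variable not_full, not_empty;

public:
  explicit bounded_queue(std::size_t capacity) : cb { capacity } {}

  // Blocking write: wait until the buffer has room, append, then wake a reader
  void write(const T &value) {
    std::unique_lock<std::mutex> ul { m };
    not_full.wait(ul, [&] { return !cb.full(); });
    cb.push_back(value);
    not_empty.notify_one();
  }

  // Blocking read: wait until an element exists, pop the oldest, wake a writer
  T read() {
    std::unique_lock<std::mutex> ul { m };
    not_empty.wait(ul, [&] { return !cb.empty(); });
    T value = cb.front();
    cb.pop_front();
    not_full.notify_one();
    return value;
  }
};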
void WaypointVelocityVisualizer::createVelocityMarker(const boost::circular_buffer<geometry_msgs::PoseStamped>& poses,
                                                      const boost::circular_buffer<geometry_msgs::TwistStamped>& twists,
                                                      const std::string& ns, const std_msgs::ColorRGBA& color,
                                                      visualization_msgs::MarkerArray& markers)
{
  assert(poses.size() == twists.size());
  std::vector<nav_msgs::Odometry> waypoints;
  for (unsigned int i = 0; i < poses.size(); ++i) {
    nav_msgs::Odometry odom;
    odom.pose.pose = poses[i].pose;
    odom.twist.twist = twists[i].twist;
    waypoints.push_back(odom);
  }
  createVelocityMarker(waypoints, ns, color, markers);
}
void WaypointVelocityVisualizer::controlCallback(const geometry_msgs::PoseStamped::ConstPtr& current_pose_msg,
                                                 const geometry_msgs::TwistStamped::ConstPtr& current_twist_msg,
                                                 const geometry_msgs::TwistStamped::ConstPtr& command_twist_msg)
{
  // buffers are reset when time goes backwards, e.g. when playing back a rosbag
  ros::Time current_time = ros::Time::now();
  if (previous_time_ > current_time) {
    ROS_WARN("Detected jump back in time of %.3fs. Clearing markers and buffers.", (previous_time_ - current_time).toSec());
    deleteMarkers();  // call 'DELETEALL'
    resetBuffers();   // clear circular buffers
  }
  previous_time_ = current_time;

  // if plot_metric_interval <= 0, velocity is plotted on every callback
  if (plot_metric_interval_ > 0 && current_pose_buf_.size() > 0) {
    tf::Vector3 p1, p2;
    tf::pointMsgToTF(current_pose_buf_.back().pose.position, p1);
    tf::pointMsgToTF(current_pose_msg->pose.position, p2);
    if (!(p1.distance(p2) > plot_metric_interval_))
      return;  // skip this plot
  }

  current_pose_buf_.push_back(*current_pose_msg);
  current_twist_buf_.push_back(*current_twist_msg);
  command_twist_buf_.push_back(*command_twist_msg);

  current_twist_marker_array_.markers.clear();
  command_twist_marker_array_.markers.clear();
  createVelocityMarker(current_pose_buf_, current_twist_buf_, "current_velocity", current_twist_color_, current_twist_marker_array_);
  createVelocityMarker(current_pose_buf_, command_twist_buf_, "twist_cmd", command_twist_color_, command_twist_marker_array_);

  publishVelocityMarker();
}
int readOneImpl()
{
  if (!m_bReady)
    return -1;  // delimiter has been matched --> read fails

  int nCurSize = m_cbuf.size();

  if (m_bEOF) {
    // EOF has been encountered --> returns the cbuf content
    if (nCurSize > 0) {
      unsigned char val = m_cbuf.front();
      m_cbuf.pop_front();
      return val;
    }
    // EOF reached and cbuf is empty
    return -1;  // failed!!
  }

  if (nCurSize < m_nCbufLen) {
    if (!fillCbuf())
      return -1;  // failed!!
  }

  unsigned char val = m_cbuf.front();
  m_cbuf.pop_front();
  fillOne();
  return val;
}
boost::shared_ptr<const MapsBuffer::MapsRgb> MapsBuffer::getFront(bool print)
{
  boost::shared_ptr<const MapsBuffer::MapsRgb> depth_rgb;
  {
    boost::mutex::scoped_lock buff_lock(bmutex_);
    while (buffer_.empty()) {
      if (is_done)
        break;
      {
        boost::mutex::scoped_lock io_lock(io_mutex);
        //std::cout << "No data in buffer_ yet or buffer is empty." << std::endl;
      }
      buff_empty_.wait(buff_lock);
    }
    depth_rgb = buffer_.front();
    buffer_.pop_front();
  }
  if (print)
    PCL_INFO("%d maps left in the buffer...\n", static_cast<int>(buffer_.size()));
  return (depth_rgb);
}
GestureType::Type GestureClassifierByHistogram::classifyClass3Gesture(const boost::circular_buffer<size_t> &matchedHistogramIndexes) const
{
#if 0
  const size_t &matchedIdx = matchHistogramByGestureIdPattern(matchedHistogramIndexes, gestureIdPatternHistogramsForThirdClassGesture_, params_.histDistThresholdForGestureIdPattern);

  switch (matchedIdx) {
  case :
    return GestureType::GT_INFINITY;
  case :
    return GestureType::GT_TRIANGLE;
  }
#else
  // TODO [adjust] >> design parameter
  const size_t countThreshold(matchedHistogramIndexes.size() / 2);
  //const size_t countThreshold(params_.matchedIndexCountThresholdForClass3Gesture);

  const size_t &matchedIdx = matchHistogramByFrequency(matchedHistogramIndexes, countThreshold);

  switch (matchedIdx) {
/*
  case :
    return GestureType::GT_LEFT_FAST_MOVE;
  case :
    return GestureType::GT_RIGHT_FAST_MOVE;
*/
  case 0:
    return GestureType::GT_INFINITY;
  case 1:
    return GestureType::GT_TRIANGLE;
  }
#endif

  return GestureType::GT_UNDEFINED;
}
/*! @brief Returns true if an n click has occurred in the given times and durations

    @param n the number of 'quick' consecutive clicks
    @param times a circular buffer of the click times
    @param durations a circular buffer of the click durations (needed to throw out long clicks)
    @param previoustime the previous time that this event has occurred
*/
bool BehaviourProvider::nClick(unsigned int n, const boost::circular_buffer<float>& times, const boost::circular_buffer<float>& durations, float& previoustime)
{
  size_t buffersize = times.size();

  if (buffersize < n)                              // if there aren't enough values in the buffer return false
    return false;
  else if (previoustime == times.back())           // if the previous time has not changed return false
    return false;
  else if (m_current_time - times.back() < 500)    // need to wait 500 ms for a potential next click
    return false;
  else {
    // n click if the last n presses are each less than 500 ms apart and none is a long (> 800 ms) click
    for (size_t i = buffersize-1; i > buffersize-n; i--) {
      if (times[i] - times[i-1] > 500 || durations[i] > 800)
        return false;
    }

    // reject if the click before this burst is also within 500 ms (that would
    // make it an (n+1) click) or if the first click of the burst is a long click
    if (buffersize-n > 0) {
      if (times[buffersize-n] - times[buffersize-n-1] < 500 || durations[buffersize-n] > 800)
        return false;
    }

    previoustime = times.back();
    return true;
  }
}
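/* Hypothetical caller sketch for nClick() above: keep the most recent button
   presses in circular buffers and test for a double click on every update.
   The buffer names, onButtonRelease() and update() are assumptions, and
   nClick() is shown as if it were callable from this scope. Times and
   durations are in milliseconds, matching the 500 ms / 800 ms checks. */
boost::circular_buffer<float> press_times(8);
boost::circular_buffer<float> press_durations(8);
float last_double_click_time = -1.0f;

void onButtonRelease(float time_ms, float duration_ms)
{
  press_times.push_back(time_ms);
  press_durations.push_back(duration_ms);
}

void update()
{
  if (nClick(2, press_times, press_durations, last_double_click_time)) {
    // Two quick clicks detected; last_double_click_time now holds the time
    // of the latest press, so the event does not fire again for this pair.
  }
}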
void GestureClassifierByHistogram::drawMatchedIdPatternHistogram(const boost::circular_buffer<size_t> &matchedHistogramIndexes, const std::string &windowName) const
{
  // calculate matched index histogram
  cv::MatND hist;
#if defined(__GNUC__)
  {
    cv::Mat tmpmat(std::vector<unsigned char>(matchedHistogramIndexes.begin(), matchedHistogramIndexes.end()));
    cv::calcHist(&tmpmat, 1, local::indexHistChannels, cv::Mat(), hist, local::histDims, local::indexHistSize, local::indexHistRanges, true, false);
  }
#else
  cv::calcHist(&cv::Mat(std::vector<unsigned char>(matchedHistogramIndexes.begin(), matchedHistogramIndexes.end())), 1, local::indexHistChannels, cv::Mat(), hist, local::histDims, local::indexHistSize, local::indexHistRanges, true, false);
#endif

  // normalize histogram
  //HistogramUtil::normalizeHistogram(hist, params_.maxMatchedHistogramNum);

  // draw matched index histogram
  cv::Mat histImg(cv::Mat::zeros(local::indexHistMaxHeight, local::indexHistBins*local::indexHistBinWidth, CV_8UC3));
  HistogramUtil::drawHistogram1D(hist, local::indexHistBins, params_.maxMatchedHistogramNum, local::indexHistBinWidth, local::indexHistMaxHeight, histImg);

  std::ostringstream sstream;
  sstream << "count: " << matchedHistogramIndexes.size();
  cv::putText(histImg, sstream.str(), cv::Point(10, 15), cv::FONT_HERSHEY_COMPLEX, 0.5, CV_RGB(255, 0, 255), 1, 8, false);

  cv::imshow(windowName, histImg);
}
double CTimeSmoother::EstimatePeriod(const boost::circular_buffer<double> &data, const vector<unsigned int> &intData)
{
  double sxy = 0, sxx = 0;
  for (unsigned int i = 0; i < data.size(); ++i) {
    sxy += intData[i] * data[i];
    sxx += intData[i] * intData[i];
  }
  return sxy / sxx;
}
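/* EstimatePeriod() above is a least-squares fit of a line through the origin:
   for samples data[i] ~ period * intData[i], the slope minimising the squared
   error is sum(n_i * t_i) / sum(n_i * n_i). A self-contained check of that
   formula on synthetic data (illustrative names, not the CTimeSmoother API): */
#include <boost/circular_buffer.hpp>
#include <cassert>
#include <cmath>
#include <vector>

static double estimate_period(const boost::circular_buffer<double> &t,
                              const std::vector<unsigned int> &n)
{
  double sxy = 0, sxx = 0;
  for (unsigned int i = 0; i < t.size(); ++i) {
    sxy += n[i] * t[i];
    sxx += n[i] * n[i];
  }
  return sxy / sxx;  // undefined if every n[i] is zero
}

int main()
{
  const double period = 0.04;  // e.g. a 25 Hz tick
  std::vector<unsigned int> n { 1, 2, 3, 5, 8 };
  boost::circular_buffer<double> t(n.size());
  for (unsigned int k : n)
    t.push_back(period * k + 1e-4);  // timestamps with a little noise

  assert(std::fabs(estimate_period(t, n) - period) < 1e-3);
  return 0;
}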
/*! \brief Performs a pass of the correlator, calculating all of the
    \f$\left[W^{(1)}_{i} \cdot W^{(2)}_{i+j}\right]\f$ values it can with
    the current data.

    These are summed up, ready to be divided by the pass count in
    getAveragedCorrelator() to return averaged values.
*/
void pass()
{
  ++_count;

  //Here, we're using the default constructor again to make a sum
  //variable with a value of zero.
  std::pair<T,T> sum = std::pair<T,T>();

  for (size_t i(0); i < _sample_history.size(); ++i) {
    sum.first += _sample_history[i].first;
    sum.second += _sample_history[i].second;
    _correlator[i] += elementwiseMultiply(sum.first, sum.second);
  }
}
bool fillCbuf()
{
  int nCurSize = m_cbuf.size();
  int nToFill = m_nCbufLen - nCurSize;
  MB_ASSERT(nToFill >= 0);
  if (nToFill == 0)
    return true;

  int i;
  for (i = 0; i < nToFill; ++i) {
    if (!fillOne())
      return false;
  }
  return true;
}
GestureType::Type GestureClassifierByHistogram::classifyClass2Gesture(const boost::circular_buffer<size_t> &matchedHistogramIndexes) const
{
#if 0
  const size_t &matchedIdx = matchHistogramByGestureIdPattern(matchedHistogramIndexes, gestureIdPatternHistogramsForClass2Gesture_, params_.histDistThresholdForGestureIdPattern);

  switch (matchedIdx) {
  case :
    return GestureType::GT_HORIZONTAL_FLIP;
  case :
    return GestureType::GT_VERTICAL_FLIP;
  // FIXME [implement] >>
/*
  case :
    return GestureType::GT_JAMJAM;
  case :
    return GestureType::GT_SHAKE;
  case :
    return GestureType::GT_LEFT_90_TURN;
  case :
    return GestureType::GT_RIGHT_90_TURN;
*/
  }
#else
  // TODO [adjust] >> design parameter
  const size_t countThreshold(matchedHistogramIndexes.size() / 2);
  //const size_t countThreshold(params_.matchedIndexCountThresholdForClass2Gesture);

  const size_t &matchedIdx = matchHistogramByFrequency(matchedHistogramIndexes, countThreshold);

  switch (matchedIdx) {
  case 0:
    return GestureType::GT_HORIZONTAL_FLIP;
  case 1:
    return GestureType::GT_VERTICAL_FLIP;
  // FIXME [implement] >>
/*
  case :
    return GestureType::GT_JAMJAM;
  case :
    return GestureType::GT_SHAKE;
  case :
    return GestureType::GT_LEFT_90_TURN;
  case :
    return GestureType::GT_RIGHT_90_TURN;
*/
  }
#endif

  return GestureType::GT_UNDEFINED;
}
/** Reserve some part of the pipe for writing

    \param[in] s is the number of elements to reserve

    \param[out] rid is an iterator to a description of the reservation
    that has been done if successful

    \param[in] blocking specifies whether the call waits for the
    operation to succeed

    \return true if the reservation was successful
*/
bool reserve_write(std::size_t s, rid_iterator &rid, bool blocking = false) {
  // Lock the pipe to avoid being disturbed
  std::unique_lock<std::mutex> ul { cb_mutex };
  TRISYCL_DUMP_T("Before write reservation cb.size() = " << cb.size()
                 << " size() = " << size());
  if (s == 0)
    // Empty reservation requested, so nothing to do
    return false;

  if (blocking)
    /* If in blocking mode, wait for enough room in the pipe, that may
       be changed when a read is done. Do not use a difference here
       because it is only about unsigned values */
    read_done.wait(ul, [&] { return cb.size() + s <= capacity(); });
  else if (cb.size() + s > capacity())
    // Not enough room in the pipe for the reservation
    return false;

  /* If there is enough room in the pipe, just create default values
     in it to do the reservation */
  for (std::size_t i = 0; i != s; ++i)
    cb.push_back();
  /* Compute the location of the first element a posteriori since it
     may not exist a priori if cb was empty before */
  auto first = cb.end() - s;
  /* Add a description of the reservation at the end of the
     reservation queue */
  w_rid_q.emplace_back(first, s);
  // Return the iterator to the last reservation descriptor
  rid = w_rid_q.end() - 1;
  TRISYCL_DUMP_T("After reservation cb.size() = " << cb.size()
                 << " size() = " << size());
  return true;
}
//TODO: change this a lot!
void updateGyroState(sensor_msgs::Imu& imuMsg, packet_t rxPkt, boost::circular_buffer<float>& calibration)
{
  uint8_t gyro_adc = rxPkt.payload[0];
  double current_time = ros::Time::now().toSec();
  double last_time = imuMsg.header.stamp.toSec();

  if (!isMoving) {
    calibration.push_back(float(gyro_adc));
    double total = 0;
    BOOST_FOREACH( float reading, calibration ) {
      total += reading;
    }
    cal_offset = total / calibration.size();
  }
void CTimeSmoother::BinData(const boost::circular_buffer<double> &data, vector<double> &bins, const double threshold, const unsigned int minbinsize)
{
  if (!data.size())
    return;

  bins.clear();
  vector<unsigned int> counts;

  for (boost::circular_buffer<double>::const_iterator i = data.begin(); i != data.end(); ++i) {
    bool found = false;
    for (unsigned int j = 0; j < bins.size(); ++j) {
      if (fabs(*i - bins[j]) < threshold*bins[j]) {
        found = true;
        // update our bin mean and count
        bins[j] = (bins[j]*counts[j] + *i)/(counts[j]+1);
        counts[j]++;
        break;
      }
    }
    if (!found) {
      bins.push_back(*i);
      counts.push_back(1);
    }
  }

  if (minbinsize) {
    assert(counts.size() == bins.size());
    assert(counts.size());
    // filter out any bins that are not large enough (and any bins that aren't positive)
    for (unsigned int j = 0; j < counts.size(); ) {
      if (counts[j] < minbinsize || bins[j] < 0.05) {
        bins.erase(bins.begin() + j);
        counts.erase(counts.begin() + j);
      } else
        j++;
    }
  }
}
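/* Minimal standalone version of the binning rule used in BinData() above
   (illustrative, not the CTimeSmoother API): a value joins a bin when it is
   within `threshold` (a fraction of the bin mean) of that mean, and the mean
   is updated incrementally as mean = (mean * count + x) / (count + 1). */
#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
  const double threshold = 0.05;  // 5 % relative tolerance
  std::vector<double> data { 0.0400, 0.0401, 0.0399, 0.0167, 0.0166 };
  std::vector<double> bins;
  std::vector<unsigned int> counts;

  for (double x : data) {
    bool found = false;
    for (std::size_t j = 0; j < bins.size(); ++j) {
      if (std::fabs(x - bins[j]) < threshold * bins[j]) {
        bins[j] = (bins[j] * counts[j] + x) / (counts[j] + 1);  // running mean
        counts[j]++;
        found = true;
        break;
      }
    }
    if (!found) {
      bins.push_back(x);
      counts.push_back(1);
    }
  }

  // Expect two bins: ~0.0400 (count 3) and ~0.01665 (count 2), e.g. the
  // frame periods of a 25 Hz and a 60 Hz source.
  for (std::size_t j = 0; j < bins.size(); ++j)
    std::printf("bin %zu: mean %.4f count %u\n", j, bins[j], counts[j]);
  return 0;
}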
GestureType::Type GestureClassifierByHistogram::classifyClass1Gesture(const boost::circular_buffer<size_t> &matchedHistogramIndexes, const bool useGestureIdPattern) const
{
  // match histogram by gesture ID pattern
  if (useGestureIdPattern) {
    const size_t &matchedIdx = matchHistogramByGestureIdPattern(matchedHistogramIndexes, gestureIdPatternHistogramsForClass1Gesture_, params_.histDistThresholdForGestureIdPattern);

    switch (matchedIdx) {
    case 1:
      return GestureType::GT_LEFT_MOVE;
    case 2:
      return GestureType::GT_RIGHT_MOVE;
    case 3:
      return GestureType::GT_UP_MOVE;
    case 4:
      return GestureType::GT_DOWN_MOVE;
    }
  }
  // match histogram by frequency
  else {
    // TODO [adjust] >> design parameter
    const size_t countThreshold(matchedHistogramIndexes.size() / 2);
    //const size_t countThreshold(params_.matchedIndexCountThresholdForClass1Gesture);

    const size_t &matchedIdx = matchHistogramByFrequency(matchedHistogramIndexes, countThreshold);

    switch (matchedIdx) {
    case 0:
      return GestureType::GT_HAND_OPEN;
    //case :
    //  return GestureType::GT_HAND_CLOSE;
    }
  }

  return GestureType::GT_UNDEFINED;
}
int size() const { return int(commandBuffers.size()); }
bool publish() {
  if (buf_flag) {
    pthread_mutex_lock(&mutex);

    //image_obj is empty
    if (image_obj_ringbuf.begin() == image_obj_ringbuf.end()) {
      pthread_mutex_unlock(&mutex);
      ROS_INFO("image_obj ring buffer is empty");
      return false;
    }
    //vscan_image is empty
    if (vscan_image_ringbuf.begin() == vscan_image_ringbuf.end()) {
      pthread_mutex_unlock(&mutex);
      ROS_INFO("vscan_image ring buffer is empty");
      return false;
    }

    // image_obj > vscan_image
    if (get_time(&(image_obj_ringbuf.front().header)) >= get_time(&(vscan_image_ringbuf.front().header))) {
      p_vscan_image_buf = &(vscan_image_ringbuf.front());
      boost::circular_buffer<autoware_msgs::image_obj>::iterator it = image_obj_ringbuf.begin();
      if (image_obj_ringbuf.size() == 1) {
        p_image_obj_buf = &*it;
        publish_msg(p_image_obj_buf, p_vscan_image_buf);
        pthread_mutex_unlock(&mutex);
        return true;
      } else {
        for (it++; it != image_obj_ringbuf.end(); it++) {
          if (fabs_time_diff(&(vscan_image_ringbuf.front().header), &((it-1)->header))
              < fabs_time_diff(&(vscan_image_ringbuf.front().header), &(it->header))) {
            p_image_obj_buf = &*(it-1);
            break;
          }
        }
        if (it == image_obj_ringbuf.end()) {
          p_image_obj_buf = &(image_obj_ringbuf.back());
        }
      }
    }
    // image_obj < vscan_image
    else {
      p_image_obj_buf = &(image_obj_ringbuf.front());
      boost::circular_buffer<autoware_msgs::PointsImage>::iterator it = vscan_image_ringbuf.begin();
      if (vscan_image_ringbuf.size() == 1) {
        p_vscan_image_buf = &*it;
        publish_msg(p_image_obj_buf, p_vscan_image_buf);
        pthread_mutex_unlock(&mutex);
        return true;
      }
      for (it++; it != vscan_image_ringbuf.end(); it++) {
        if (fabs_time_diff(&(image_obj_ringbuf.front().header), &((it-1)->header))
            < fabs_time_diff(&(image_obj_ringbuf.front().header), &(it->header))) {
          p_vscan_image_buf = &*(it-1);
          break;
        }
      }
      if (it == vscan_image_ringbuf.end()) {
        p_vscan_image_buf = &(vscan_image_ringbuf.back());
      }
    }

    publish_msg(p_image_obj_buf, p_vscan_image_buf);
    if (image_obj_ranged_flag == true) {
      buf_flag = false;
      image_obj_ranged_flag = false;
      pthread_mutex_unlock(&flag_mutex);
      image_obj_ringbuf.clear();
      vscan_image_ringbuf.clear();
    }
    // release the ring-buffer mutex locked at the top before returning
    pthread_mutex_unlock(&mutex);
    return true;
  } else {
    return false;
  }
}
void callback(const sensor_msgs::ImageConstPtr &img,
              const sensor_msgs::CameraInfoConstPtr &info) {
  boost::mutex::scoped_lock lock(mutex_);
  ros::Time now = ros::Time::now();
  static boost::circular_buffer<double> in_times(100);
  static boost::circular_buffer<double> out_times(100);
  static boost::circular_buffer<double> in_bytes(100);
  static boost::circular_buffer<double> out_bytes(100);
  ROS_DEBUG("resize: callback");
  if ( !publish_once_ || cp_.getNumSubscribers () == 0 ) {
    ROS_DEBUG("resize: number of subscribers is 0, ignoring image");
    return;
  }

  in_times.push_front((now - last_subscribe_time_).toSec());
  in_bytes.push_front(img->data.size());
  //
  try {
    int width = dst_width_ ? dst_width_ : (resize_x_ * info->width);
    int height = dst_height_ ? dst_height_ : (resize_y_ * info->height);
    double scale_x = dst_width_ ? ((double)dst_width_)/info->width : resize_x_;
    double scale_y = dst_height_ ? ((double)dst_height_)/info->height : resize_y_;

    cv_bridge::CvImagePtr cv_img = cv_bridge::toCvCopy(img);

    cv::Mat tmpmat(height, width, cv_img->image.type());
    cv::resize(cv_img->image, tmpmat, cv::Size(width, height));
    cv_img->image = tmpmat;

    sensor_msgs::CameraInfo tinfo = *info;
    tinfo.height = height;
    tinfo.width = width;
    tinfo.K[0] = tinfo.K[0] * scale_x; // fx
    tinfo.K[2] = tinfo.K[2] * scale_x; // cx
    tinfo.K[4] = tinfo.K[4] * scale_y; // fy
    tinfo.K[5] = tinfo.K[5] * scale_y; // cy
    tinfo.P[0] = tinfo.P[0] * scale_x; // fx
    tinfo.P[2] = tinfo.P[2] * scale_x; // cx
    tinfo.P[3] = tinfo.P[3] * scale_x; // T
    tinfo.P[5] = tinfo.P[5] * scale_y; // fy
    tinfo.P[6] = tinfo.P[6] * scale_y; // cy

    if ( !use_messages_ || now - last_publish_time_ > period_ ) {
      cp_.publish(cv_img->toImageMsg(),
                  boost::make_shared<sensor_msgs::CameraInfo> (tinfo));

      out_times.push_front((now - last_publish_time_).toSec());
      out_bytes.push_front(cv_img->image.total()*cv_img->image.elemSize());

      last_publish_time_ = now;
    }
  } catch( cv::Exception& e ) {
    ROS_ERROR("%s", e.what());
  }

  float duration = (now - last_rosinfo_time_).toSec();
  if ( duration > 2 ) {
    int in_time_n = in_times.size();
    int out_time_n = out_times.size();
    double in_time_mean = 0, in_time_rate = 1.0, in_time_std_dev = 0.0, in_time_max_delta, in_time_min_delta;
    double out_time_mean = 0, out_time_rate = 1.0, out_time_std_dev = 0.0, out_time_max_delta, out_time_min_delta;

    std::for_each( in_times.begin(), in_times.end(), (in_time_mean += boost::lambda::_1) );
    in_time_mean /= in_time_n;
    in_time_rate /= in_time_mean;
    std::for_each( in_times.begin(), in_times.end(),
                   (in_time_std_dev += (boost::lambda::_1 - in_time_mean)*(boost::lambda::_1 - in_time_mean) ) );
    in_time_std_dev = sqrt(in_time_std_dev/in_time_n);
    if ( in_time_n > 1 ) {
      in_time_min_delta = *std::min_element(in_times.begin(), in_times.end());
      in_time_max_delta = *std::max_element(in_times.begin(), in_times.end());
    }

    std::for_each( out_times.begin(), out_times.end(), (out_time_mean += boost::lambda::_1) );
    out_time_mean /= out_time_n;
    out_time_rate /= out_time_mean;
    std::for_each( out_times.begin(), out_times.end(),
                   (out_time_std_dev += (boost::lambda::_1 - out_time_mean)*(boost::lambda::_1 - out_time_mean) ) );
    out_time_std_dev = sqrt(out_time_std_dev/out_time_n);
    if ( out_time_n > 1 ) {
      out_time_min_delta = *std::min_element(out_times.begin(), out_times.end());
      out_time_max_delta = *std::max_element(out_times.begin(), out_times.end());
    }

    double in_byte_mean = 0, out_byte_mean = 0;
    std::for_each( in_bytes.begin(), in_bytes.end(), (in_byte_mean += boost::lambda::_1) );
    std::for_each( out_bytes.begin(), out_bytes.end(), (out_byte_mean += boost::lambda::_1) );
    in_byte_mean /= duration;
    out_byte_mean /= duration;

    ROS_INFO_STREAM(" in bandwidth: " << std::fixed << std::setw(11) << std::setprecision(3)
                    << in_byte_mean/1000*8
                    << " Kbps rate:"   << std::fixed << std::setw(7) << std::setprecision(3) << in_time_rate
                    << " hz min:"      << std::fixed << std::setw(7) << std::setprecision(3) << in_time_min_delta
                    << " s max: "      << std::fixed << std::setw(7) << std::setprecision(3) << in_time_max_delta
                    << " s std_dev: "  << std::fixed << std::setw(7) << std::setprecision(3) << in_time_std_dev
                    << "s n: " << in_time_n);
    ROS_INFO_STREAM(" out bandwidth: " << std::fixed << std::setw(11) << std::setprecision(3)
                    << out_byte_mean/1000*8
                    << " kbps rate:"   << std::fixed << std::setw(7) << std::setprecision(3) << out_time_rate
                    << " hz min:"      << std::fixed << std::setw(7) << std::setprecision(3) << out_time_min_delta
                    << " s max: "      << std::fixed << std::setw(7) << std::setprecision(3) << out_time_max_delta
                    << " s std_dev: "  << std::fixed << std::setw(7) << std::setprecision(3) << out_time_std_dev
                    << "s n: " << out_time_n);
    in_times.clear();
    in_bytes.clear();
    out_times.clear();
    out_bytes.clear();
    last_rosinfo_time_ = now;
  }

  last_subscribe_time_ = now;

  if(use_snapshot_) {
    publish_once_ = false;
  }
}
void gpsFixCallBack(geometry_msgs::PoseStamped gpsPose)
{
  geometry_msgs::PoseStamped gpsFixPoseEstimate;

  // find poseAtRequestTime
  bool odomMovementFound = false;
  pair<geometry_msgs::Pose, ros::Time> prevFront;
  if (cb.size() > 0) {
    odomMovementFound = true;
    cout << "odom found" << endl;
    prevFront = cb.front();
    if (gpsPose.header.stamp >= cb.front().second) {
      while (gpsPose.header.stamp > prevFront.second && cb.size() > 0) {
        prevFront = cb.front();
        cb.pop_front();
      }
    }
  }

  // calculate Odom difference
  double deltaX = 0;
  double deltaY = 0;
  if (odomMovementFound) {
    deltaX = (current.first.position.x - prevFront.first.position.x);
    deltaY = (current.first.position.y - prevFront.first.position.y);
  }
  gpsFixPoseEstimate.pose.position.x = gpsPose.pose.position.x + deltaX;
  gpsFixPoseEstimate.pose.position.y = gpsPose.pose.position.y + deltaY;

  tf::Quaternion gpsQaut, odomCurrentQuat, odomOldQuat;
  if (odomMovementFound) {
    tf::quaternionMsgToTF(current.first.orientation, odomCurrentQuat);
    tf::quaternionMsgToTF(prevFront.first.orientation, odomOldQuat);
  }
  tf::quaternionMsgToTF(gpsPose.pose.orientation, gpsQaut);

  double deltaYaw = 0;
  if (odomMovementFound) {
    deltaYaw = (tf::getYaw(odomCurrentQuat) - tf::getYaw(odomOldQuat));
  }
  double newYaw = tf::getYaw(gpsQaut) + deltaYaw;
  gpsFixPoseEstimate.pose.orientation = tf::createQuaternionMsgFromYaw(newYaw);
  cout << "new Yaw: " << newYaw << endl;

  // create covariance
  // publish new estimated pose
  gpsFixPoseEstimate.header.stamp = ros::Time::now();
  gpsFixPoseEstimate.header.frame_id = gpsFrame;

  geometry_msgs::PoseStamped inMapFrame;
  try {
    tfListener->transformPose("/map", ros::Time::now(), gpsFixPoseEstimate, gpsFrame, inMapFrame);

    geometry_msgs::PoseWithCovarianceStamped resultPose;
    resultPose.pose.pose = inMapFrame.pose;
    resultPose.header = inMapFrame.header;

    if (odomMovementFound) {
      resultPose.pose.covariance[0] = sqrt(abs(current.first.position.x - prevFront.first.position.x));  // X
      resultPose.pose.covariance[7] = sqrt(abs(current.first.position.y - prevFront.first.position.y));  // Y
      resultPose.pose.covariance[35] = sqrt(abs(newYaw));  // Yaw
    } else {
      resultPose.pose.covariance[0] = 0.1;   // X
      resultPose.pose.covariance[7] = 0.1;   // Y
      resultPose.pose.covariance[35] = 0.1;  // Yaw
    }

    gpsOdomCombinedLocalisationPublisher.publish(resultPose);
  } catch (tf::TransformException ex) {
    ROS_ERROR("%s", ex.what());
  }
}
bool publish() {
  if (buf_flag) {
    //image_obj_ranged is empty
    if (image_obj_ranged_ringbuf.begin() == image_obj_ranged_ringbuf.end()) {
      ROS_INFO("image_obj_ranged ring buffer is empty");
      return false;
    }
    //image_raw is empty
    if (image_raw_ringbuf.begin() == image_raw_ringbuf.end()) {
      ROS_INFO("image_raw ring buffer is empty");
      return false;
    }

    // image_obj_ranged > image_raw
    if (get_time(&(image_obj_ranged_ringbuf.front().header)) >= get_time(&(image_raw_ringbuf.front().header))) {
      p_image_raw_buf = &(image_raw_ringbuf.front());
      boost::circular_buffer<cv_tracker::image_obj_ranged>::iterator it = image_obj_ranged_ringbuf.begin();
      if (image_obj_ranged_ringbuf.size() == 1) {
        p_image_obj_ranged_buf = &*it;
        publish_msg(p_image_obj_ranged_buf, p_image_raw_buf);
        if (image_obj_tracked_flag == true) {
          buf_flag = false;
          image_obj_tracked_flag = false;
          image_obj_ranged_ringbuf.clear();
          image_raw_ringbuf.clear();
        }
        return true;
      } else {
        for (it++; it != image_obj_ranged_ringbuf.end(); it++) {
          if (fabs_time_diff(&(image_raw_ringbuf.front().header), &((it-1)->header))
              < fabs_time_diff(&(image_raw_ringbuf.front().header), &(it->header))) {
            p_image_obj_ranged_buf = &*(it-1);
            break;
          }
        }
        if (it == image_obj_ranged_ringbuf.end()) {
          p_image_obj_ranged_buf = &(image_obj_ranged_ringbuf.back());
        }
      }
    }
    // image_obj_ranged < image_raw
    else {
      p_image_obj_ranged_buf = &(image_obj_ranged_ringbuf.front());
      boost::circular_buffer<sensor_msgs::Image>::iterator it = image_raw_ringbuf.begin();
      if (image_raw_ringbuf.size() == 1) {
        p_image_raw_buf = &*it;
        publish_msg(p_image_obj_ranged_buf, p_image_raw_buf);
        if (image_obj_tracked_flag == true) {
          buf_flag = false;
          image_obj_tracked_flag = false;
          image_obj_ranged_ringbuf.clear();
          image_raw_ringbuf.clear();
        }
        return true;
      }
      for (it++; it != image_raw_ringbuf.end(); it++) {
        if (fabs_time_diff(&(image_obj_ranged_ringbuf.front().header), &((it-1)->header))
            < fabs_time_diff(&(image_obj_ranged_ringbuf.front().header), &(it->header))) {
          p_image_raw_buf = &*(it-1);
          break;
        }
      }
      if (it == image_raw_ringbuf.end()) {
        p_image_raw_buf = &(image_raw_ringbuf.back());
      }
    }

    publish_msg(p_image_obj_ranged_buf, p_image_raw_buf);
    if (image_obj_tracked_flag == true) {
      buf_flag = false;
      image_obj_tracked_flag = false;
      image_obj_ranged_ringbuf.clear();
      image_raw_ringbuf.clear();
    }
    return true;
  } else {
    return false;
  }
}
/** Test if the pipe is empty

    This is obviously a volatile value which is constrained by
    special relativity.

    Note that on some devices it may be costly to implement on the
    write side (for example on FPGA).
*/
bool empty() const {
  TRISYCL_DUMP_T("empty() cb.size() = " << cb.size()
                 << " size() = " << size());
  // It is empty when the size is zero, taking into account reservations
  return size() == 0;
}
void ShowFrameDurationPlot() {

  Vec2i windowSize = mainApp->getWindow()->getSize();
  size_t maxSamples = size_t(windowSize.x);

  if(maxSamples != frameDurationPlotValues.capacity()) {
    frameDurationPlotValues.set_capacity(maxSamples);
  }
  if(maxSamples != frameDurationPlotVertices.size()) {
    frameDurationPlotVertices.resize(maxSamples);
  }

  GRenderer->ResetTexture(0);

  frameDurationPlotValues.push_front(toMs(g_platformTime.lastFrameDuration()));

  float avg = std::accumulate(frameDurationPlotValues.begin(), frameDurationPlotValues.end(), 0.f) / frameDurationPlotValues.size();
  float worst = *std::max_element(frameDurationPlotValues.begin(), frameDurationPlotValues.end());

  const float OFFSET_Y = 80.f;
  const float SCALE_Y = 4.0f;

  for(size_t i = 0; i < frameDurationPlotValues.size(); ++i) {
    float time = frameDurationPlotValues[i];
    frameDurationPlotVertices[i].color = Color::white.toRGB();
    frameDurationPlotVertices[i].p.x = i;
    frameDurationPlotVertices[i].p.y = OFFSET_Y + (time * SCALE_Y);
    frameDurationPlotVertices[i].p.z = 1.0f;
    frameDurationPlotVertices[i].w = 1.0f;
  }

  EERIEDRAWPRIM(Renderer::LineStrip, &frameDurationPlotVertices[0], frameDurationPlotValues.size());

  Color avgColor = Color::blue * 0.5f + Color::white * 0.5f;
  float avgPos = OFFSET_Y + (avg * SCALE_Y);
  drawLine(Vec2f(0, avgPos), Vec2f(windowSize.x, avgPos), 1.0f, Color::blue);

  Color worstColor = Color::red * 0.5f + Color::white * 0.5f;
  float worstPos = OFFSET_Y + (worst * SCALE_Y);
  drawLine(Vec2f(0, worstPos), Vec2f(windowSize.x, worstPos), 1.0f, Color::red);

  Font * font = hFontDebug;
  float lineOffset = font->getLineHeight() + 2;

  std::string labels[3] = { "Average: ", "Worst: ", "Current: " };
  Color colors[3] = { avgColor, worstColor, Color::white };
  float values[3] = { avg, worst, frameDurationPlotValues[0] };

  std::string texts[3];
  float widths[3];
  static float labelWidth = 0.f;
  static float valueWidth = 0.f;
  for(size_t i = 0; i < 3; i++) {

    // Format value
    std::ostringstream oss;
    oss << std::fixed << std::setprecision(2) << values[i] << " ms ("
        << 1.f / (values[i] * 0.001f) << " FPS)";
    texts[i] = oss.str();

    // Calculate widths (could be done more efficiently for monospace fonts...)
    labelWidth = std::max(labelWidth, float(font->getTextSize(labels[i]).width()));
    widths[i] = font->getTextSize(texts[i]).width();
    valueWidth = std::max(valueWidth, widths[i]);
  }

  float x = 10;
  float y = 10;
  float xend = x + labelWidth + 10 + valueWidth;
  for(size_t i = 0; i < 3; i++) {
    font->draw(Vec2i(x, y), labels[i], Color::gray(0.8f));
    font->draw(Vec2i(xend - widths[i], y), texts[i], colors[i]);
    y += lineOffset;
  }
}