/// \brief Synchronized-subscriber callback: cache the latest tracking state.
///
/// Copies the incoming image into image_, stores the camera info,
/// moving-edge sites and (optional) KLT points, and rebuilds cMo_ from
/// the published pose so the viewer thread can render it.
///
/// \param image          raw camera image to convert into image_.
/// \param info           camera calibration / metadata.
/// \param trackingResult tracker pose (object frame w.r.t. camera).
/// \param sites          moving-edge sites used by the tracker.
/// \param klt            KLT feature points (may be empty depending on tracker type).
void TrackerViewer::callback
(const sensor_msgs::ImageConstPtr& image,
 const sensor_msgs::CameraInfoConstPtr& info,
 const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& trackingResult,
 const visp_tracker::MovingEdgeSites::ConstPtr& sites,
 const visp_tracker::KltPoints::ConstPtr& klt)
{
  // Copy image.
  try
  {
    rosImageToVisp(image_, image);
  }
  catch (std::exception& e)
  {
    ROS_ERROR_STREAM("dropping frame: " << e.what());
    // BUGFIX: actually drop the frame.  The previous code fell through
    // and still updated info_/sites_/klt_/cMo_ below, pairing fresh
    // metadata with a stale (or partially converted) image_.
    return;
  }

  // Copy camera info, moving-edge sites and optional KLT points.
  info_ = info;
  sites_ = sites;
  klt_ = klt;

  // Copy cMo.  cMo_ is optional-like: reset it to a fresh matrix, then
  // fill the contained value from the published pose.
  cMo_ = vpHomogeneousMatrix();
  transformToVpHomogeneousMatrix(*cMo_, trackingResult->pose.pose);
}
void Tracker::spin() { ros::Rate loopRateTracking(100); tf::Transform transform; std_msgs::Header lastHeader; while (!exiting()) { // When a camera sequence is played several times, // the seq id will decrease, in this case we want to // continue the tracking. if (header_.seq < lastHeader.seq) lastTrackedImage_ = 0; if (lastTrackedImage_ < header_.seq) { lastTrackedImage_ = header_.seq; // If we can estimate the camera displacement using tf, // we update the cMo to compensate for robot motion. if (compensateRobotMotion_) try { tf::StampedTransform stampedTransform; listener_.lookupTransform (header_.frame_id, // camera frame name header_.stamp, // current image time header_.frame_id, // camera frame name lastHeader.stamp, // last processed image time worldFrameId_, // frame attached to the environment stampedTransform ); vpHomogeneousMatrix newMold; transformToVpHomogeneousMatrix (newMold, stampedTransform); cMo_ = newMold * cMo_; mutex_.lock(); tracker_->setPose(image_, cMo_); mutex_.unlock(); } catch(tf::TransformException& e) { mutex_.unlock(); } // If we are lost but an estimation of the object position // is provided, use it to try to reinitialize the system. if (state_ == LOST) { // If the last received message is recent enough, // use it otherwise do nothing. if (ros::Time::now () - objectPositionHint_.header.stamp < ros::Duration (1.)) transformToVpHomogeneousMatrix (cMo_, objectPositionHint_.transform); mutex_.lock(); tracker_->setPose(image_, cMo_); mutex_.unlock(); } // We try to track the image even if we are lost, // in the case the tracker recovers... if (state_ == TRACKING || state_ == LOST) try { mutex_.lock(); // tracker_->setPose(image_, cMo_); // Removed as it is not necessary when the pose is not modified from outside. tracker_->track(image_); tracker_->getPose(cMo_); mutex_.unlock(); } catch(...) { mutex_.unlock(); ROS_WARN_THROTTLE(10, "tracking lost"); state_ = LOST; } // Publish the tracking result. 
if (state_ == TRACKING) { geometry_msgs::Transform transformMsg; vpHomogeneousMatrixToTransform(transformMsg, cMo_); // Publish position. if (transformationPublisher_.getNumSubscribers () > 0) { geometry_msgs::TransformStampedPtr objectPosition (new geometry_msgs::TransformStamped); objectPosition->header = header_; objectPosition->child_frame_id = childFrameId_; objectPosition->transform = transformMsg; transformationPublisher_.publish(objectPosition); } // Publish result. if (resultPublisher_.getNumSubscribers () > 0) { geometry_msgs::PoseWithCovarianceStampedPtr result (new geometry_msgs::PoseWithCovarianceStamped); result->header = header_; result->pose.pose.position.x = transformMsg.translation.x; result->pose.pose.position.y = transformMsg.translation.y; result->pose.pose.position.z = transformMsg.translation.z; result->pose.pose.orientation.x = transformMsg.rotation.x; result->pose.pose.orientation.y = transformMsg.rotation.y; result->pose.pose.orientation.z = transformMsg.rotation.z; result->pose.pose.orientation.w = transformMsg.rotation.w; const vpMatrix& covariance = tracker_->getCovarianceMatrix(); for (unsigned i = 0; i < covariance.getRows(); ++i) for (unsigned j = 0; j < covariance.getCols(); ++j) { unsigned idx = i * covariance.getCols() + j; if (idx >= 36) continue; result->pose.covariance[idx] = covariance[i][j]; } resultPublisher_.publish(result); } // Publish moving edge sites. if (movingEdgeSitesPublisher_.getNumSubscribers () > 0) { visp_tracker::MovingEdgeSitesPtr sites (new visp_tracker::MovingEdgeSites); updateMovingEdgeSites(sites); sites->header = header_; movingEdgeSitesPublisher_.publish(sites); } // Publish KLT points. if (kltPointsPublisher_.getNumSubscribers () > 0) { visp_tracker::KltPointsPtr klt (new visp_tracker::KltPoints); updateKltPoints(klt); klt->header = header_; kltPointsPublisher_.publish(klt); } // Publish to tf. 
transform.setOrigin (tf::Vector3(transformMsg.translation.x, transformMsg.translation.y, transformMsg.translation.z)); transform.setRotation (tf::Quaternion (transformMsg.rotation.x, transformMsg.rotation.y, transformMsg.rotation.z, transformMsg.rotation.w)); transformBroadcaster_.sendTransform (tf::StampedTransform (transform, header_.stamp, header_.frame_id, childFrameId_)); } } lastHeader = header_; spinOnce(); loopRateTracking.sleep(); } }