// Returns the object whose position lies closest to the player's viewing
// ray, or NULL when there are no objects to select.
//
// The viewing ray is represented by its 4x4 skew-symmetric Pluecker matrix
// L = P*Q^T - Q*P^T built from the homogeneous player position P and the
// look-at point Q. Multiplying the (normalized) dual Pluecker matrix with a
// homogeneous object position yields a vector whose first three components
// measure the object's distance to the ray; we minimize that distance.
//
// @param allObjects list of candidate objects (may be NULL or empty).
// @param player     provides position and lookAtVector; must be non-NULL.
// @return closest object, or NULL when allObjects is NULL/empty.
Object* getSelectedObject(std::list<Object*>* allObjects, Player* player)
{
    if (!allObjects || allObjects->empty())
        return NULL;

    cv::Vec4f playerPosition(player->position[0], player->position[1],
                             player->position[2], 1);
    // NOTE(review): lookAtVector is used with homogeneous w = 1, i.e. as a
    // second *point* on the ray. If it is actually a direction vector, the
    // fourth component should be 0 -- confirm against the caller.
    cv::Vec4f lookAt(player->lookAtVector[0], player->lookAtVector[1],
                     player->lookAtVector[2], 1);

    // Pluecker matrix of the line through playerPosition and lookAt.
    cv::Matx<float,4,4> plueckerCoords =
        playerPosition * lookAt.t() - lookAt * playerPosition.t();

    // The six independent entries of the skew-symmetric Pluecker matrix.
    const float a = plueckerCoords(0,1);
    const float b = plueckerCoords(0,2);
    const float c = plueckerCoords(0,3);
    const float d = plueckerCoords(1,2);
    const float e = plueckerCoords(1,3);
    const float f = plueckerCoords(2,3);

    // Dual Pluecker matrix (moment and direction parts swapped).
    cv::Matx<float,4,4> dualPlueckerCoords( 0,  f, -e,  d,
                                           -f,  0,  c, -b,
                                            e, -c,  0,  a,
                                           -d,  b, -a,  0);

    // Normalization factor derived from the direction part of the line.
    // NOTE(review): dividing by the product of the diagonal entries of
    // A^T * A is an unusual Pluecker normalization -- verify the derivation.
    cv::Matx<float,3,3> A( 0,  f, -e,
                          -f,  0,  c,
                           e, -c,  0);
    A = A.t() * A;
    dualPlueckerCoords =
        sqrt(2) / (A(0,0) * A(1,1) * A(2,2)) * dualPlueckerCoords;

    float minDistance = 999999999999;
    // Initialized so we can never return an indeterminate pointer
    // (the original left this uninitialized).
    Object* closestObject = NULL;

    for (std::list<Object*>::iterator it = allObjects->begin();
         it != allObjects->end(); ++it)
    {
        cv::Vec3f objectPosition = (*it)->position;
        cv::Vec4f distanceVector =
            dualPlueckerCoords * cv::Vec4f(objectPosition(0),
                                           objectPosition(1),
                                           objectPosition(2), 1);
        // Distance of the object to the viewing ray -- computed once per
        // object instead of three times as before.
        const float distance =
            cv::norm(cv::Vec3f(distanceVector(0), distanceVector(1),
                               distanceVector(2)));
        std::cout << "vectorNorm" << distance << std::endl;
        if (distance < minDistance)
        {
            minDistance = distance;
            closestObject = *it;
        }
    }

    // Debug output.
    std::cout << "plueckerCoords: " << plueckerCoords << std::endl;
    std::cout << "dualPlueckerCoords: " << dualPlueckerCoords << std::endl;
    std::cout << "plueckerCoords: " << a << " " << b << " " << c << " "
              << d << " " << e << " " << f << std::endl;
    std::cout << "Minimum distance " << minDistance << std::endl;

    return closestObject;
}
void Tracker::spin() { ros::Rate loopRateTracking(100); tf::Transform transform; std_msgs::Header lastHeader; while (!exiting()) { // When a camera sequence is played several times, // the seq id will decrease, in this case we want to // continue the tracking. if (header_.seq < lastHeader.seq) lastTrackedImage_ = 0; if (lastTrackedImage_ < header_.seq) { lastTrackedImage_ = header_.seq; // If we can estimate the camera displacement using tf, // we update the cMo to compensate for robot motion. if (compensateRobotMotion_) try { tf::StampedTransform stampedTransform; listener_.lookupTransform (header_.frame_id, // camera frame name header_.stamp, // current image time header_.frame_id, // camera frame name lastHeader.stamp, // last processed image time worldFrameId_, // frame attached to the environment stampedTransform ); vpHomogeneousMatrix newMold; transformToVpHomogeneousMatrix (newMold, stampedTransform); cMo_ = newMold * cMo_; mutex_.lock(); tracker_->setPose(image_, cMo_); mutex_.unlock(); } catch(tf::TransformException& e) { mutex_.unlock(); } // If we are lost but an estimation of the object position // is provided, use it to try to reinitialize the system. if (state_ == LOST) { // If the last received message is recent enough, // use it otherwise do nothing. if (ros::Time::now () - objectPositionHint_.header.stamp < ros::Duration (1.)) transformToVpHomogeneousMatrix (cMo_, objectPositionHint_.transform); mutex_.lock(); tracker_->setPose(image_, cMo_); mutex_.unlock(); } // We try to track the image even if we are lost, // in the case the tracker recovers... if (state_ == TRACKING || state_ == LOST) try { mutex_.lock(); // tracker_->setPose(image_, cMo_); // Removed as it is not necessary when the pose is not modified from outside. tracker_->track(image_); tracker_->getPose(cMo_); mutex_.unlock(); } catch(...) { mutex_.unlock(); ROS_WARN_THROTTLE(10, "tracking lost"); state_ = LOST; } // Publish the tracking result. 
if (state_ == TRACKING) { geometry_msgs::Transform transformMsg; vpHomogeneousMatrixToTransform(transformMsg, cMo_); // Publish position. if (transformationPublisher_.getNumSubscribers () > 0) { geometry_msgs::TransformStampedPtr objectPosition (new geometry_msgs::TransformStamped); objectPosition->header = header_; objectPosition->child_frame_id = childFrameId_; objectPosition->transform = transformMsg; transformationPublisher_.publish(objectPosition); } // Publish result. if (resultPublisher_.getNumSubscribers () > 0) { geometry_msgs::PoseWithCovarianceStampedPtr result (new geometry_msgs::PoseWithCovarianceStamped); result->header = header_; result->pose.pose.position.x = transformMsg.translation.x; result->pose.pose.position.y = transformMsg.translation.y; result->pose.pose.position.z = transformMsg.translation.z; result->pose.pose.orientation.x = transformMsg.rotation.x; result->pose.pose.orientation.y = transformMsg.rotation.y; result->pose.pose.orientation.z = transformMsg.rotation.z; result->pose.pose.orientation.w = transformMsg.rotation.w; const vpMatrix& covariance = tracker_->getCovarianceMatrix(); for (unsigned i = 0; i < covariance.getRows(); ++i) for (unsigned j = 0; j < covariance.getCols(); ++j) { unsigned idx = i * covariance.getCols() + j; if (idx >= 36) continue; result->pose.covariance[idx] = covariance[i][j]; } resultPublisher_.publish(result); } // Publish moving edge sites. if (movingEdgeSitesPublisher_.getNumSubscribers () > 0) { visp_tracker::MovingEdgeSitesPtr sites (new visp_tracker::MovingEdgeSites); updateMovingEdgeSites(sites); sites->header = header_; movingEdgeSitesPublisher_.publish(sites); } // Publish KLT points. if (kltPointsPublisher_.getNumSubscribers () > 0) { visp_tracker::KltPointsPtr klt (new visp_tracker::KltPoints); updateKltPoints(klt); klt->header = header_; kltPointsPublisher_.publish(klt); } // Publish to tf. 
transform.setOrigin (tf::Vector3(transformMsg.translation.x, transformMsg.translation.y, transformMsg.translation.z)); transform.setRotation (tf::Quaternion (transformMsg.rotation.x, transformMsg.rotation.y, transformMsg.rotation.z, transformMsg.rotation.w)); transformBroadcaster_.sendTransform (tf::StampedTransform (transform, header_.stamp, header_.frame_id, childFrameId_)); } } lastHeader = header_; spinOnce(); loopRateTracking.sleep(); } }