/**
 * @brief Handle a single target pose reported by the vision system.
 *
 * Stores the incoming pose (vehicle-relative FLU frame) as the current target
 * state, converts it to ENU and forwards it to the FCU once the vehicle pose
 * is known, notifies the attached controller that a fresh target position is
 * available, and resets the consecutive-frame error counter.
 *
 * @param msg Target pose in the FLU frame produced by the vision system.
 */
void Locator::targetPoseCallback(const geometry_msgs::PoseStampedConstPtr& msg)
{
    // We have a target to process. Bind a const reference instead of copying
    // the whole PoseStamped — only the header and pose are read.
    const geometry_msgs::PoseStamped& targetPose = *msg;

    // Copy the message header since we need it when we forward it on to mavros.
    vision_pose_.header = targetPose.header;

    if (!targetPoseInitialized_)
    {
        // Target pose not initialized so initialize it.
        target_current_FLU_.setPose(targetPose.pose);
        targetPoseInitialized_ = true;
        // if (vehiclePoseInitialized)
        // {
        //     syncVehicleandTargetPose();
        // }
        // else
        // {
        //     targetYawOrigin = getTargetYaw();
        // }
    }
    else
    {
        /*
         * Note that in this version the filtering of the vision input is
         * commented out to allow for unadulterated vision inputs. If one
         * wanted to play with filtering then they should uncomment this code
         * and play with the filter appropriately.
         */
        // Update the target state with the filtered position
        // geometry_msgs::Pose fPose = filterPosition(msg);
        // target_current_FLU_.setPose(fPose);

        // No filtering
        target_current_FLU_.setPose(targetPose.pose);
    }

    ROS_DEBUG("GOT TARGET FLU[F:%0.3f L:%0.3f U:%0.3f, R:%0.3f P:%0.3f Y:%0.3f, ]",
              target_current_FLU_.position.x, target_current_FLU_.position.y,
              target_current_FLU_.position.z, target_current_FLU_.attitude.roll,
              target_current_FLU_.attitude.pitch, target_current_FLU_.attitude.yaw);

    if (vehiclePoseInitialized_)
    {
        // The vision system produces targets in the FLU frame so convert it to ENU.
        convertTargetFLUtoENU();

        // Publish target location to the FCU.
        updateFCULocation();
    }

    // Notify the controller that the target position has been updated.
    if (controller_ != nullptr)
    {
        controller_->targetPositionUpdated();
    }

    // Reset the consecutive frame error count.
    position_error_cnt_ = 0;
}
void Locator::targetPoseCallback(const geometry_msgs::PoseArray::ConstPtr& msgArray) { // Check to see if we have any targets if (msgArray->poses.size() > 0) { // We have a target to process geometry_msgs::Pose msg = msgArray->poses.at(0); // copy the message header since we need it when we forward it on to mavros vision_pose_.header = msgArray->header; if (!targetPoseInitialized_) { // Target pose not initialized so initialize it target_current_FLU_.setPose(msg); targetPoseInitialized_ = true; // if (vehiclePoseInitialized) // { // syncVehicleandTargetPose(); // } // else // { // targetYawOrigin = getTargetYaw(); // } } else { /* * Note that in this version the filtering of the vision input is commented out to allow * for unadulterated vision inputs. If one wanted to play with filtering then * they should uncomment this code and play with the filter appropriately */ // Update the target state with the filtered position // geometry_msgs::Pose fPose = filterPosition(msg); // target_current_FLU_.setPose(fPose); // No filtering target_current_FLU_.setPose(msg); } ROS_DEBUG("GOT TARGET FLU[F:%0.3f L:%0.3f U:%0.3f, R:%0.3f P:%0.3f Y:%0.3f, ]", target_current_FLU_.position.x, target_current_FLU_.position.y, target_current_FLU_.position.z, target_current_FLU_.attitude.roll, target_current_FLU_.attitude.pitch, target_current_FLU_.attitude.yaw); if (vehiclePoseInitialized_) { // The vision system produces targets in the FLU frame so convert it to ENU convertTargetFLUtoENU(); // Publish target location to the FCU updateFCULocation(); } // Notify the controller that the target position has been updated if (controller_ != NULL) { controller_->targetPositionUpdated(); } // Reset the consecutive frame error count position_error_cnt_ = 0; } else { // No targets detected so potential error // reset the pose intitialization flag targetPoseInitialized_ = false; if (controller_ != NULL) { // If this is the first error frame detected then notify the controller // Note that you can 
change this constant it you want to notify the // controller after 'X' number of consecutive frame errors or perhaps // base the conditional on some time duration if (position_error_cnt_ == 1) { controller_->locationError(); } position_error_cnt_++; } } }