bool ArucoMapping::processImage(cv::Mat input_image, cv::Mat output_image)
{
  aruco::MarkerDetector Detector;
  std::vector<aruco::Marker> temp_markers;

  // Set visibility flag to false for all markers
  for(size_t i = 0; i < num_of_markers_; i++)
    markers_[i].visible = false;

  // Save previous marker count
  marker_counter_previous_ = marker_counter_;

  // Detect markers
  Detector.detect(input_image, temp_markers, aruco_calib_params_, marker_size_);

  // If no marker found, print statement
  if(temp_markers.size() == 0)
    ROS_DEBUG("No marker found!");

  //------------------------------------------------------
  // FIRST MARKER DETECTED
  //------------------------------------------------------
  if((temp_markers.size() > 0) && (first_marker_detected_ == false))
  {
    // Set flag
    first_marker_detected_ = true;

    // Detect lowest marker ID
    lowest_marker_id_ = temp_markers[0].id;
    for(size_t i = 0; i < temp_markers.size(); i++)
    {
      if(temp_markers[i].id < lowest_marker_id_)
        lowest_marker_id_ = temp_markers[i].id;
    }

    ROS_DEBUG_STREAM("The lowest marker ID is: " << lowest_marker_id_);

    // Identify the lowest marker ID with the world's origin
    markers_[0].marker_id = lowest_marker_id_;

    markers_[0].geometry_msg_to_world.position.x = 0;
    markers_[0].geometry_msg_to_world.position.y = 0;
    markers_[0].geometry_msg_to_world.position.z = 0;
    markers_[0].geometry_msg_to_world.orientation.x = 0;
    markers_[0].geometry_msg_to_world.orientation.y = 0;
    markers_[0].geometry_msg_to_world.orientation.z = 0;
    markers_[0].geometry_msg_to_world.orientation.w = 1;

    // Relative position (equals the global position for the first marker)
    markers_[0].geometry_msg_to_previous.position.x = 0;
    markers_[0].geometry_msg_to_previous.position.y = 0;
    markers_[0].geometry_msg_to_previous.position.z = 0;
    markers_[0].geometry_msg_to_previous.orientation.x = 0;
    markers_[0].geometry_msg_to_previous.orientation.y = 0;
    markers_[0].geometry_msg_to_previous.orientation.z = 0;
    markers_[0].geometry_msg_to_previous.orientation.w = 1;

    // Convert Pose to TF
    tf::Vector3 position;
    position.setX(0);
    position.setY(0);
    position.setZ(0);

    tf::Quaternion rotation;
    rotation.setX(0);
    rotation.setY(0);
    rotation.setZ(0);
    rotation.setW(1);

    markers_[0].tf_to_previous.setOrigin(position);
    markers_[0].tf_to_previous.setRotation(rotation);

    // Relative position of the first marker equals its global position
    markers_[0].tf_to_world = markers_[0].tf_to_previous;

    // Increase marker count
    marker_counter_++;

    // Mark the first marker as visible
    markers_[0].visible = true;

    ROS_INFO_STREAM("First marker with ID: " << markers_[0].marker_id << " detected");

    // The first marker does not have any previous marker
    markers_[0].previous_marker_id = THIS_IS_FIRST_MARKER;
  }

  //------------------------------------------------------
  // FOR EVERY MARKER DO
  //------------------------------------------------------
  for(size_t i = 0; i < temp_markers.size(); i++)
  {
    int index;
    int current_marker_id = temp_markers[i].id;

    // Draw marker convex, ID, cube and axis
    temp_markers[i].draw(output_image, cv::Scalar(0,0,255), 2);
    aruco::CvDrawingUtils::draw3dCube(output_image, temp_markers[i], aruco_calib_params_);
    aruco::CvDrawingUtils::draw3dAxis(output_image, temp_markers[i], aruco_calib_params_);

    // Existing marker?
    bool existing = false;
    int temp_counter = 0;

    while((existing == false) && (temp_counter < marker_counter_))
    {
      if(markers_[temp_counter].marker_id == current_marker_id)
      {
        index = temp_counter;
        existing = true;
        ROS_DEBUG_STREAM("Existing marker with ID: " << current_marker_id << " found");
      }
      temp_counter++;
    }

    // New marker?
    if(existing == false)
    {
      index = marker_counter_;
      markers_[index].marker_id = current_marker_id;
      existing = true;
      ROS_DEBUG_STREAM("New marker with ID: " << current_marker_id << " found");
    }

    // Set the visibility flag of every known marker present in the current image
    for(size_t j = 0; j < marker_counter_; j++)
    {
      for(size_t k = 0; k < temp_markers.size(); k++)
      {
        if(markers_[j].marker_id == temp_markers[k].id)
          markers_[j].visible = true;
      }
    }

    //------------------------------------------------------
    // For existing marker do
    //------------------------------------------------------
    if((index < marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf = arucoMarker2Tf(temp_markers[i]);
      markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

      const tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      const tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();
    }

    //------------------------------------------------------
    // For new marker do
    //------------------------------------------------------
    if((index == marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf = arucoMarker2Tf(temp_markers[i]);

      tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

      // Naming - TFs
      std::stringstream camera_tf_id;
      std::stringstream camera_tf_id_old;
      std::stringstream marker_tf_id_old;

      camera_tf_id << "camera_" << index;

      // Flag indicating whether any known marker is visible in the current image
      bool any_known_marker_visible = false;

      // ID of the known marker relative to which the position of the new marker is calculated
      int last_marker_id;

      // Test whether the position of the new marker can be calculated relative to an already known marker
      for(int k = 0; k < index; k++)
      {
        if((markers_[k].visible == true) && (any_known_marker_visible == false))
        {
          if(markers_[k].previous_marker_id != -1)
          {
            any_known_marker_visible = true;
            camera_tf_id_old << "camera_" << k;
            marker_tf_id_old << "marker_" << k;
            markers_[index].previous_marker_id = k;
            last_marker_id = k;
          }
        }
      }

      // The position of the new marker can be calculated
      if(any_known_marker_visible == true)
      {
        // Broadcast the TFs several times for the listener
        for(char k = 0; k < 10; k++)
        {
          // TF from the old marker to its camera
          broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf, ros::Time::now(),
                                                          marker_tf_id_old.str(), camera_tf_id_old.str()));

          // TF from the old camera to the new camera
          broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf, ros::Time::now(),
                                                          camera_tf_id_old.str(), camera_tf_id.str()));

          ros::Duration(BROADCAST_WAIT_INTERVAL).sleep();
        }

        // Calculate the TF between the two markers
        listener_->waitForTransform(marker_tf_id_old.str(), camera_tf_id.str(), ros::Time(0),
                                    ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
        try
        {
          broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf, ros::Time::now(),
                                                          marker_tf_id_old.str(), camera_tf_id_old.str()));

          broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf, ros::Time::now(),
                                                          camera_tf_id_old.str(), camera_tf_id.str()));

          listener_->lookupTransform(marker_tf_id_old.str(), camera_tf_id.str(), ros::Time(0),
                                     markers_[index].tf_to_previous);
        }
        catch(tf::TransformException &e)
        {
          ROS_ERROR("Not able to lookup transform");
        }

        // Save origin and quaternion of the calculated TF
        marker_origin = markers_[index].tf_to_previous.getOrigin();
        marker_quaternion = markers_[index].tf_to_previous.getRotation();

        // If the "plane" space type is selected, roll, pitch and the Z coordinate are zeroed out
        if(space_type_ == "plane")
        {
          double roll, pitch, yaw;
          tf::Matrix3x3(marker_quaternion).getRPY(roll, pitch, yaw);
          roll = 0;
          pitch = 0;
          marker_origin.setZ(0);
          marker_quaternion.setRPY(pitch, roll, yaw);
        }

        markers_[index].tf_to_previous.setRotation(marker_quaternion);
        markers_[index].tf_to_previous.setOrigin(marker_origin);

        marker_origin = markers_[index].tf_to_previous.getOrigin();
        markers_[index].geometry_msg_to_previous.position.x = marker_origin.getX();
        markers_[index].geometry_msg_to_previous.position.y = marker_origin.getY();
        markers_[index].geometry_msg_to_previous.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].tf_to_previous.getRotation();
        markers_[index].geometry_msg_to_previous.orientation.x = marker_quaternion.getX();
        markers_[index].geometry_msg_to_previous.orientation.y = marker_quaternion.getY();
        markers_[index].geometry_msg_to_previous.orientation.z = marker_quaternion.getZ();
        markers_[index].geometry_msg_to_previous.orientation.w = marker_quaternion.getW();

        // Increase marker count
        marker_counter_++;

        // Invert the TF of the new marker to compute the camera pose above it
        markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

        marker_origin = markers_[index].current_camera_tf.getOrigin();
        markers_[index].current_camera_pose.position.x = marker_origin.getX();
        markers_[index].current_camera_pose.position.y = marker_origin.getY();
        markers_[index].current_camera_pose.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].current_camera_tf.getRotation();
        markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
        markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
        markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
        markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

        // Publish all TFs and markers
        publishTfs(false);
      }
    }

    //------------------------------------------------------
    // Compute global position of new marker
    //------------------------------------------------------
    if((marker_counter_previous_ < marker_counter_) && (first_marker_detected_ == true))
    {
      // Publish all TFs five times for the listener
      for(char k = 0; k < 5; k++)
        publishTfs(false);

      std::stringstream marker_tf_name;
      marker_tf_name << "marker_" << index;

      listener_->waitForTransform("world", marker_tf_name.str(), ros::Time(0),
                                  ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
      try
      {
        listener_->lookupTransform("world", marker_tf_name.str(), ros::Time(0),
                                   markers_[index].tf_to_world);
      }
      catch(tf::TransformException &e)
      {
        ROS_ERROR("Not able to lookup transform");
      }

      // Save TF to Pose
      const tf::Vector3 marker_origin = markers_[index].tf_to_world.getOrigin();
      markers_[index].geometry_msg_to_world.position.x = marker_origin.getX();
      markers_[index].geometry_msg_to_world.position.y = marker_origin.getY();
      markers_[index].geometry_msg_to_world.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion = markers_[index].tf_to_world.getRotation();
      markers_[index].geometry_msg_to_world.orientation.x = marker_quaternion.getX();
      markers_[index].geometry_msg_to_world.orientation.y = marker_quaternion.getY();
      markers_[index].geometry_msg_to_world.orientation.z = marker_quaternion.getZ();
      markers_[index].geometry_msg_to_world.orientation.w = marker_quaternion.getW();
    }
  }

  //------------------------------------------------------
  // Compute which of the visible markers is closest to the camera
  //------------------------------------------------------
  bool any_markers_visible = false;
  int num_of_visible_markers = 0;

  if(first_marker_detected_ == true)
  {
    double minimal_distance = INIT_MIN_SIZE_VALUE;
    for(int k = 0; k < num_of_markers_; k++)
    {
      double a, b, c, size;

      // If the marker is visible, its distance to the camera is calculated
      if(markers_[k].visible == true)
      {
        a = markers_[k].current_camera_pose.position.x;
        b = markers_[k].current_camera_pose.position.y;
        c = markers_[k].current_camera_pose.position.z;
        size = std::sqrt((a * a) + (b * b) + (c * c));

        if(size < minimal_distance)
        {
          minimal_distance = size;
          closest_camera_index_ = k;
        }

        any_markers_visible = true;
        num_of_visible_markers++;
      }
    }
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Compute global camera pose
  //------------------------------------------------------
  if((first_marker_detected_ == true) && (any_markers_visible == true))
  {
    std::stringstream closest_camera_tf_name;
    closest_camera_tf_name << "camera_" << closest_camera_index_;

    listener_->waitForTransform("world", closest_camera_tf_name.str(), ros::Time(0),
                                ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
    try
    {
      listener_->lookupTransform("world", closest_camera_tf_name.str(), ros::Time(0),
                                 world_position_transform_);
    }
    catch(tf::TransformException &ex)
    {
      ROS_ERROR("Not able to lookup transform");
    }

    // Save TF to Pose
    const tf::Vector3 marker_origin = world_position_transform_.getOrigin();
    world_position_geometry_msg_.position.x = marker_origin.getX();
    world_position_geometry_msg_.position.y = marker_origin.getY();
    world_position_geometry_msg_.position.z = marker_origin.getZ();

    tf::Quaternion marker_quaternion = world_position_transform_.getRotation();
    world_position_geometry_msg_.orientation.x = marker_quaternion.getX();
    world_position_geometry_msg_.orientation.y = marker_quaternion.getY();
    world_position_geometry_msg_.orientation.z = marker_quaternion.getZ();
    world_position_geometry_msg_.orientation.w = marker_quaternion.getW();
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Publish custom marker message
  //------------------------------------------------------
  aruco_mapping::ArucoMarker marker_msg;

  if(any_markers_visible == true)
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.marker_visibile = true;
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.global_camera_pose = world_position_geometry_msg_;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();

    for(size_t j = 0; j < marker_counter_; j++)
    {
      if(markers_[j].visible == true)
      {
        marker_msg.marker_ids.push_back(markers_[j].marker_id);
        marker_msg.global_marker_poses.push_back(markers_[j].geometry_msg_to_world);
      }
    }
  }
  else
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.marker_visibile = false;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();
  }

  // Publish the custom marker message
  marker_msg_pub_.publish(marker_msg);

  return true;
}
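
// The arucoMarker2Tf() helper used above converts the pose estimated by ArUco
// (Rodrigues rotation vector Rvec and translation vector Tvec) into a tf::Transform.
// It is not shown in this section; the following is only a minimal sketch of such a
// conversion, assuming Rvec/Tvec are the CV_32F matrices filled in by
// aruco::MarkerDetector. The real helper may additionally apply a fixed rotation to
// align the OpenCV camera axes with the ROS convention.
tf::Transform ArucoMapping::arucoMarker2Tf(const aruco::Marker &marker)
{
  // Axis-angle (Rodrigues) vector -> 3x3 rotation matrix
  cv::Mat rotation(3, 3, CV_64FC1);
  cv::Mat rvec64;
  marker.Rvec.convertTo(rvec64, CV_64FC1);
  cv::Rodrigues(rvec64, rotation);

  tf::Matrix3x3 tf_rotation(rotation.at<double>(0,0), rotation.at<double>(0,1), rotation.at<double>(0,2),
                            rotation.at<double>(1,0), rotation.at<double>(1,1), rotation.at<double>(1,2),
                            rotation.at<double>(2,0), rotation.at<double>(2,1), rotation.at<double>(2,2));

  tf::Vector3 tf_origin(marker.Tvec.at<float>(0,0),
                        marker.Tvec.at<float>(1,0),
                        marker.Tvec.at<float>(2,0));

  return tf::Transform(tf_rotation, tf_origin);
}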
void InteractiveMarkerArrow::MakeInteractiveMarker(std::string intMarkerName,
                                                   tf::Quaternion qx_control,
                                                   tf::Quaternion qy_control,
                                                   tf::Quaternion qz_control)
{
  visualization_msgs::InteractiveMarker int_marker;
  int_marker.header.frame_id = "world";
  int_marker.scale = 0.1;
  int_marker.name = intMarkerName;

  // Initial pose of the interactive marker, looked up from the TF service
  geometry_msgs::Pose pose;
  tfSrv->get("world", intMarkerName + "_control", pose);
  int_marker.pose = pose;

  // Base control with the visible geometry; make it draggable in 3D
  InteractiveMarkerArrow::MakeControl(int_marker);
  int_marker.controls[0].interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_3D;

  visualization_msgs::InteractiveMarkerControl control;
  control.orientation_mode = visualization_msgs::InteractiveMarkerControl::FIXED;

  // Rotate the control axes by the vice grip rotation
  tf::Transform tr;
  tfSrv->get("world", "viceGripRotation", tr);
  qx_control = tr.getRotation() * qx_control;
  qy_control = tr.getRotation() * qy_control;
  qz_control = tr.getRotation() * qz_control;

  // X axis: rotate and move controls
  control.orientation.w = qx_control.getW();
  control.orientation.x = qx_control.getX();
  control.orientation.y = qx_control.getY();
  control.orientation.z = qx_control.getZ();
  control.name = "rotate_x";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
  int_marker.controls.push_back(control);
  control.name = "move_x";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
  int_marker.controls.push_back(control);

  // Z axis: rotate and move controls
  control.orientation.w = qz_control.getW();
  control.orientation.x = qz_control.getX();
  control.orientation.y = qz_control.getY();
  control.orientation.z = qz_control.getZ();
  control.name = "rotate_z";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
  int_marker.controls.push_back(control);
  control.name = "move_z";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
  int_marker.controls.push_back(control);

  // Y axis: rotate and move controls
  control.orientation.w = qy_control.getW();
  control.orientation.x = qy_control.getX();
  control.orientation.y = qy_control.getY();
  control.orientation.z = qy_control.getZ();
  control.name = "rotate_y";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
  int_marker.controls.push_back(control);
  control.name = "move_y";
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
  int_marker.controls.push_back(control);

  // Register the marker and its feedback callback with the server
  intMarkerSrv->insert(int_marker);
  intMarkerSrv->setCallback(int_marker.name,
                            _processFeedBackTemp(boost::bind(&InteractiveMarkerArrow::ProcessFeedback, this, _1)));
  intMarkerSrv->applyChanges();
}
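
// MakeControl() is referenced above but not shown here. The following is a speculative
// sketch of what such a helper might do, assuming it only attaches an always-visible
// arrow marker as the grabbable geometry of the interactive marker; the real
// implementation may differ.
void InteractiveMarkerArrow::MakeControl(visualization_msgs::InteractiveMarker &int_marker)
{
  visualization_msgs::Marker arrow;
  arrow.type = visualization_msgs::Marker::ARROW;
  arrow.scale.x = int_marker.scale * 0.8;
  arrow.scale.y = int_marker.scale * 0.2;
  arrow.scale.z = int_marker.scale * 0.2;
  arrow.color.r = 0.8f;
  arrow.color.g = 0.2f;
  arrow.color.b = 0.2f;
  arrow.color.a = 1.0f;

  visualization_msgs::InteractiveMarkerControl control;
  control.always_visible = true;
  control.markers.push_back(arrow);
  int_marker.controls.push_back(control);
}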
void LeapMotionListener::leapmotionCallback(const leap_motion::leapros2::ConstPtr& dataHand)
{
  dataHand_ = (*dataHand);

  ROS_INFO("ORIENTATION OF THE HAND SET TO \n X: %f\n Y: %f\n Z: %f\n ", dataHand_.ypr.x, dataHand_.ypr.y, dataHand_.ypr.z);
  ROS_INFO("DIRECTION OF THE HAND SET TO \n X: %f\n Y: %f\n Z: %f\n ", dataHand_.direction.x, dataHand_.direction.y, dataHand_.direction.z);
  ROS_INFO("NORMAL OF THE HAND SET TO \n X: %f\n Y: %f\n Z: %f\n ", dataHand_.normal.x, dataHand_.normal.y, dataHand_.normal.z);
  //ROS_INFO("INSIDE CALLBACK");

  // On the first message, store the reference position of the hand
  if (FIRST_VALUE)
  {
    dataLastHand_.palmpos.x = dataHand_.palmpos.x;
    dataLastHand_.palmpos.y = dataHand_.palmpos.y;
    dataLastHand_.palmpos.z = dataHand_.palmpos.z;
    FIRST_VALUE = 0;

    // Dead-band limits around the reference position to filter out small movements
    Updifferencex = dataLastHand_.palmpos.x + 10;
    Downdifferencex = dataLastHand_.palmpos.x - 10;
    Updifferencez = dataLastHand_.palmpos.z + 10;
    Downdifferencez = dataLastHand_.palmpos.z - 20;
    Updifferencey = dataLastHand_.palmpos.y + 20;
    Downdifferencey = dataLastHand_.palmpos.y - 20;
    //ROS_INFO("ORIGINAL POSITION OF THE HAND SET TO \n X: %f\n Y: %f\n Z: %f\n ", trajectory_hand.at(i).palmpos.x, trajectory_hand.at(i).palmpos.y, trajectory_hand.at(i).palmpos.z);
    //sleep(2);
  }
  else
  {
    // Map the distance between the fingers to the gripper joint positions
    rot7 = DtA + DtAx * dataHand_.finger_distance;
    rot8 = DtA + DtAx * dataHand_.finger_distance;
    joint_msg_leap = jointstate_;
    joint_msg_leap.position[7] = -rot8;
    joint_msg_leap.position[6] = rot7;

    // Only react when the palm has moved outside the dead band
    if ((dataHand_.palmpos.x < Downdifferencex) || (dataHand_.palmpos.x > Updifferencex) ||
        (dataHand_.palmpos.y < Downdifferencey) || (dataHand_.palmpos.y > Updifferencey) ||
        (dataHand_.palmpos.z < Downdifferencez) || (dataHand_.palmpos.z > Updifferencez))
    {
      //q.setRPY(0,0,M_PI/2); // Fixed pose for testing; with this pose the robot is oriented towards the ground
      q.setRPY(dataHand_.ypr.x, dataHand_.ypr.y, dataHand_.ypr.z);
      pose.orientation.x = q.getAxis().getX();  // swapped on purpose (was getZ())
      pose.orientation.y = q.getAxis().getY();
      pose.orientation.z = q.getAxis().getZ();  // swapped on purpose (was getX())
      pose.orientation.w = q.getW();
      //pose.orientation.w = ;
      //pose.orientation.z = 1;
      //pose.orientation.y = 0;
      //pose.orientation.x = 0;
      // Note: the correct axes still need to be sent to the robot; currently they are
      // rotated and x corresponds to z.
      //ROS_INFO("VALUES OF THE QUATERNION SET TO \n X: %f\n Y: %f\n Z: %f W: %f\n", pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);

      pose.position.y += (dataHand_.palmpos.x - dataLastHand_.palmpos.x);
      pose.position.z += (dataHand_.palmpos.y - dataLastHand_.palmpos.y);
      if (pose.position.z > Uplimitez)
        pose.position.z = Uplimitez;
      pose.position.x += (dataHand_.palmpos.z - dataLastHand_.palmpos.z);

      // Call the autogenerated service class with the new Cartesian target
      srv.request.target = pose;
      if (pclient->call(srv))
      {
        //ROS_INFO("Ret: %d", (int)srv.response.ret);
        dataLastHand_.palmpos.x = dataHand_.palmpos.x;
        dataLastHand_.palmpos.y = dataHand_.palmpos.y;
        dataLastHand_.palmpos.z = dataHand_.palmpos.z;

        // Update both limits for x, y and z to avoid reacting to small changes
        Updifferencex = dataLastHand_.palmpos.x + 10;
        Downdifferencex = dataLastHand_.palmpos.x - 10;
        Updifferencez = dataLastHand_.palmpos.z + 10;
        Downdifferencez = dataLastHand_.palmpos.z - 20;
        Updifferencey = dataLastHand_.palmpos.y + 20;
        Downdifferencey = dataLastHand_.palmpos.y - 20;

        old_pose = pose;
        ROS_INFO("response %f\n", srv.response.ret);
      }
      else
      {
        ROS_ERROR("Position out of range");
        pose = old_pose;
      }
    }

    // The answer of the service could be published into the joint_msg_leap message
    //joint_msg_leap.position[0] = srv.response.joint1;
    //joint_msg_leap.position[1] = srv.response.joint2;
    //joint_msg_leap.position[2] = srv.response.joint3;
    //joint_msg_leap.position[3] = srv.response.joint4;
    //joint_msg_leap.position[4] = srv.response.joint5;
    //joint_msg_leap.position[5] = srv.response.joint6;
    //robo_pub.publish(joint_msg_leap);
  }

  //ROS_INFO("END EFFECTOR POSITION \n X: %f\n Y: %f\n Z: %f\n", pose.position.x, pose.position.y, pose.position.z);
  //ROS_INFO("ORIENTATION OF THE HAND SET TO \n X: %f\n Y: %f\n Z: %f\n ", q.getAxis().getX(), q.getAxis().getY(), q.getAxis().getZ());
}
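
// For reference: the callback above intentionally copies q.getAxis() components into
// the pose orientation because the Leap Motion and robot axes are rotated with respect
// to each other. The standard, component-wise way to turn a roll/pitch/yaw reading into
// a pose orientation is sketched below; the helper name makePoseOrientationFromYpr is
// illustrative and not part of the original code.
#include <tf/transform_datatypes.h>
#include <geometry_msgs/Pose.h>

geometry_msgs::Pose makePoseOrientationFromYpr(double roll, double pitch, double yaw)
{
  tf::Quaternion q;
  q.setRPY(roll, pitch, yaw);

  geometry_msgs::Pose pose;
  tf::quaternionTFToMsg(q, pose.orientation);  // copies x, y, z, w directly
  return pose;
}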
// callback for the complete message
void complete_message_callback(const homog_track::HomogComplete& msg)
{
  /********** Begin splitting up the incoming message *********/
  // boolean indicating whether the reference has been set
  reference_set = msg.reference_set;

  // if the reference is set, break out the points
  if (reference_set)
  {
    // initialize the temp scalar to zero
    temp_scalar = cv::Mat::zeros(1,1,CV_64F);

    // getting the current marker points
    circles_curr = msg.current_points;

    // getting the reference marker points
    circles_ref = msg.reference_points;

    // setting the current points to the point vector
    curr_red_p.x = circles_curr.red_circle.x;
    curr_green_p.x = circles_curr.green_circle.x;
    curr_cyan_p.x = circles_curr.cyan_circle.x;
    curr_purple_p.x = circles_curr.purple_circle.x;
    curr_red_p.y = circles_curr.red_circle.y;
    curr_green_p.y = circles_curr.green_circle.y;
    curr_cyan_p.y = circles_curr.cyan_circle.y;
    curr_purple_p.y = circles_curr.purple_circle.y;
    curr_points_p.push_back(curr_red_p);
    curr_points_p.push_back(curr_green_p);
    curr_points_p.push_back(curr_cyan_p);
    curr_points_p.push_back(curr_purple_p);

    // converting the current points to normalized (projective) coordinates
    for (int ii = 0; ii < curr_points_m.size(); ii++)
    {
      curr_points_m[ii] = K.inv(cv::DECOMP_LU)*curr_points_m[ii];
      std::cout << "currpoints at " << ii << " is: " << curr_points_m[ii] << std::endl;
    }

    // setting the reference points to the point vector
    ref_red_p.x = circles_ref.red_circle.x;
    ref_green_p.x = circles_ref.green_circle.x;
    ref_cyan_p.x = circles_ref.cyan_circle.x;
    ref_purple_p.x = circles_ref.purple_circle.x;
    ref_red_p.y = circles_ref.red_circle.y;
    ref_green_p.y = circles_ref.green_circle.y;
    ref_cyan_p.y = circles_ref.cyan_circle.y;
    ref_purple_p.y = circles_ref.purple_circle.y;
    ref_points_p.push_back(ref_red_p);
    ref_points_p.push_back(ref_green_p);
    ref_points_p.push_back(ref_cyan_p);
    ref_points_p.push_back(ref_purple_p);

    // setting the reference points to the matrix vector; the last element is already 1,
    // so it does not need to be set
    ref_red_m.at<double>(0,0) = ref_red_p.x;
    ref_red_m.at<double>(1,0) = ref_red_p.y;
    ref_green_m.at<double>(0,0) = ref_green_p.x;
    ref_green_m.at<double>(1,0) = ref_green_p.y;
    ref_cyan_m.at<double>(0,0) = ref_cyan_p.x;
    ref_cyan_m.at<double>(1,0) = ref_cyan_p.y;
    ref_purple_m.at<double>(0,0) = ref_purple_p.x;
    ref_purple_m.at<double>(1,0) = ref_purple_p.y;
    ref_points_m.push_back(ref_red_m);
    ref_points_m.push_back(ref_green_m);
    ref_points_m.push_back(ref_cyan_m);
    ref_points_m.push_back(ref_purple_m);

    // converting the reference points to normalized (projective) coordinates
    for (int ii = 0; ii < ref_points_m.size(); ii++)
    {
      ref_points_m[ii] = K.inv(cv::DECOMP_LU)*ref_points_m[ii];
      //std::cout << "refpoints at " << ii << " is: " << ref_points_m[ii] << std::endl;
    }

    // if any of the points have a -1, skip the homography
    if (curr_red_p.x != -1 && curr_green_p.x != -1 && curr_cyan_p.x != -1 && curr_purple_p.x != -1)
    {
      // finding the perspective homography
      G = cv::findHomography(curr_points_p, ref_points_p, 0);
      //G = cv::findHomography(ref_points_p, ref_points_p, 0);
      std::cout << "G: " << G << std::endl;

      // decomposing the homography into the four candidate solutions:
      //   G and K are 3x3
      //   R is a vector of 3x3 rotations
      //   T is a vector of 3x1 translations
      //   n is a vector of 3x1 plane normals
      // successful_decomp is the number of solutions found
      successful_decomp = cv::decomposeHomographyMat(G, K, R, T, n);
      std::cout << "successful_decomp: " << successful_decomp << std::endl;

      // if the decomposition is successful, find the best matching solution
      if (successful_decomp > 0)
      {
        std::cout << std::endl << std::endl << " begin check for visibility" << std::endl;

        // finding the alphas
        alpha_red.data = 1/(G.at<double>(2,0)*ref_red_p.x + G.at<double>(2,1)*ref_red_p.y + 1);
        alpha_green.data = 1/(G.at<double>(2,0)*ref_green_p.x + G.at<double>(2,1)*ref_green_p.y + 1);
        alpha_cyan.data = 1/(G.at<double>(2,0)*ref_cyan_p.x + G.at<double>(2,1)*ref_cyan_p.y + 1);
        alpha_purple.data = 1/(G.at<double>(2,0)*ref_purple_p.x + G.at<double>(2,1)*ref_purple_p.y + 1);

        // finding the solutions that give positive results
        for (int ii = 0; ii < successful_decomp; ii++)
        {
          std::cout << "solution set number " << ii << std::endl;

          // performing the operation transpose(m)*R*n to check later whether it is greater
          // than 0; the order operated on is red, green, cyan, purple
          for (int jj = 0; jj < 4; jj++)
          {
            //std::cout << " T size: " << T[ii].size() << std::endl;
            //std::cout << " T type: " << T[ii].type() << std::endl;
            std::cout << " T value: " << T[ii] << std::endl;

            temp_scalar = curr_points_m[jj].t();
            temp_scalar = temp_scalar*R[ii];

            //std::cout << " n size: " << n[ii].size() << std::endl;
            //std::cout << " n type: " << n[ii].type() << std::endl;
            std::cout << " n value: " << n[ii] << std::endl;

            temp_scalar = temp_scalar*n[ii];
            scalar_value_check.push_back(temp_scalar.at<double>(0,0));
            //std::cout << " \tthe value for the " << jj << " visibility check is: " << scalar_value_check[4*ii+jj] << std::endl;
          }
        }

        std::cout << " end check for visibility" << std::endl << std::endl;

        // resetting the solution-found flags
        first_solution_found = false;
        second_solution_found = false;
        fc_found = false;

        // getting the two solutions, or only one if there are not two
        for (int ii = 0; ii < successful_decomp; ii++)
        {
          // getting the start and end of the next solution onto the temporary vector
          temp_solution_start = scalar_value_check.begin() + 4*ii;
          temp_solution_end = scalar_value_check.begin() + 4*ii+4;
          temp_solution.assign(temp_solution_start, temp_solution_end);

          // checking if all the values are positive
          all_positive = true;
          current_temp_index = 0;
          while (all_positive && current_temp_index < 4)
          {
            if (temp_solution[current_temp_index] >= 0)
              current_temp_index++;
            else
              all_positive = false;
          }

          // If all values were positive and a first solution has not been found, assign it to
          // the first solution. If all values are positive and a first solution has already been
          // found, assign it to the second solution. Otherwise do nothing.
          if (all_positive && first_solution_found && !second_solution_found)
          {
            // indicate a second solution has been found
            second_solution_found = true;

            // setting the rotation, translation, and normal to be the second set
            second_R = R[ii];
            second_T = T[ii];
            second_n = n[ii];

            // setting the projected values
            second_solution = temp_solution;
          }
          else if (all_positive && !first_solution_found)
          {
            // indicate a first solution has been found
            first_solution_found = true;

            // setting the rotation, translation, and normal to be the first set
            first_R = R[ii];
            first_T = T[ii];
            first_n = n[ii];

            // setting the projected values
            first_solution = temp_solution;
          }

          // erasing all the values from the temp solution
          temp_solution.erase(temp_solution.begin(), temp_solution.end());
        }

        // erasing all the scalar values from the check
        scalar_value_check.erase(scalar_value_check.begin(), scalar_value_check.end());

        // displaying the first solution if it was found
        if (first_solution_found)
        {
          std::cout << std::endl << "first R: " << first_R << std::endl;
          std::cout << "first T: " << first_T << std::endl;
          std::cout << "first n: " << first_n << std::endl;
          for (double ii : first_solution)
            std::cout << ii << " ";
          std::cout << std::endl;
        }

        // displaying the second solution if it was found
        if (second_solution_found)
        {
          std::cout << std::endl << "second R: " << second_R << std::endl;
          std::cout << "second T: " << second_T << std::endl;
          std::cout << "second n: " << second_n << std::endl;
          for (double ii : second_solution)
            std::cout << ii << " ";
          std::cout << std::endl;
        }

        // Because the reference is set so that n should have only a z component, the correct
        // choice is the solution closest to n_ref = [0,0,1]^T, i.e. the one with the greatest
        // dot product with n_ref.
        if (first_solution_found && second_solution_found)
        {
          if (first_n.dot(n_ref) >= second_n.dot(n_ref))
          {
            R_fc = first_R;
            T_fc = first_T;
          }
          else
          {
            R_fc = second_R;
            T_fc = second_T;
          }
          fc_found = true;
        }
        else if (first_solution_found)
        {
          R_fc = first_R;
          T_fc = first_T;
          fc_found = true;
        }

        // if a solution was found, publish it; it needs to be converted to a pose message
        if (fc_found)
        {
          // converting the rotation from a cv matrix to a quaternion; first it is needed as a tf::Matrix3x3
          R_fc_tf[0][0] = R_fc.at<double>(0,0);
          R_fc_tf[0][1] = R_fc.at<double>(0,1);
          R_fc_tf[0][2] = R_fc.at<double>(0,2);
          R_fc_tf[1][0] = R_fc.at<double>(1,0);
          R_fc_tf[1][1] = R_fc.at<double>(1,1);
          R_fc_tf[1][2] = R_fc.at<double>(1,2);
          R_fc_tf[2][0] = R_fc.at<double>(2,0);
          R_fc_tf[2][1] = R_fc.at<double>(2,1);
          R_fc_tf[2][2] = R_fc.at<double>(2,2);
          std::cout << "Final R:\n" << R_fc << std::endl;

          // converting the translation (a 3x1 matrix) to a vector 3
          T_fc_tf.setX(T_fc.at<double>(0,0));
          T_fc_tf.setY(T_fc.at<double>(1,0));
          T_fc_tf.setZ(T_fc.at<double>(2,0));
          std::cout << "Final T :\n" << T_fc << std::endl;

          // getting the rotation as a quaternion
          R_fc_tf.getRotation(Q_fc_tf);
          std::cout << "current orientation:"
                    << "\n\tx:\t" << Q_fc_tf.getX()
                    << "\n\ty:\t" << Q_fc_tf.getY()
                    << "\n\tz:\t" << Q_fc_tf.getZ()
                    << "\n\tw:\t" << Q_fc_tf.getW() << std::endl;
          std::cout << "norm of quaternion:\t" << Q_fc_tf.length() << std::endl;

          // getting the negated version of the quaternion for the check
          Q_fc_tf_negated = tf::Quaternion(-Q_fc_tf.getX(), -Q_fc_tf.getY(), -Q_fc_tf.getZ(), -Q_fc_tf.getW());
          std::cout << "negated orientation:"
                    << "\n\tx:\t" << Q_fc_tf_negated.getX()
                    << "\n\ty:\t" << Q_fc_tf_negated.getY()
                    << "\n\tz:\t" << Q_fc_tf_negated.getZ()
                    << "\n\tw:\t" << Q_fc_tf_negated.getW() << std::endl;
          std::cout << "norm of negated quaternion:\t" << Q_fc_tf_negated.length() << std::endl;

          // showing the last orientation
          std::cout << "last orientation:"
                    << "\n\tx:\t" << Q_fc_tf_last.getX()
                    << "\n\ty:\t" << Q_fc_tf_last.getY()
                    << "\n\tz:\t" << Q_fc_tf_last.getZ()
                    << "\n\tw:\t" << Q_fc_tf_last.getW() << std::endl;
          std::cout << "norm of last quaternion:\t" << Q_fc_tf_last.length() << std::endl;

          // checking if the quaternion has flipped
          Q_norm_current_diff = std::sqrt(std::pow(Q_fc_tf.getX() - Q_fc_tf_last.getX(),2.0)
                                        + std::pow(Q_fc_tf.getY() - Q_fc_tf_last.getY(),2.0)
                                        + std::pow(Q_fc_tf.getZ() - Q_fc_tf_last.getZ(),2.0)
                                        + std::pow(Q_fc_tf.getW() - Q_fc_tf_last.getW(),2.0));
          std::cout << "current difference:\t" << Q_norm_current_diff << std::endl;

          Q_norm_negated_diff = std::sqrt(std::pow(Q_fc_tf_negated.getX() - Q_fc_tf_last.getX(),2.0)
                                        + std::pow(Q_fc_tf_negated.getY() - Q_fc_tf_last.getY(),2.0)
                                        + std::pow(Q_fc_tf_negated.getZ() - Q_fc_tf_last.getZ(),2.0)
                                        + std::pow(Q_fc_tf_negated.getW() - Q_fc_tf_last.getW(),2.0));
          std::cout << "negated difference:\t" << Q_norm_negated_diff << std::endl;

          if (Q_norm_current_diff > Q_norm_negated_diff)
            Q_fc_tf = Q_fc_tf_negated;

          // updating the last quaternion
          Q_fc_tf_last = Q_fc_tf;

          // converting the tf quaternion to a geometry message quaternion
          Q_fc_gm.x = Q_fc_tf.getX();
          Q_fc_gm.y = Q_fc_tf.getY();
          Q_fc_gm.z = Q_fc_tf.getZ();
          Q_fc_gm.w = Q_fc_tf.getW();

          // converting the tf vector3 to a point
          P_fc_gm.x = T_fc_tf.getX();
          P_fc_gm.y = T_fc_tf.getY();
          P_fc_gm.z = T_fc_tf.getZ();

          // setting the transform with the values
          fc_tf.setOrigin(T_fc_tf);
          fc_tf.setRotation(Q_fc_tf);
          tf_broad.sendTransform(tf::StampedTransform(fc_tf, msg.header.stamp, "f_star", "f_current"));

          // setting the decomposed message
          pose_fc_gm.position = P_fc_gm;
          pose_fc_gm.orientation = Q_fc_gm;
          decomposed_msg.pose = pose_fc_gm;
          decomposed_msg.header.stamp = msg.header.stamp;
          decomposed_msg.header.frame_id = "current_frame_normalized";
          decomposed_msg.alpha_red = alpha_red;
          decomposed_msg.alpha_green = alpha_green;
          decomposed_msg.alpha_cyan = alpha_cyan;
          decomposed_msg.alpha_purple = alpha_purple;
          homog_decomp_pub.publish(decomposed_msg);

          std::cout << "complete message\n" << decomposed_msg << std::endl << std::endl;

          // publish the marker
          marker.pose = pose_fc_gm;
          marker_pub.publish(marker);
        }
      }
    }

    // erasing all the temporary points
    if (first_solution_found || second_solution_found)
    {
      // erasing all the point vectors and matrix vectors
      curr_points_p.erase(curr_points_p.begin(), curr_points_p.end());
      ref_points_p.erase(ref_points_p.begin(), ref_points_p.end());
      curr_points_m.erase(curr_points_m.begin(), curr_points_m.end());
      ref_points_m.erase(ref_points_m.begin(), ref_points_m.end());
    }
  }
  /********** End splitting up the incoming message *********/
}
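
// Condensed sketch of the disambiguation logic used above, under the same assumptions as
// the callback: points_m holds the normalized (3x1, CV_64F) coordinates of the currently
// visible features, a decomposition (R, n) is physically valid only if m^T * R * n > 0
// for every point m, and among the valid candidates the plane normal closest to
// n_ref = [0, 0, 1]^T wins. The helper name selectDecomposition is illustrative and not
// part of the original code.
#include <opencv2/opencv.hpp>
#include <vector>

static int selectDecomposition(const std::vector<cv::Mat> &R,
                               const std::vector<cv::Mat> &n,
                               const std::vector<cv::Mat> &points_m,
                               const cv::Mat &n_ref)
{
  int best = -1;
  double best_dot = -1.0;

  for (size_t i = 0; i < R.size(); ++i)
  {
    // Visibility check: every observed point must lie in front of the decomposed plane.
    bool all_positive = true;
    for (const cv::Mat &m : points_m)
    {
      cv::Mat s = m.t() * R[i] * n[i];  // 1x1 result of m^T * R * n
      if (s.at<double>(0,0) < 0.0)
      {
        all_positive = false;
        break;
      }
    }
    if (!all_positive)
      continue;

    // Among valid candidates, prefer the normal closest to the reference normal.
    double d = n[i].dot(n_ref);
    if (d > best_dot)
    {
      best_dot = d;
      best = static_cast<int>(i);
    }
  }

  return best;  // index into R/T/n, or -1 if no physically valid solution was found
}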