carmen_6d_point 
get_carmen_pose6D_from_matrix(Matrix visual_odometry_transformation_matrix)
{
	// See: http://www.ros.org/wiki/tf
	tf::Transform visual_odometry_pose;
	tf::Transform carmen_pose;
	carmen_6d_point carmen_pose_6d;

	// convert the visual odometry output matrix to the tf::Transform type.
	visual_odometry_pose = convert_matrix_to_tf_transform(visual_odometry_transformation_matrix);

	// compute the current visual odometry pose with respect to the carmen coordinate system
	carmen_pose = g_car_to_visual_odometry_transform * visual_odometry_pose * g_car_to_visual_odometry_transform.inverse();

	double yaw, pitch, roll;
	tf::Matrix3x3(carmen_pose.getRotation()).getRPY(roll, pitch, yaw);

	carmen_pose_6d.x = carmen_pose.getOrigin().x();
	carmen_pose_6d.y = carmen_pose.getOrigin().y();
	carmen_pose_6d.z = carmen_pose.getOrigin().z();

	carmen_pose_6d.yaw = yaw;
	carmen_pose_6d.pitch = pitch;
	carmen_pose_6d.roll = roll;

	return carmen_pose_6d;
}
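The conjugation g * T * g⁻¹ above expresses motion estimated in the sensor frame in the car frame instead. A minimal sketch of how such a fixed car-to-sensor transform might be built with the tf API; the helper name and the mounting offsets are illustrative placeholders, not values from the original code:

// Hypothetical construction of g_car_to_visual_odometry_transform; the
// translation and rotation below are placeholders, the real values come
// from the robot's sensor calibration.
tf::Transform make_car_to_visual_odometry_transform()
{
	tf::Transform t;
	t.setOrigin(tf::Vector3(0.5, 0.0, 1.2)); // camera 0.5 m ahead of and 1.2 m above the car origin
	tf::Quaternion q;
	q.setRPY(0.0, 0.0, 0.0);                 // camera axes aligned with the car frame
	t.setRotation(q);
	return t;
}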
Example #2
void sptam::sptam_node::fixOdomFrame(const CameraPose& cameraPose, const tf::StampedTransform& camera_to_odom, const ros::Time& t)
{
  tf::Pose camera_to_map;
  CameraPose2TFPose( cameraPose, camera_to_map );

  // compute the new difference between map and odom
  tf::Transform odom_to_map = camera_to_map * camera_to_odom.inverse();

  std::lock_guard<std::mutex> lock( odom_to_map_mutex_ );
  odom_to_map_ = odom_to_map;
}
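A minimal sketch (an assumed helper, not part of the S-PTAM API) of how the cached correction could later be broadcast, assuming the usual REP 105 layout where "map" is the parent of "odom":

// A transform stamped with parent "map" and child "odom" maps points from
// the odom frame into the map frame, which is exactly odom_to_map.
void publishOdomToMap(tf::TransformBroadcaster &broadcaster,
                      const tf::Transform &odom_to_map, const ros::Time &t)
{
  broadcaster.sendTransform(tf::StampedTransform(odom_to_map, t, "map", "odom"));
}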
Example #3
tf::StampedTransform OdomTf::stamped_transform_inverse(tf::StampedTransform stf) {
    // instantiate stamped transform with constructor args
    // note: child_frame_id_ and frame_id_ are reversed to correspond to the inverted transform
    tf::StampedTransform stf_inv(stf.inverse(), stf.stamp_, stf.child_frame_id_, stf.frame_id_);
    /* long-winded equivalent:
    tf::StampedTransform stf_inv;
    tf::Transform tf = get_tf_from_stamped_tf(stf);
    tf::Transform tf_inv = tf.inverse();

    stf_inv.stamp_ = stf.stamp_;
    stf_inv.frame_id_ = stf.child_frame_id_;
    stf_inv.child_frame_id_ = stf.frame_id_;
    stf_inv.setOrigin(tf_inv.getOrigin());
    stf_inv.setBasis(tf_inv.getBasis());
    */
    return stf_inv;
}
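A hypothetical usage of the helper above; the listener, the OdomTf instance odom_tf, and the frame names "base" and "tool" are assumptions for illustration:

    // Invert a lookup result and sanity-check the frame bookkeeping.
    tf::StampedTransform base_to_tool;
    listener.lookupTransform("base", "tool", ros::Time(0), base_to_tool);
    tf::StampedTransform tool_to_base = odom_tf.stamped_transform_inverse(base_to_tool);
    // tool_to_base.frame_id_ == "tool" and tool_to_base.child_frame_id_ == "base",
    // matching the reversed direction of the inverted transform.

Example #4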
/** MAIN NODE **/
int
main (int argc, char** argv)
{
        // Initialize ROS
        ros::init (argc, argv, "comau_tf_follower");
        ros::NodeHandle nh;
        listener = new tf::TransformListener ();
        tf_broadcaster = new tf::TransformBroadcaster();

        // PUB & SUB
        ros::Subscriber comau_tf_follower = nh.subscribe("lar_visionsystem/comau_tf_follower", 1, parseCommand);
        ros::Subscriber joy_sub = nh.subscribe ("joy", 1, joy_cb);
        ros::Subscriber markers_list_sub = nh.subscribe ("lar_visionsystem/markers_list", 1, markers_list_cb);
        ros::Subscriber comau_sub = nh.subscribe ("lar_comau/comau_full_state_publisher", 1, comau_cb);
        ros::Publisher comau_cartesian_controller = nh.advertise<lar_comau::ComauCommand>("lar_comau/comau_cartesian_controller", 1);
        ros::Publisher marker_approach = nh.advertise<lar_visionsystem::MarkerApproachCommand>("lar_visionsystem/marker_approach", 1);
        //Initialize Joystick
        {
                joystick_speed_max.x = joystick_speed_max.y = joystick_speed_max.z =  0.5f;
                joystick_speed.x = joystick_speed.y = joystick_speed.z = 0.0f;

                joystick_angular_speed_max.x = joystick_angular_speed_max.y = joystick_angular_speed_max.z =  M_PI/1000.0f;
                joystick_angular_speed.x = joystick_angular_speed.y = joystick_angular_speed.z = 0.0f;

                joystick_set_point.position.x = 1000.0f;
                joystick_set_point.position.y = 0.0f;
                joystick_set_point.position.z = 0.0f;

                KDL::Rotation joystick_rotation;
                MathUtils::getRotationsByName("",joystick_rotation);
                MathUtils::poseToKDLRotation(joystick_set_point,joystick_rotation,true);
        }
        //END Initialize Joystick


        //Initialize Marker Approach Pose
        {
                follow_target_approach_pose.position.x =-0.025f;
                follow_target_approach_pose.position.y = -0.085f;
                follow_target_approach_pose.position.z = 0.7f;
                KDL::Rotation follow_rotation;
                MathUtils::getRotationsByName("down_x",follow_rotation);
                MathUtils::poseToKDLRotation(follow_target_approach_pose,follow_rotation,true);
        }

        //Initialize Gripper
        gripper_serial = open(gripper_usb_name.c_str(), O_RDWR | O_NOCTTY | O_NDELAY);
        gripper_status = gripper_serial >= 0; // open() returns -1 on failure
        if (gripper_status) {
                tcgetattr(gripper_serial, &gripper_serial_cfg);
                cfsetospeed(&gripper_serial_cfg, BAUDRATE); /* baud rate */
                tcsetattr(gripper_serial, TCSANOW, &gripper_serial_cfg); /* apply the settings */
                tcflush(gripper_serial, TCOFLUSH);
        }
        //End Initialize Gripper

        tf::StampedTransform transform;
        tf::Transform current_position;
        lar_comau::ComauCommand comau_command;
        comau_command.command = "joint";


        boost::thread updateTFsThread(updateTFs);

        // Spin
        while( ros::ok() && GLOBAL_NODE_ACTIVE_STATUS ) {

                if(gripper_status) {
                        std::cout << "*** GRIPPER ONLINE! ***" << std::endl;
                        std::cout << "  -- Closure angle: "<<gripper_closure_angle<<std::endl;
                        gripper_closure_angle += gripper_closure_speed;
                }else{
                        std::cout << "*** GRIPPER OFFLINE! ***" <<std::endl;
                }

                if(comau_sync) {
                        std::cout << "*** SYNC WITH COMAU ACTIVE! ***"<<std::endl;
                }

                /** MODE JOYSTICK CONTROL */
                if(current_mode==JOYSTICK_CONTROL) {
                        std::cout << "Mode: Joystick Control"<<std::endl;
                        std::cout << joystick_angular_speed<<std::endl;

                        joystick_set_point.position.x +=  joystick_speed.x;
                        joystick_set_point.position.y +=  joystick_speed.y;
                        joystick_set_point.position.z +=  joystick_speed.z;

                        KDL::Rotation joystick_rotation;
                        MathUtils::poseToKDLRotation(joystick_set_point,joystick_rotation);
                        joystick_rotation.DoRotX(joystick_angular_speed.x);
                        joystick_rotation.DoRotY(joystick_angular_speed.y);
                        joystick_rotation.DoRotZ(joystick_angular_speed.z);
                        MathUtils::poseToKDLRotation(joystick_set_point,joystick_rotation,true);

                        MathUtils::poseToTF(joystick_set_point,current_position);
                }

                /** MODE FOLLOW TARGET */
                if(current_mode==FOLLOW_TARGET) {
                        std::cout << "Mode: Follow Target"<<std::endl;

                        if(markers_current_list.size()<=0) {
                                std::cout << "NO TARGET ARE YOU CRAZY!!! "<<follow_target_id<<std::endl;
                        }else{
                                std::cout << "Following target: "<<follow_target_id<<std::endl;

                                follow_target_approach_pose.position.z +=  joystick_speed.z/100.0f;
                                if(follow_target_approach_pose.position.z<0.05f) {
                                        follow_target_approach_pose.position.z=0.025f;
                                }
                                KDL::Rotation approach_rotation;
                                MathUtils::poseToKDLRotation(follow_target_approach_pose,approach_rotation);
                                approach_rotation.DoRotZ(joystick_angular_speed.z/100.0f);
                                MathUtils::poseToKDLRotation(follow_target_approach_pose,approach_rotation,true);

                                current_position = target_approach;

                        }
                }
                if(GLOBAL_CALIBRATION_ACTIVE) {
                        try{
                                listener->lookupTransform("camera_rgb_frame", calibration_marker_name, ros::Time(0), t_cam_marker);
                                listener->lookupTransform("base", "comau_base_marker",ros::Time(0), t_0_marker);

                                t_6_0 = t_0_6.inverse();
                                t_marker_cam = t_cam_marker.inverse();
                                t_6_cam = t_6_0 * t_0_marker;
                                t_6_cam = t_6_cam * t_marker_cam;

                                tf_broadcaster->sendTransform(tf::StampedTransform(t_6_cam, ros::Time::now(), "comau_t0U", "comau_t0CAM"));

                                tf::Transform full = t_0_6*t_6_cam;
                                tf_broadcaster->sendTransform(tf::StampedTransform(full, ros::Time::now(), "base", "comau_t0CAM_full"));

                                //tf_broadcaster->sendTransform(tf::StampedTransform(t_0_6, ros::Time(0), "base", "camera_rgb_frame"));
                        }
                        catch (tf::TransformException &ex) {
                                ROS_ERROR("%s",ex.what());
                        }
                }else{
                        try {
                                listener->lookupTransform("base", "comau_t0U", ros::Time(0), t_0_6);
                        }
                        catch (tf::TransformException &ex) {
                                ROS_ERROR("%s", ex.what());
                        }
                        //tf_broadcaster->sendTransform(tf::StampedTransform(t_0_6, ros::Time(0), "base", "camera_rgb_frame"));
                }

                /** TARGET SELECTION */
                lar_visionsystem::MarkerApproachCommand target_approach_command;
                target_approach_command.marker_id = follow_target_id;
                target_approach_command.update_target_only = true;
                marker_approach.publish(target_approach_command);
                std::cout << markers_current_list.size() <<std::endl;

                /** BROADCAST COMAU TF TARGET */
                tf_broadcaster->sendTransform(tf::StampedTransform(current_position, ros::Time(0), "base", "comau_tf_target"));
                std::cout << "Sending "<<current_position.getOrigin()[0]<<std::endl;

                /** SEND DATA TO COMAU */
                MathUtils::poseToTF(comau_command.pose,current_position,true);
                std::cout << comau_command<<std::endl;
                comau_cartesian_controller.publish(comau_command);

                ros::spinOnce();
                std::system("clear");
        }

        updateTFsThread.join();
}
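For reference, the calibration branch above chains flange←base, base←marker, and marker←camera into flange←camera. A condensed restatement using the same globals; the variable name flange_to_camera is illustrative:

// t_6_cam = t_0_6.inverse() * t_0_marker * t_cam_marker.inverse()
tf::Transform flange_to_camera = t_0_6.inverse() * t_0_marker * t_cam_marker.inverse();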
Example #5
  TyErrorId processWithLock(CAS &tcas, ResultSpecification const &res_spec)
  {
    MEASURE_TIME;
    outInfo("process begins");
    rs::SceneCas cas(tcas);
    rs::Scene scene = cas.getScene();

    cas.get(VIEW_CLOUD, *cloud);
    cas.get(VIEW_COLOR_IMAGE, color);
    cas.get(VIEW_DEPTH_IMAGE, depth);
    cas.get(VIEW_CAMERA_INFO, cameraInfo);

    indices->clear();
    indices->reserve(cloud->points.size());

    camToWorld.setIdentity();
    if(scene.viewPoint.has())
    {
      rs::conversion::from(scene.viewPoint.get(), camToWorld);
    }
    else
    {
      outWarn("No camera to world transformation, no further processing!");
      throw rs::FrameFilterException();
    }
    worldToCam = tf::StampedTransform(camToWorld.inverse(), camToWorld.stamp_, camToWorld.child_frame_id_, camToWorld.frame_id_);
    computeFrustum();

    // The default place to look for objects is counter tops, unless the query asks
    // for a different place. The query message comes from the designator and does not
    // match the entries from the semantic map, so we need to transform them.
    rs::Query qs = rs::create<rs::Query>(tcas);
    std::vector<std::string> regionsToLookAt;
    regionsToLookAt.assign(defaultRegions.begin(),defaultRegions.end());
    regions.clear();
    if(cas.getFS("QUERY", qs))
    {
      outWarn("loaction set in query: " << qs.location());
      if(std::find(defaultRegions.begin(), defaultRegions.end(), qs.location()) == std::end(defaultRegions) && qs.location()!="")
      {
        regionsToLookAt.clear();
        regionsToLookAt.push_back(qs.location());
      }
    }

    if(regions.empty())
    {
      std::vector<rs::SemanticMapObject> semanticRegions;
      getSemanticMapEntries(cas, regionsToLookAt, semanticRegions);

      regions.resize(semanticRegions.size());
      for(size_t i = 0; i < semanticRegions.size(); ++i)
      {

        Region &region = regions[i];
        region.width = semanticRegions[i].width();
        region.depth = semanticRegions[i].depth();
        region.height = semanticRegions[i].height();
        region.name = semanticRegions[i].name();
        rs::conversion::from(semanticRegions[i].transform(), region.transform);
      }
    }

    for(size_t i = 0; i < regions.size(); ++i)
    {
      if(frustumCulling(regions[i]) || !frustumCulling_)
      {
        outInfo("region inside frustum: " << regions[i].name);
        filterRegion(regions[i]);
      }
      else
      {
        outInfo("region outside frustum: " << regions[i].name);
      }
    }

    pcl::ExtractIndices<PointT> ei;
    ei.setKeepOrganized(true);
    ei.setIndices(indices);
    ei.filterDirectly(cloud);

    cas.set(VIEW_CLOUD, *cloud);

    if(changeDetection && !indices->empty())
    {
      ++frames;
      if(lastImg.empty())
      {
        lastMask = cv::Mat::ones(color.rows, color.cols, CV_8U);
        lastImg = cv::Mat::zeros(color.rows, color.cols, CV_32FC4);
      }

      uint32_t secondsPassed = camToWorld.stamp_.sec - lastTime.sec;
      bool change = checkChange() || cas.has("QUERY") || secondsPassed > timeout;

      if(!change)
      {
        ++filtered;
      }
      else
      {
        lastTime = camToWorld.stamp_;
      }
      outInfo("filtered frames: " << filtered << " / " << frames << "(" << (filtered / (float)frames) * 100 << "%)");

      if(!change)
      {
        outWarn("no changes in frame detected, no further processing!");
        throw rs::FrameFilterException();
      }
    }

    return UIMA_ERR_NONE;
  }
void InputOutputLinControl::computeRelativeTransformation(tf::StampedTransform start, tf::StampedTransform current, tf::Transform& relative_transform)
{
	relative_transform = start.inverse() * current;
}
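A hypothetical usage of computeRelativeTransformation, measuring how far the robot has moved since a reference pose; the listener, the controller instance, and the frame names "odom" and "base_link" are assumptions:

	// Displacement of "base_link" in "odom" since a start pose.
	tf::StampedTransform start_tf, current_tf;
	listener.lookupTransform("odom", "base_link", ros::Time(0), start_tf);
	// ... the robot moves ...
	listener.lookupTransform("odom", "base_link", ros::Time(0), current_tf);

	tf::Transform delta;
	controller.computeRelativeTransformation(start_tf, current_tf, delta);
	double travelled = delta.getOrigin().length(); // metres moved since start_tf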