// Select which mode to use from the launch file: fixed values for the LASA/BOXY robot, or vision tracking.
	void initialize() {
		std::string ad;
		ros::NodeHandle _nh("~");
		_nh.getParam("action_mode", ad);
		if(ad == "lasa_fixed") {
			action_mode = ACTION_LASA_FIXED;
		} else if(ad == "boxy_fixed") {
			action_mode = ACTION_BOXY_FIXED;
		} else if(ad == "vision") {
			action_mode = ACTION_VISION;
		} else {
			ROS_ERROR_STREAM("Unrecognized action type. Must be one of 'lasa_fixed', 'boxy_fixed', 'vision'. Found '"<<ad<<"'");
			// A bare `throw;` with no active exception calls std::terminate; throw a real exception instead (needs <stdexcept>).
			throw std::runtime_error("Unrecognized action_mode: " + ad);
		}

		_nh.getParam("world_frame", world_frame);
		_nh.getParam("model_base_path", base_path);
		_nh.getParam("use_boxy_tool", use_boxy_tool);
		_nh.getParam("path_matlab_plot", path_matlab_plot);
		
		if (use_boxy_tool)
			ROS_INFO("Using Boxy tool: adding an extra rotation");

		ROS_INFO_STREAM("Selected Action Mode: " << ad);

		// Useful when running in simulation: WaitForForces() would never return with the simulator.
		if(!_nh.getParam("wait_for_force", bWaitForForces)) {
			ROS_INFO_STREAM("Parameter 'wait_for_force' not set; defaulting to true");
			bWaitForForces = true;
		}
	}
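// For reference, the private parameters read above are typically set from a launch file.
// A minimal, illustrative snippet (package, node name, and values are hypothetical; only the
// parameter names come from the getParam calls above):
//
//   <node name="action_node" pkg="my_pkg" type="action_node" output="screen">
//     <param name="action_mode"      value="vision"/>
//     <param name="world_frame"      value="world"/>
//     <param name="model_base_path"  value="$(find my_pkg)/models"/>
//     <param name="use_boxy_tool"    value="false"/>
//     <param name="path_matlab_plot" value="/tmp/plots"/>
//     <param name="wait_for_force"   value="true"/>
//   </node>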
int main(int argc, char** argv)
{
	//ROS setup
	ros::init(argc, argv, "read_from_camera_node");
	ros::NodeHandle _nh("~");

	ROS_DEBUG("read_from_camera_node Initialized");

	ros::Subscriber left_image_subscriber = _nh.subscribe("left_image",1,left_image_callback);
	ros::Subscriber right_image_subscriber = _nh.subscribe("right_image",1,right_image_callback);

	left_image_publisher = _nh.advertise<sensor_msgs::Image>("capture_left_frame",1);
	right_image_publisher = _nh.advertise<sensor_msgs::Image>("capture_right_frame",1);
	
	ros::ServiceServer next_frame_service = _nh.advertiseService("next_frame_service",next_frame);

	fetch_next_right_image = false;
	fetch_next_left_image = false;

	ROS_DEBUG("read_from_camera_node Ready to process requests");

	ros::spin();

	return 0;
}
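// The next_frame service advertised above presumably arms the two fetch flags so that the
// image callbacks republish the next incoming frames. A minimal sketch of such a handler
// (the actual service type is not shown in this snippet; std_srvs::Empty is an assumption):
//
//   bool next_frame(std_srvs::Empty::Request&, std_srvs::Empty::Response&) {
//       fetch_next_left_image  = true;
//       fetch_next_right_image = true;
//       return true;
//   }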
int main(int argc, char** argv) {

	ros::init(argc, argv, "joint_to_cart");
	ros::NodeHandle nh;
	ros::NodeHandle _nh("~");

	mRobot = new RTKRobotArm(true);
	if(!mRobot->initialize(_nh)) {
		ROS_ERROR("Error while loading robot");
		return 1;
	}

	if(!parseParams(_nh)) {
		ROS_ERROR("Errors while parsing arguments.");
		return 1;
	}

	numdof = mRobot->numdof;
	read_torque.resize(numdof);
	read_jpos.resize(numdof);
	ee_ft.resize(6);

	pub_pose = nh.advertise<geometry_msgs::PoseStamped>(output_cart_pose, 3);
	pub_ft = nh.advertise<geometry_msgs::WrenchStamped>(output_cart_ft, 3);
	ros::Subscriber sub = nh.subscribe<sensor_msgs::JointState>(input_joint_topic, 10, jointStateCallback,ros::TransportHints().tcpNoDelay());
	ros::Subscriber sub_ft = nh.subscribe<geometry_msgs::WrenchStamped>("/right_arm_ft_sensor/wrench", 10, sensorFTCallback, ros::TransportHints().tcpNoDelay());


	ROS_INFO("Node started");
	ros::spin();

	return 0;
}
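// jointStateCallback and sensorFTCallback are not shown in this snippet. Given the node's
// purpose (joint_to_cart), a plausible outline of the joint callback, purely as an
// assumption, is: copy msg->position into read_jpos, feed it to the robot model, and
// publish the resulting end-effector pose, e.g.
//
//   void jointStateCallback(const sensor_msgs::JointState::ConstPtr& msg) {
//       for(int i=0; i<numdof; ++i) read_jpos[i] = msg->position[i];
//       mRobot->setJoints(read_jpos);
//       tf::Pose ee; mRobot->getEEPose(ee);
//       geometry_msgs::PoseStamped ps;
//       tf::poseTFToMsg(ee, ps.pose);
//       ps.header.stamp = ros::Time::now();
//       pub_pose.publish(ps);
//   }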
    void LoadThread()
    {
      if(!this->husky){
        ROS_FATAL_STREAM("Husky plugin requires a husky as its parent.");
        return;
      }
      ros::NodeHandle _nh("husky");
      subscriber = _nh.subscribe("diff_wrench", 1000, &HuskySimulator::HuskyStateCallback, this);
      this->_updateConnection = event::Events::ConnectWorldUpdateBegin(boost::bind(&HuskySimulator::OnUpdate, this, _1));
      ROS_INFO("Husky plugin for ROS successfully loaded :)");

      // Process incoming messages on this plugin thread. Sleep between iterations
      // so we do not busy-wait on a full CPU core (100 Hz here is an arbitrary choice).
      ros::Rate rate(100);
      while(ros::ok()){
        ros::spinOnce();
        rate.sleep();
      }
    }
int main(int argc, char** argv) {


	ros::init(argc, argv, "cart_to_joint");
	ros::NodeHandle nh;
	ros::NodeHandle _nh("~");


	if(!parseParams(_nh)) {
		ROS_ERROR("Errors while parsing arguments.");
		return 1;
	}

	// Initialize robot with/without orientation (variable from launch file)
	mRobot = new RTKRobotArm(bOrientCtrl);
	if(!mRobot->initialize(_nh)) {
		ROS_ERROR("Error while loading robot");
		return 1;
	}

	numdof = mRobot->numdof;
	des_ee_ft.resize(6);
	est_ee_ft.resize(6);
	des_ee_stiff.resize(6);
	shift_ee_ft.resize(6);
	for(int i=0; i<6; ++i) {
		des_ee_ft(i)=0;
		est_ee_ft(i)=0;
		shift_ee_ft(i)=0;
	}

	read_jpos.resize(numdof);
	joint_lims.resize(numdof);

	// TODO: need to find a better way to do this from launch files
	/********** Specific for RTKRobotArm (KUKA) ****************/
/*	joint_lims[0] = 150;joint_lims[1] = 107;joint_lims[2] = 150; joint_lims[3] = 107;
	joint_lims[4] = 150;joint_lims[5] = 115;joint_lims[6] = 150;*/

	/********** Specific for RTKRobotArm (BOXY) ****************/
	joint_lims[0] = 150;joint_lims[1] = 107;joint_lims[2] = 150; joint_lims[3] = 107;
	joint_lims[4] = 150;joint_lims[5] = 115;joint_lims[6] = 150;

	joint_lims = joint_lims*M_PI/180.0;
	mRobot->setJointLimits(joint_lims);
	Eigen::VectorXd np; np.resize(numdof);
	np[0] = -0.62; np[1] = 0.76; np[2] = 0.61; np[3] = -1.0;
	np[4] = -0.40; np[5] = 1.4; np[6] = 1.4;
	// TODO: not implemented properly yet
//	mRobot->setNullPosture(np);
	/*********************************************************/
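	// One way to address the TODO above is to read the limits from the parameter server
	// instead of hard-coding them. Illustrative sketch only; the parameter name
	// "joint_limits_deg" is an assumption, not part of the original launch files:
	//
	//   std::vector<double> lims_deg;
	//   if(_nh.getParam("joint_limits_deg", lims_deg) && (int)lims_deg.size() == numdof) {
	//       for(int i=0; i<numdof; ++i) joint_lims[i] = lims_deg[i]*M_PI/180.0;
	//       mRobot->setJointLimits(joint_lims);
	//   }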

	if(bUseIAI) {
		msg_vel_stiff_iai.stiffness.resize(numdof);
		msg_vel_stiff_iai.damping.resize(numdof);
		msg_vel_stiff_iai.add_torque.resize(numdof);
		msg_vel_stiff_iai.velocity.resize(numdof);
	} else {
		msg_vel_stiff_jstate.velocity.resize(numdof);
	}


	if(bUseIAI) {
		ROS_INFO_STREAM("Using iai_control_msgs");
		pub_joints = nh.advertise<iai_control_msgs::MultiJointVelocityImpedanceCommand>(output_joints_topic, 3);
	} else {
		ROS_INFO_STREAM("Joints states");
//		pub_joints = nh.advertise<sensor_msgs::JointState>(output_joints_topic, 3);
        pub_joints = nh.advertise<kuka_fri_bridge::JointStateImpedance>(output_joints_topic, 3);
	}

	ros::Subscriber sub_pos = nh.subscribe<geometry_msgs::PoseStamped>(input_pose_topic, 1, cartCallback, ros::TransportHints().tcpNoDelay());
	ros::Subscriber sub_ft = nh.subscribe<geometry_msgs::WrenchStamped>(input_ft_topic, 1, ftCallback, ros::TransportHints().tcpNoDelay());
	ros::Subscriber sub_stiff = nh.subscribe<geometry_msgs::WrenchStamped>(input_stiff_topic, 1, stiffnessCallback, ros::TransportHints().tcpNoDelay());
	ros::Subscriber sub_est_ft = nh.subscribe<geometry_msgs::WrenchStamped>(input_estimate_ft_topic, 1, estFTCallback, ros::TransportHints().tcpNoDelay());
	ros::Subscriber sub_jstate = nh.subscribe<sensor_msgs::JointState>(input_joint_topic, 1, jointStateCallback, ros::TransportHints().tcpNoDelay());
    ros::Subscriber sub = nh.subscribe("Action_State", 1, actionStateCallback, ros::TransportHints().tcpNoDelay());
    ros::Subscriber sub_ja = nh.subscribe<std_msgs::Bool>("joint_action", 1, jointActionCallback, ros::TransportHints().tcpNoDelay());


	joint_vel.resize(numdof); joint_stiff.resize(numdof); joint_damp.resize(numdof);
	for(int i=0; i<numdof; ++i) {
		joint_vel[i] = 0.;
		joint_stiff[i] = DEFAULT_JSTIFF;
		joint_damp[i] = DEFAULT_JDAMP;
	}

	ROS_INFO("Waiting for robot joint state and FT estimate topic...");
	ros::Rate r(1000);
	isJointOkay = false; isFTOkay = false; isAllOkay = false;
	while(ros::ok() && (!isJointOkay || (bUseForce && !isFTOkay)) ) {
//	while(ros::ok()){ 
		ros::spinOnce();
		r.sleep();
	}

	tf::Pose tmp;
	mRobot->setJoints(read_jpos);
	mRobot->getEEPose(tmp);

	des_ee_pose = tmp;

	// Remove the residual force/torque from future estimates, to compensate for errors in the EE force estimate or tool calibration.
	shift_ee_ft = est_ee_ft;

	ROS_INFO_STREAM("Shift: "<<shift_ee_ft.transpose());
	if(shift_ee_ft.norm() > ft_tol) {
		ROS_WARN("Shift in EE FT estimate more than the required tolerance!!");
		ROS_WARN("Either increase the tolerance or calibrate the tool better");
		ros::Duration warn(2);
		warn.sleep();
	}
	ROS_INFO("Node started");

	isAllOkay = true;

	// Spin until shutdown; once ros::ok() becomes false, send at least one
	// zero-velocity message before exiting, for safety.
	ros::Rate r_(1000);
	while(ros::ok()) {
		ros::spinOnce();
		r_.sleep();
	}

	for(int i=0; i<numdof; ++i) {
		joint_vel[i] = 0.0;
	}

	sendJointMessage(joint_vel, joint_stiff, joint_damp);
	ros::Duration(1.0).sleep();
	nh.shutdown();
	return 0;
}
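// sendJointMessage() is not shown in this snippet. Based on the buffers prepared above, it
// presumably copies the commanded velocity, stiffness, and damping into either
// msg_vel_stiff_iai (when bUseIAI is set) or the JointStateImpedance message, and publishes
// it on pub_joints. A rough sketch, as an assumption only:
//
//   void sendJointMessage(const Eigen::VectorXd& vel, const Eigen::VectorXd& stiff, const Eigen::VectorXd& damp) {
//       if(bUseIAI) {
//           for(int i=0; i<numdof; ++i) {
//               msg_vel_stiff_iai.velocity[i]  = vel[i];
//               msg_vel_stiff_iai.stiffness[i] = stiff[i];
//               msg_vel_stiff_iai.add_torque[i] = 0.0;
//           }
//           pub_joints.publish(msg_vel_stiff_iai);
//       } else {
//           // analogous fill of the JointStateImpedance message
//       }
//   }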
int main(int argc, char** argv)
{
    ros::init(argc, argv, "image_publisher");
    ros::NodeHandle nh;
    ros::NodeHandle _nh("~"); // to get the private params
    image_transport::ImageTransport it(nh);
    image_transport::CameraPublisher pub = it.advertiseCamera("camera", 1);

    // provider can be an url (e.g.: rtsp://10.0.0.1:554) or a number of device, (e.g.: 0 would be /dev/video0)
    std::string video_stream_provider;
    cv::VideoCapture cap;
    if (_nh.getParam("video_stream_provider", video_stream_provider)){
        ROS_INFO_STREAM("Resource video_stream_provider: " << video_stream_provider);
        // If we are given a string of 4 chars or less (I don't think we'll have more than 100 video devices connected)
        // treat it as a number and act accordingly so we open up the videoNUMBER device
        if (video_stream_provider.size() < 4){
            ROS_INFO_STREAM("Getting video from provider: /dev/video" << video_stream_provider);
            cap.open(atoi(video_stream_provider.c_str()));
        }
        else{
            ROS_INFO_STREAM("Getting video from provider: " << video_stream_provider);
            cap.open(video_stream_provider);
        }
    }
    else{
        ROS_ERROR("Failed to get param 'video_stream_provider'");
        return -1;
    }
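    // For illustration, the provider is passed as a private parameter, e.g. (the package
    // name below is a placeholder; only the parameter name comes from the code above):
    //
    //   rosrun <your_package> image_publisher _video_stream_provider:=0
    //   rosrun <your_package> image_publisher _video_stream_provider:=rtsp://10.0.0.1:554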

    std::string camera_name;
    _nh.param("camera_name", camera_name, std::string("camera"));
    ROS_INFO_STREAM("Camera name: " << camera_name);

    int fps;
    _nh.param("fps", fps, 240);
    ROS_INFO_STREAM("Throttling to fps: " << fps);

    std::string frame_id;
    _nh.param("frame_id", frame_id, std::string("camera"));
    ROS_INFO_STREAM("Publishing with frame_id: " << frame_id);

    std::string camera_info_url;
    _nh.param("camera_info_url", camera_info_url, std::string(""));
    ROS_INFO_STREAM("Provided camera_info_url: '" << camera_info_url << "'");

    bool flip_horizontal;
    _nh.param("flip_horizontal", flip_horizontal, false);
    ROS_INFO_STREAM("Flip horizontal image is : " << ((flip_horizontal)?"true":"false"));

    bool flip_vertical;
    _nh.param("flip_vertical", flip_vertical, false);
    ROS_INFO_STREAM("Flip flip_vertical image is : " << ((flip_vertical)?"true":"false"));

    // From http://docs.opencv.org/modules/core/doc/operations_on_arrays.html#void flip(InputArray src, OutputArray dst, int flipCode)
    // FLIP_HORIZONTAL == 1, FLIP_VERTICAL == 0 or FLIP_BOTH == -1
    bool flip_image = true;
    int flip_value;
    if (flip_horizontal && flip_vertical)
        flip_value = -1; // flip both, horizontal and vertical
    else if (flip_horizontal)
        flip_value = 1;  // flip around the y-axis (horizontal mirror)
    else if (flip_vertical)
        flip_value = 0;  // flip around the x-axis (vertical)
    else
        flip_image = false;

    if(!cap.isOpened()){
        ROS_ERROR_STREAM("Could not open the stream.");
        return -1;
    }

    ROS_INFO_STREAM("Opened the stream, starting to publish.");

    cv::Mat frame;
    sensor_msgs::ImagePtr msg;
    sensor_msgs::CameraInfo cam_info_msg;
    std_msgs::Header header;
    header.frame_id = frame_id;
    camera_info_manager::CameraInfoManager cam_info_manager(nh, camera_name, camera_info_url);
    // Get the saved camera info if any
    cam_info_msg = cam_info_manager.getCameraInfo();

    ros::Rate r(fps);
    while (nh.ok()) {
        cap >> frame;
        if (pub.getNumSubscribers() > 0){
            // Check if grabbed frame is actually full with some content
            if(!frame.empty()) {
                // Flip the image if necessary
                if (flip_image)
                    cv::flip(frame, frame, flip_value);
                msg = cv_bridge::CvImage(header, "bgr8", frame).toImageMsg();
                // Create a default camera info if we didn't get a stored one on initialization
                if (cam_info_msg.distortion_model == ""){
                    ROS_WARN_STREAM("No calibration file given, publishing a reasonable default camera info.");
                    cam_info_msg = get_default_camera_info_from_image(msg);
                    cam_info_manager.setCameraInfo(cam_info_msg);
                }
                // The timestamps are in sync thanks to this publisher
                pub.publish(*msg, cam_info_msg, ros::Time::now());
            }

            ros::spinOnce();
        }
        r.sleep();
    }
    return 0;
}
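// get_default_camera_info_from_image() is called above but not shown in this snippet.
// As an assumption (not the original implementation), such a helper would fill a
// sensor_msgs::CameraInfo whose size matches the image, with a unit focal length,
// the principal point at the image center, and no distortion:
//
//   sensor_msgs::CameraInfo get_default_camera_info_from_image(sensor_msgs::ImagePtr img) {
//       sensor_msgs::CameraInfo info;
//       info.header.frame_id = img->header.frame_id;
//       info.height = img->height;
//       info.width  = img->width;
//       info.distortion_model = "plumb_bob";
//       info.D.assign(5, 0.0);                                   // no distortion
//       info.K = {{1.0, 0.0, img->width/2.0,                     // fx, 0, cx
//                  0.0, 1.0, img->height/2.0,                    // 0, fy, cy
//                  0.0, 0.0, 1.0}};
//       info.P = {{1.0, 0.0, img->width/2.0, 0.0,
//                  0.0, 1.0, img->height/2.0, 0.0,
//                  0.0, 0.0, 1.0, 0.0}};
//       return info;
//   }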