Code Example #1
void handleRecievedData(char recv_buf[],Publisher config_pub,Publisher arm_pub)
{
    std_msgs::String msg;
    char type[4]= {'\0'};
    strncpy(type,recv_buf,3);
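    // the first three characters of recv_buf select which publisher the payload is forwarded to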
    if(!strcmp(type,"CAM"))
    {
        char temp[20]= {'\0'};
        strncpy(temp,recv_buf+4,sizeof(temp)-1);    // bound the copy to the destination buffer
        ROS_INFO("sent data : %s",temp);
        msg.data = temp;
        config_pub.publish(msg);
    }
    else if(!strcmp(type,"ARM"))
    {
        char temp[20]= {'\0'};
        strncpy(temp,recv_buf+4,sizeof(temp)-1);
        ROS_INFO("sent data : %s",temp);
        msg.data = temp;
        arm_pub.publish(msg);
    }
    else if(!strcmp(type,"PTZ"))
    {
        char temp[20]= {'\0'};
        strncpy(temp,recv_buf+4,sizeof(temp)-1);
        ROS_INFO("sent data : %s",temp);
        msg.data = temp;
        ptz_pub.publish(msg);
    }
}
Code Example #2
void sensorPacketCallback(vector<float>& joints_pos, vector<float>& joints_adc, float pressure)
{
	// Send joint state message
	JointState msg_joints;

	msg_joints.header.stamp = Time::now();

	for(int i = 0; i < joints_adc.size(); i++)
	{
		msg_joints.name.push_back(joint_names[i]);
		msg_joints.position.push_back(joints_adc[i]);
	}

	pub_joints.publish(msg_joints);

	// Send pressure message
	Float32 msg_pressure;
	msg_pressure.data = pressure;
	pub_pressure.publish(msg_pressure);

	// Store current positions for IK computations
	for(int i = 0; i < joints_pos.size(); i++)
	{
		q_current(i) = joints_pos[i];
	}
}
Code Example #3
void publishStatus(TimerEvent timerEvent) {
	std_msgs::Int32 intMessage;

	intMessage.data = _currentInput + 1;
	_currentInputPublisher.publish(intMessage);

	intMessage.data = _currentOutput + 1;
	_currentOutputPublisher.publish(intMessage);
}
Code Example #4
File: ros_publisher.cpp  Project: mdombro/CSC232
int main (int argc, char* argv[]) {
	gengetopt_args_info args;
	cmdline_parser(argc,argv,&args);
	double v = args.linearVelocity_arg;
	double a = args.angularVelocity_arg;
	double time = args.time_arg;
	int count = 0;
	init(argc, argv, "talker");
	geometry_msgs::Twist msg;
	msg.linear.x = v;
	msg.angular.z = a;

	NodeHandle n;
	Publisher pub = n.advertise<geometry_msgs::Twist>("/cmd_vel_mux/input/navi", 1);
	ros::Rate loop_rate(10);
	while (ros::ok() && count < (int)(time*10)) {
		ros::spinOnce();
		pub.publish(msg);
		loop_rate.sleep();
		count++;
	}
	for (int i = 0; i < 5; i++) {
		msg.linear.x = 0;
		msg.angular.z = 0;
		pub.publish(msg);
		ros::spinOnce();
	}



	return 0;

	//init(argc, argv, "talker");
	//geometry_msgs::Twist msg;
	//msg.linear.x = 1.0;
	//msg.angular.z = .5;

	//int max_count = 30;
	//int count = 0;
	//NodeHandle n;
	//Publisher pub = n.advertise<geometry_msgs::Twist>("/cmd_vel_mux/input/navi", 1);
	//ros::Rate loop_rate(10);
	//while (ros::ok() && count < max_count) {
	//	ros::spinOnce();
	//	pub.publish(msg);
	//	loop_rate.sleep();
	//	count++;
	//}
	//return 0;
}
Code Example #5
void ImageCB(const sensor_msgs::ImageConstPtr& msg) {

    cv_bridge::CvImagePtr cv_ptr;
    Mat frame;
    Mat output;

    try {
        cv_ptr = cv_bridge::toCvCopy(msg, "bgr8");
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("CV-Bridge error: %s", e.what());
        return;
    }

    frame = cv_ptr->image;

    frame = frame.mul(mask);

    Mat white_lines = findWhiteLines(frame);

    Mat blue_lines = findBlueLines(frame);

    Mat yellow_lines = findYellowLines(frame);

    Mat orange_blobs = findOrange(frame);

    output = white_lines + blue_lines + yellow_lines + orange_blobs;

    sensor_msgs::Image outmsg;

    cv_ptr->image = output;
    cv_ptr->encoding = "bgr8";
    cv_ptr->toImageMsg(outmsg);
    img_pub.publish(outmsg);
}
Code Example #6
File: position.cpp  Project: callemein/ros_robot
int main(int argc, char *argv[])
{
   init(argc, argv, "position");

   int key = 0;
   NodeHandle n;
   Position_Node::Position pos;
   Rate loop_rate(1);     /* in Hz */

   
   Subscriber sub = n.subscribe("MarkerInfo", 1000, markerCallback);
   Publisher pub = n.advertise<Position_Node::Position>("Position", 1000);

   while(ros::ok())
   {
      /* Publish our position */
      pos.x = g_robot_x;
      pos.y = g_robot_y;
      pos.theta = g_robot_theta;

      //ROS_INFO("Published position.\n");
      pub.publish(pos);

      spinOnce();

      loop_rate.sleep();
   }

   return 0;
}
Code Example #7
void callback(const StereoPairIMUConstPtr& pair)
{
    ImagePtr left(new Image), right(new Image);
    PoseStampedPtr pose(new PoseStamped);

    *left = pair->pair.leftImage;
    *right = pair->pair.rightImage;
    /**pose = pair->pose.quartenionPose;*/
    *pose = pair->pose.eulerPose;

    leftImagePub.publish(left);
    rightImagePub.publish(right);
    imuPosePub.publish(pose);

    fpsp.print();
}
Code Example #8
 Benchmark()
     : nh_()
     , benchmark_state_sub_ (nh_.subscribe ("/roah_rsbb/benchmark/state", 1, &Benchmark::benchmark_state_callback, this))
     , messages_saved_pub_ (nh_.advertise<std_msgs::UInt32> ("/roah_rsbb/messages_saved", 1, true))
 {
     // This should reflect the real number or size of messages saved.
     std_msgs::UInt32 messages_saved_msg;
     messages_saved_msg.data = 1;
     messages_saved_pub_.publish (messages_saved_msg);
 }
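A minimal sketch of what the constructor's comment asks for: keep a running count of saved messages and republish it on the latched topic whenever it changes. The messages_saved_count_ member and the helper name are hypothetical, not part of the original class.

 // Hypothetical helper for the Benchmark class above (messages_saved_count_ would be a uint32_t member).
 void notify_message_saved()
 {
     ++messages_saved_count_;
     std_msgs::UInt32 messages_saved_msg;
     messages_saved_msg.data = messages_saved_count_;
     messages_saved_pub_.publish (messages_saved_msg);   // latched, so late subscribers still get the latest count
 }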
Code Example #9
int main(int argc, char** argv){
  ros::init(argc, argv, "command_module");

  NodeHandle handler;
  Subscriber s = handler.subscribe("command_message", 1000, messageHandle);
  Subscriber q = handler.subscribe("odom", 1000, odomReceived);
  Publisher p = handler.advertise<geometry_msgs::Twist>("cmd_vel", 1000);
  ROS_INFO("** Command Module: On-line");

  ROS_INFO("** Command Module: Waiting for Valid Commands");

  Goal goal;
  ros::Rate loop_rate(10);
  while (ros::ok()) {

	if (!GoalFifo.empty()) {
		goal = GoalFifo.front();

		ROS_INFO("** Command Module: Sending Goal");
		if (goal.dist > 1.0) {
			p.publish(move(0));
			ros::Duration(goal.dist).sleep();
			p.publish(stop());
		}

		if (goal.dTheta > 1.0) {
			p.publish(turn(goal.dir));
			ros::Duration(goal.dTheta).sleep();
			p.publish(stop());
		}	


		p.publish(stop());
		GoalFifo.pop();
	}

	ros::spinOnce();
	loop_rate.sleep();
  }
	

  return 0;
}
Code Example #10
/**
 * Controls all the inner workings of the PID functionality
 * Should be called by _timer
 *
 * Controls the heating element Relays manually, overriding the standard relay
 * functionality
 *
 * The PID is designed to output an analog value, but the relay can only be on or off.
 *
 * "Time proportioning control" is essentially a really slow version of PWM:
 * first we decide on a window size, then set the PID to adjust its output between 0 and that window size.
 * Lastly, we add some logic that translates the PID output into "Relay On Time", with the remainder of the
 * window being "Relay Off Time".
 *
 * PID Adaptive Tuning
 * The tuning parameters can be changed at runtime. This can be
 * helpful if we want the controller to be aggressive at some
 * times and conservative at others.
 *
 */
void Ohmbrewer::Thermostat::doPID(){
    getSensor()->work();

    setPoint = getTargetTemp()->c();        //targetTemp
    input = getSensor()->getTemp()->c();//currentTemp
    double gap = abs(setPoint-input);   //distance away from target temp
    //SET TUNING PARAMETERS
    if (gap<10) {  //we're close to targetTemp, use conservative tuning parameters
        _thermPID->SetTunings(cons.kP(), cons.kI(), cons.kD());
    }else {//we're far from targetTemp, use aggressive tuning parameters
        _thermPID->SetTunings(agg.kP(), agg.kI(), agg.kD());
    }
    //COMPUTATIONS
    _thermPID->Compute();
    if (millis() - windowStartTime>windowSize) { //time to shift the Relay Window
        windowStartTime += windowSize;
    }
    //TURN ON
    if (getState() && gap!=0) {//if we want to turn on the element (thermostat is ON)
        //TURN ON state and powerPin
        if (!(getElement()->getState())) {//if heating element is off
            getElement()->setState(true);//turn it on
            if (getElement()->getPowerPin() != -1) { // if powerPin enabled
                digitalWrite(getElement()->getPowerPin(), HIGH); //turn it on (only once each time you switch state)
            }
        }
        //RELAY MODULATION
        if (output < millis() - windowStartTime) {
            digitalWrite(getElement()->getControlPin(), HIGH);
        } else {
            digitalWrite(getElement()->getControlPin(), LOW);
        }
    }
    //TURN OFF
    if (gap == 0 || getTargetTemp()->c() <= getSensor()->getTemp()->c() ) {//once reached target temp
        getElement()->setState(false); //turn off element
        getElement()->work();
//        if (getElement()->getPowerPin() != -1) { // if powerPin enabled
//            digitalWrite(getElement()->getPowerPin(), LOW); //turn it off too TODO check this
//        }

        // Notify Ohmbrewer that the target temperature has been reached.
        Publisher pub = Publisher(new String(getStream()),
                                  String("msg"),
                                  String("Target Temperature Reached."));
        pub.add(String("last_read_time"),
                String(getSensor()->getLastReadTime()));
        pub.add(String("temperature"),
                String(getSensor()->getTemp()->c()));
        pub.publish();
    }
}
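A minimal, standalone illustration of the time-proportioning scheme described in the comment above: the PID output, scaled to lie between 0 and the window size, is treated as the relay's on-time within each repeating window. The function and parameter names are hypothetical and not part of the Ohmbrewer code.

// Hypothetical sketch of "time proportioning control": the relay is on for the first
// pidOutputMs milliseconds of every windowSizeMs window and off for the remainder.
bool relayOnForTimeProportioning(double pidOutputMs,
                                 unsigned long nowMs,
                                 unsigned long windowStartMs,
                                 unsigned long windowSizeMs)
{
    unsigned long elapsed = (nowMs - windowStartMs) % windowSizeMs;   // position inside the current window
    return static_cast<double>(elapsed) < pidOutputMs;
}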
Code Example #11
void transform_cloud(sensor_msgs::PointCloud2 cloud_in) 
{
  bool can_transform = listener.waitForTransform(cloud_in.header.frame_id, 
						 frame_new,
						 ros::Time(0), 
						 ros::Duration(3.0));
  if (can_transform)
    {
      sensor_msgs::PointCloud2 cloud_out;
      cloud_in.header.stamp = ros::Time(0);
      pcl_ros::transformPointCloud(frame_new, cloud_in, cloud_out, listener);
      pub.publish (cloud_out);
    }
}
Code Example #12
void publishGUI()
{ 
  static double tLast = 0;
  static double publishInterval = 0.016;
  if(debugLevel<0 || GetTimeSec()-tLast<publishInterval)
    return;
  tLast = GetTimeSec();
  ClearGUI();
  //DrawMap();
  guiMsg.robotLocX = curLoc.x;
  guiMsg.robotLocY = curLoc.y;
  guiMsg.robotAngle = curAngle;
  localization->drawDisplay(guiMsg.lines_p1x, guiMsg.lines_p1y, guiMsg.lines_p2x, guiMsg.lines_p2y, guiMsg.lines_col, guiMsg.points_x, guiMsg.points_y, guiMsg.points_col, guiMsg.circles_x, guiMsg.circles_y, guiMsg.circles_col, 1.0);
  //drawPointCloud();
  guiPublisher.publish(guiMsg);
}
Code Example #13
    void operator()(const TimerEvent& t)
    {
        auv.updateAuv (storeforce);
        geometry_msgs::Pose pos_msg;
        kraken_msgs::imuData imu;
        /*************************************/
        pos_msg.position.x=auv._current_position_to_world[0];
        pos_msg.position.y=auv._current_position_to_world[1];
        pos_msg.position.z=auv._current_position_to_world[2];
        //tf::Quaternion quat = tf::createQuaternionFromRPY(auv._current_position_to_world[3],auv._current_position_to_world[4],auv._current_position_to_world[5]);
        tf::Quaternion quat = tf::createQuaternionFromRPY(auv._current_position_to_body[3],auv._current_position_to_body[4],auv._current_position_to_body[5]);
        //tf::Quaternion quat = tf::createQuaternionFromRPY(/*auv._current_position_to_body[3]*/0,/*auv._current_position_to_body[4]*/0,/*auv._current_position_to_body[5]*/0.78);
//        pos_msg.orientation= quat;
        pos_msg.orientation.x=quat.getX ();
        pos_msg.orientation.y=quat.getY ();
        pos_msg.orientation.z=quat.getZ ();
        pos_msg.orientation.w=quat.getW ();
        /*************************************/
        geometry_msgs::TwistStamped twist_msg;
        twist_msg.twist.linear.x=auv._current_velocity_state_to_body[0];
        twist_msg.twist.linear.y=auv._current_velocity_state_to_body[1];
        twist_msg.twist.linear.z=auv._current_velocity_state_to_body[2];
        imu.data[kraken_sensors::gyroX] = twist_msg.twist.angular.x=auv._current_velocity_state_to_body[3];
        imu.data[kraken_sensors::gyroY]= twist_msg.twist.angular.y=auv._current_velocity_state_to_body[4];
        imu.data[kraken_sensors::gyroZ]  = twist_msg.twist.angular.z=auv._current_velocity_state_to_body[5];
        twist_msg.header.frame_id='1';
        /*************************************/
        nav_msgs::Odometry odo_msg;
        odo_msg.pose.pose=pos_msg;
        odo_msg.twist.twist=twist_msg.twist;
        /*************************************/
        sensor_msgs::Imu imu_msg;
        imu_msg.angular_velocity=twist_msg.twist.angular;
        imu.data[kraken_sensors::accelX] = imu_msg.linear_acceleration.x=auv._current_accelaration_to_body[0];
        imu.data[kraken_sensors::accelY] = imu_msg.linear_acceleration.y=auv._current_accelaration_to_body[1];
        imu.data[kraken_sensors::accelZ] = imu_msg.linear_acceleration.z=auv._current_accelaration_to_body[2];
        imu_msg.orientation=pos_msg.orientation;
        //imu_pub.publish(imu_msg);
        imu.data[kraken_sensors::roll] = auv._current_position_to_body[3];
        imu.data[kraken_sensors::pitch] = auv._current_position_to_body[4];
        imu.data[kraken_sensors::yaw] = auv._current_position_to_body[5];
        imu_pub.publish(imu);
        pose_pub.publish(pos_msg);
        if(isLineNeeded)
            odo_pub.publish(odo_msg);
    }
Code Example #14
void velCallback(const geometry_msgs::Twist::ConstPtr& msg) {
	geometry_msgs::Twist twist;

	if (msg->linear.x < 0)
		twist.linear.x = 0;
	else
		twist.linear.x = new_vel * msg->linear.x;

	twist.linear.y = msg->linear.y;
	twist.linear.z = msg->linear.z;
	twist.angular.x = msg->angular.x;
	twist.angular.y = msg->angular.y;
	twist.angular.z = msg->angular.z;

	vel_pub.publish(twist);
}
Code Example #15
int main( int argc, char* argv[])
{
	init( argc, argv, "talker");

	gengetopt_args_info args;
	cmdline_parser( argc, argv, &args);

	double v = args.linearVelocity_arg;

	NodeHandle n;
	Publisher pub = n.advertise<geometry_msgs::Twist>("/mobile_base/commands/reset_odometry", 1000);

	//kobuki_msgs::Sound msg;
	geometry_msgs::Twist msg;

	//Publisher pub = n.advertise<msg_folder::msg_type>("topic name",1);


	msg.linear.x =0;

	double t = 10.0;

	ros::Rate loop_rate(10);

	int count = 0;
	int max_count = t*10;

	while( ros::ok() && count < max_count )
	{
		msg.angular.z = 1*sin(count/10.0);
		pub.publish(msg);
		ros::spinOnce();
		loop_rate.sleep() ;
		count++;

	}

	return 0;
}
Code Example #16
void pid_update() {

    /* PID position update */
    ctrl_data[0] = pid_vision_x -> update(img_center[1], dt); // OpenCV's x-axis is drone's y-axis
    ctrl_data[1] = pid_vision_y -> update(img_center[0], dt);
    //ctrl_data[2] = pid_z -> update(current_position.z, dt);
    ctrl_data[3] = 0; //yaw

    /* Control speed limiting */
    window(&ctrl_data[0], visionLimit);
    window(&ctrl_data[1], visionLimit);
    window(&ctrl_data[2], visionLimit);

    /* Publish the output control velocity from PID controller */
    geometry_msgs::Vector3 velocity;

    velocity.x = ctrl_data[0];
    velocity.y = ctrl_data[1];
    velocity.z = ctrl_data[2];

    ctrl_vel_pub.publish(velocity);


}
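The snippet above relies on a project-specific window() helper that is not shown. Judging from how it is called with a pointer and a limit (and assuming ctrl_data holds doubles), a plausible, hypothetical implementation is a symmetric clamp:

// Hypothetical clamp matching the window(&value, limit) calls above; the real implementation may differ.
static void window(double* value, double limit)
{
    if (*value > limit)
        *value = limit;
    else if (*value < -limit)
        *value = -limit;
}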
Code Example #17
int main(int argc, char **argv)
{
    StereoRectification *rectification = new StereoRectification(EXTR_FILE, INTR_FILE, CAM_WIDTH, CAM_HEIGHT, WIDTH, HEIGHT);
    StereoCamera *camera = new StereoCamera(rectification);

    init(argc, argv, "StereoCapture");

    NodeHandle nh;
    Publisher pairPublisher = nh.advertise<StereoPair>("stereoCapture/stereoPair", 1);

    StereoPairPtr pair(new StereoPair);  // allocate the message; a default-constructed Ptr is null

    Rate rate(FPS);
    FPSPrinter fpsp;

    while(ok())
    {
        camera->nextFrame();

        pair->header.stamp = Time::now();
        pair->leftImage = *camera->getLeftImage();
        pair->rightImage = *camera->getRightImage();
        pairPublisher.publish(pair);

        spinOnce();

        fpsp.print();

        rate.sleep();
    }

    delete rectification;
    delete camera;

    return 0;
}
Code Example #18
void publishLocation(bool limitRate)
{
  static double tLast = 0;
  if(GetTimeSec()-tLast<0.03 && limitRate)
    return;
  tLast = GetTimeSec();
  LocalizationMsg msg;
  localization->computeLocation(curLoc, curAngle);
  msg.timeStamp = GetTimeSec();
  msg.x = curLoc.x;
  msg.y = curLoc.y;
  msg.angle = curAngle;
  msg.map = string(localization->getCurrentMapName());
  
  localization->getUncertainty(msg.angleUncertainty, msg.locationUncertainty);
  
  VectorLocalization2D::EvalValues laserEval, pointCloudEval;
  localization->getEvalValues(laserEval,pointCloudEval);
  msg.laserNumCorrespondences = laserEval.numCorrespondences;
  msg.laserNumObservedPoints = laserEval.numObservedPoints;
  msg.laserStage0Weights = laserEval.stage0Weights;
  msg.laserStageRWeights = laserEval.stageRWeights;
  msg.laserRunTime = laserEval.runTime;
  msg.lastLaserRunTime = laserEval.lastRunTime;
  msg.laserMeanSqError = laserEval.meanSqError;
  
  msg.pointCloudNumCorrespondences = pointCloudEval.numCorrespondences;
  msg.pointCloudNumObservedPoints = pointCloudEval.numObservedPoints;
  msg.pointCloudStage0Weights = pointCloudEval.stage0Weights;
  msg.pointCloudStageRWeights = pointCloudEval.stageRWeights;
  msg.pointCloudRunTime = pointCloudEval.runTime;
  msg.lastPointCloudRunTime = pointCloudEval.lastRunTime;
  msg.pointCloudMeanSqError = pointCloudEval.meanSqError;
  
  localizationPublisher.publish(msg);
  
  //Publish particles
  vector<Particle2D> particles;
  localization->getParticles(particles);
  particlesMsg.poses.resize(particles.size()); 
  geometry_msgs::Pose particle;
  for(unsigned int i=0; i<particles.size(); i++){
    particle.position.x = particles[i].loc.x;
    particle.position.y = particles[i].loc.y;
    particle.position.z = 0;
    particle.orientation.w = cos(0.5*particles[i].angle);
    particle.orientation.x = 0;
    particle.orientation.y = 0;
    particle.orientation.z = sin(0.5*particles[i].angle);
    particlesMsg.poses[i] = particle;
  }
  particlesPublisher.publish(particlesMsg);
  
  //Publish map to base_footprint tf
  try{
    tf::StampedTransform odomToBaseTf;
    transformListener->lookupTransform("odom","base_footprint",ros::Time(0), odomToBaseTf);
    
    vector2f map_base_trans = curLoc;
    float map_base_rot = curAngle;
    
    vector2f odom_base_trans(odomToBaseTf.getOrigin().x(), odomToBaseTf.getOrigin().y());
    float odom_base_rot = 2.0*atan2(odomToBaseTf.getRotation().z(), odomToBaseTf.getRotation().w());
    
    vector2f base_odom_trans = -odom_base_trans.rotate(-odom_base_rot);
    float base_odom_rot = -odom_base_rot;
    
    vector2f map_odom_trans = map_base_trans + base_odom_trans.rotate(map_base_rot);
    float map_odom_rot = angle_mod(map_base_rot + base_odom_rot);
    
    tf::Transform mapToOdomTf;
    mapToOdomTf.setOrigin(tf::Vector3(V2COMP(map_odom_trans), 0.0));
    mapToOdomTf.setRotation(tf::Quaternion(tf::Vector3(0,0,1),map_odom_rot));
    transformBroadcaster->sendTransform(tf::StampedTransform(mapToOdomTf, ros::Time::now(), "map", "odom"));
  }
  catch (tf::TransformException ex){
    //Do nothing: We'll just try again next time
  }
}
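For comparison, the manual 2D algebra in the tf block above computes the map->odom transform as T_map_odom = T_map_base * inverse(T_odom_base). The same result could be obtained by letting tf compose full transforms; a minimal sketch reusing curLoc, curAngle and odomToBaseTf from the code above (mapToBaseTf is a name introduced here, not from the original):

    tf::Transform mapToBaseTf;
    mapToBaseTf.setOrigin(tf::Vector3(curLoc.x, curLoc.y, 0.0));
    mapToBaseTf.setRotation(tf::Quaternion(tf::Vector3(0, 0, 1), curAngle));
    // T_map_odom = T_map_base * T_odom_base^-1
    tf::Transform mapToOdomTf = mapToBaseTf * odomToBaseTf.inverse();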
Code Example #19
void gpsFixCallBack (geometry_msgs::PoseStamped gpsPose)
{
    geometry_msgs::PoseStamped gpsFixPoseEstimate;

    // find poseAtRequestTime
    bool odomMovementFound = false;
    pair<geometry_msgs::Pose, ros::Time> prevFront;

    if (cb.size() > 0)
    {
        odomMovementFound = true;
        cout << "odom found" << endl;
        prevFront = cb.front();

        if (gpsPose.header.stamp >= cb.front().second)
        {
            while (gpsPose.header.stamp > prevFront.second && cb.size() > 0)
            {
                prevFront = cb.front();
                cb.pop_front();
            }
        }
    }

    // calculate Odom difference
    double deltaX = 0;
    double deltaY = 0;

    if (odomMovementFound)
    {
        deltaX = (current.first.position.x - prevFront.first.position.x);
        deltaY = (current.first.position.y - prevFront.first.position.y);
    }

    gpsFixPoseEstimate.pose.position.x = gpsPose.pose.position.x + deltaX;
    gpsFixPoseEstimate.pose.position.y = gpsPose.pose.position.y + deltaY;
    tf::Quaternion gpsQaut, odomCurrentQuat, odomOldQuat;

    if (odomMovementFound)
    {
        tf::quaternionMsgToTF (current.first.orientation, odomCurrentQuat);
        tf::quaternionMsgToTF (prevFront.first.orientation, odomOldQuat);
    }

    tf::quaternionMsgToTF (gpsPose.pose.orientation, gpsQaut);
    double deltaYaw = 0;

    if (odomMovementFound)
    {
        deltaYaw = (tf::getYaw (odomCurrentQuat) - tf::getYaw (odomOldQuat));
    }

    double newYaw = tf::getYaw (gpsQaut) + deltaYaw;
    gpsFixPoseEstimate.pose.orientation = tf::createQuaternionMsgFromYaw (newYaw);
    cout << "new Yaw: "  << newYaw << endl;
    // create covariance


    // publish new estimated pose
    gpsFixPoseEstimate.header.stamp = ros::Time::now();
    gpsFixPoseEstimate.header.frame_id = gpsFrame;
    geometry_msgs::PoseStamped inMapFrame;

    try
    {
        tfListener->transformPose ("/map", ros::Time::now(), gpsFixPoseEstimate, gpsFrame, inMapFrame);
        geometry_msgs::PoseWithCovarianceStamped resultPose;
        resultPose.pose.pose = inMapFrame.pose;
        resultPose.header = inMapFrame.header;


        if (odomMovementFound)
        {
            resultPose.pose.covariance[0] = sqrt (abs (current.first.position.x - prevFront.first.position.x)); // X
            resultPose.pose.covariance[7] = sqrt (abs (current.first.position.y - prevFront.first.position.y)); // Y
            resultPose.pose.covariance[35] = sqrt (abs (newYaw)); // Yaw
        }
        else
        {
            resultPose.pose.covariance[0]  = 0.1;// X
            resultPose.pose.covariance[7]  = 0.1; // Y
            resultPose.pose.covariance[35] = 0.1; // Yaw
        }

        gpsOdomCombinedLocalisationPublisher.publish (resultPose);
    }
    catch (tf::TransformException ex)
    {
        ROS_ERROR ("%s", ex.what());
    }

}
Code Example #20
int main(int argc, char** argv)
{
  // Initialize node and publisher.
  init(argc, argv, "opencv_demo");
  NodeHandle nodeHandle("~");
  Publisher publisher = nodeHandle.advertise<grid_map_msgs::GridMap>("grid_map", 1, true);
  const bool useTransparency = false;

  // Create grid map.
  GridMap map({"original", "elevation"});
  map.setFrameId("map");
  map.setGeometry(Length(1.2, 2.0), 0.01);
  ROS_INFO("Created map with size %f x %f m (%i x %i cells).",
    map.getLength().x(), map.getLength().y(),
    map.getSize()(0), map.getSize()(1));

  // Add data.
  if (!useTransparency) map["original"].setZero();
  grid_map::Polygon polygon;
  polygon.setFrameId(map.getFrameId());
  polygon.addVertex(Position( 0.480,  0.000));
  polygon.addVertex(Position( 0.164,  0.155));
  polygon.addVertex(Position( 0.116,  0.500));
  polygon.addVertex(Position(-0.133,  0.250));
  polygon.addVertex(Position(-0.480,  0.399));
  polygon.addVertex(Position(-0.316,  0.000));
  polygon.addVertex(Position(-0.480, -0.399));
  polygon.addVertex(Position(-0.133, -0.250));
  polygon.addVertex(Position( 0.116, -0.500));
  polygon.addVertex(Position( 0.164, -0.155));
  polygon.addVertex(Position( 0.480,  0.000));

  for (grid_map::PolygonIterator iterator(map, polygon); !iterator.isPastEnd(); ++iterator) {
    map.at("original", *iterator) = 0.3;
  }

  // Convert to CV image.
  cv::Mat originalImage;
  if (useTransparency) {
    // Note: The template parameters have to be set based on your encoding
    // of the image. For 8-bit images use `unsigned char`.
    GridMapCvConverter::toImage<unsigned short, 4>(map, "original", CV_16UC4, 0.0, 0.3, originalImage);
  } else {
    GridMapCvConverter::toImage<unsigned short, 1>(map, "original", CV_16UC1, 0.0, 0.3, originalImage);
  }

  // Create OpenCV window.
  cv::namedWindow("OpenCV Demo");

  // Work with copy of image in a loop.
  while (nodeHandle.ok()) {

    // Initialize.
    ros::Time time = ros::Time::now();
    map.setTimestamp(time.toNSec());
    cv::Mat modifiedImage;
    int blurRadius = 200 - abs((int)(200.0 * sin(time.toSec())));
    blurRadius = blurRadius - (blurRadius % 2) + 1;

    // Apply Gaussian blur.
    cv::GaussianBlur(originalImage, modifiedImage, cv::Size(blurRadius, blurRadius), 0.0, 0.0);

    // Visualize as image.
    cv::imshow("OpenCV Demo", modifiedImage);
    cv::waitKey(40);

    // Convert resulting image to a grid map.
    if (useTransparency) {
      GridMapCvConverter::addLayerFromImage<unsigned short, 4>(modifiedImage, "elevation", map, 0.0, 0.3, 0.3);
    } else {
      GridMapCvConverter::addLayerFromImage<unsigned short, 1>(modifiedImage, "elevation", map, 0.0, 0.3);
    }

    // Publish grid map.
    grid_map_msgs::GridMap message;
    GridMapRosConverter::toMessage(map, message);
    publisher.publish(message);
    ROS_INFO_STREAM("Published image and grid map with blur radius " << blurRadius << ".");
  }

  return 0;
}
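As the note inside the code says, the toImage template parameters must match the image encoding. For an 8-bit, 3-channel image the corresponding call would presumably look like the sketch below (not part of the original demo):

  // Hypothetical 8-bit, 3-channel variant of the 16-bit conversion shown above.
  cv::Mat originalImage8;
  GridMapCvConverter::toImage<unsigned char, 3>(map, "original", CV_8UC3, 0.0, 0.3, originalImage8);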
Code Example #21
void ImageCB(const sensor_msgs::ImageConstPtr& msg) {
    cv_bridge::CvImagePtr cv_ptr;
    Mat frame;
    Mat output;

    try {
        cv_ptr = cv_bridge::toCvCopy(msg, "bgr8");
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("CV-Bridge error: %s", e.what());
        return;
    }

    frame = cv_ptr->image;

    for(int r = 0; r < frame.rows; r++) {
        unsigned char* row = frame.ptr<unsigned char>(r);
        for(int c = 0; c < frame.cols * frame.channels(); c+= frame.channels()) {
            auto& blue = row[c];
            auto& green = row[c+1];
            auto& red = row[c+2];
            if(blue == 255 && green == 0 && red == 0) {
                blue = 0;
            } else if(blue != 0 || green != 0 || red != 0) {
                blue = green = red = 255;
            }
        }
    }
    cvtColor(frame, frame, CV_BGR2GRAY);

    
    //Circuit race steerer has more rectangles for sharper turns
    auto leftRect = Rect(0,0,128,480);
    auto leftCenterRect = Rect(128,0,128,480);
    auto centerRect = Rect(256,0,128,480);
    auto rightCenterRect = Rect(384,0,128,480);
    auto rightRect = Rect(512,0,128,480);

    auto left = frame(leftRect);
    auto leftCenter = frame(leftCenterRect);
    auto center = frame(centerRect);
    auto rightCenter = frame(rightCenterRect);
    auto right = frame(rightRect);

    auto leftCount = countNonZero(left);
    auto leftCenterCount = countNonZero(leftCenter);
    auto centerCount = countNonZero(center);
    auto rightCenterCount = countNonZero(rightCenter);
    auto rightCount = countNonZero(right);
    
    // Bias towards going straight
    centerCount *= 0.90;

    vector<int> counts = {leftCount, leftCenterCount, centerCount, rightCenterCount, rightCount};

    auto min_index = distance(counts.begin(),min_element(counts.begin(),counts.end()));

    rr_platform::steering steering_msg;
    steering_msg.header.stamp = Time::now();

//circuit
    switch(min_index) {
    case 0: // left
        rectangle(frame, leftRect, Scalar(255), 3);
        steering_msg.angle = -30;
        break;
    case 1: // left center
        rectangle(frame, leftCenterRect, Scalar(255), 3);
        steering_msg.angle = -10;
        break;
    case 2: // center
        rectangle(frame, centerRect, Scalar(255), 3);
        steering_msg.angle = 0;
        break;
    case 3: // right center
        rectangle(frame, rightCenterRect, Scalar(255), 3);
        steering_msg.angle = 10;
        break;
    case 4: // right
        rectangle(frame, rightRect, Scalar(255), 3);
        steering_msg.angle = 30;
        break;
    }

    if (speed == 0) {
        steering_msg.angle = 0;
    }
    //cout << "Speed: " << speed;

    steer_pub.publish(steering_msg);

    sensor_msgs::Image outmsg;

    cv_ptr->image = frame;
    cv_ptr->encoding = "mono8";
    cv_ptr->toImageMsg(outmsg);
    img_pub.publish(outmsg);
}
Code Example #22
File: odomPub.cpp  Project: mdombro/CSC232
int main(int argc, char* argv[]) {
    gengetopt_args_info args;
    cmdline_parser(argc,argv,&args);
    float coeffs[6];
    coeffs[0] = args.a1_arg;
    coeffs[1] = args.a2_arg;
    coeffs[2] = args.a3_arg;
    coeffs[3] = args.a4_arg;
    coeffs[4] = args.a5_arg;
    coeffs[5] = args.a6_arg;
    sigmaHit = args.sigma_arg;
    angleMin = args.angleMin_arg;
    angleMax = args.angleMax_arg;
    angleMin *= (M_PI/180.0);
    angleMax *= (M_PI/180.0);
    angleIncrement = (angleMax-angleMin)/args.numBeams_arg;
    numBeams = args.numBeams_arg;
    float trueDistances[numBeams];

    float noisyDistances[numBeams];

    float commandReal[2];

    init(argc, argv, "Odom");
    nav_msgs::Odometry msg;
    sensor_msgs::LaserScan msg2;
    NodeHandle n;
    Publisher pub = n.advertise<nav_msgs::Odometry>("odom", 1);
    Publisher las = n.advertise<sensor_msgs::LaserScan>("/scan", 1);
    Subscriber sub = n.subscribe("navi", 1000, cmmdUpdate);  // update the command velocities
    Subscriber rst = n.subscribe("/mobile_base/commands/reset_odometry", 1000, reset);
    ros::Duration(1.3).sleep();
    ros::Rate loop_rate(10);
    while (ros::ok()) {
        sampleMotionModel(command, commandReal, pose, coeffs, 0.1);
        yawToQuaternion(pose[2], quaternion);
        msg.pose.pose.position.x = pose[0];
        msg.pose.pose.position.y = pose[1];
        msg.pose.pose.orientation.w = quaternion[0];
        msg.pose.pose.orientation.x = quaternion[1];
        msg.pose.pose.orientation.y = quaternion[2];
        msg.pose.pose.orientation.z = quaternion[3];
        msg.twist.twist.linear.x = commandReal[0];
        msg.twist.twist.angular.z = commandReal[1];

        // Laser scanning section
        calcTrueDistance(trueDistances, numBeams, angleIncrement);
        calcNoisyDistance(noisyDistances, trueDistances, sigmaHit, numBeams);
        msg2.angle_max = angleMax;
        msg2.angle_min = angleMin;
        msg2.time_increment = 0;
        msg2.angle_increment = angleIncrement;
        msg2.scan_time = 0.1;
        msg2.range_min = 0;
        msg2.range_max = maxRange;
        msg2.ranges.resize(numBeams);
        for (int i = 0; i < numBeams; i++) {
            msg2.ranges[i] = noisyDistances[i];
        }
        ros::spinOnce();
        pub.publish(msg);
        las.publish(msg2);
        loop_rate.sleep();
    }
    return 0;
}
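yawToQuaternion() is a project helper that is not shown in this snippet. Given that quaternion[0..3] is consumed as {w, x, y, z} when filling the odometry message above, a plausible, hypothetical implementation is:

// Hypothetical helper: quaternion for a pure rotation of `yaw` radians about the z axis,
// stored as {w, x, y, z} to match how the message fields are filled above.
void yawToQuaternion(float yaw, float quaternion[4])
{
    quaternion[0] = cos(yaw / 2.0f);  // w
    quaternion[1] = 0.0f;             // x
    quaternion[2] = 0.0f;             // y
    quaternion[3] = sin(yaw / 2.0f);  // z
}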
Code Example #23
/* This method is called whenever a new message arrives */
void irCallBack(const sensor_msgs::LaserScanConstPtr& laserScanMsg)
{
   //Variable assignment for ease of use	
   float angle_min = laserScanMsg->angle_min;	//angle of IR sensor to right (rad)
   float angle_max  = laserScanMsg->angle_max;	//angle of IR sensor to left  (rad)
   float angle_increment = laserScanMsg->angle_increment;
// double irMax 	    = laserScanMsg->range_max;	//maximum acceptable IR value
// double irMin 	    = laserScanMsg->range_min;	//minimum acceptable IR value

// std::cout << "# rays: " << (angle_max-angle_min)/(angle_increment) << std::endl;

   /* Creating an imaginary wall passing through the cones*/
   int point1 = ANGLE_MIN;
   int point2 = ANGLE_MIN;
   double myRanges[NUM_RAYS];

   for (int i=0; i < NUM_RAYS; i++)
   {
      // Copying the distances into myRanges array
      myRanges[i] = laserScanMsg->ranges[i];

      if((myRanges[i] < 0.01 && myRanges[i] > -0.01)){// || myRanges[i] > 4.0){
        myRanges[i] = MAX_DISTANCE;
      }
/*
      if ( i==7 )
         std::cout << "myRanges[7]: " << myRanges[7] << std::endl;
      if ( i==NUM_RAYS/2)
         std::cout << "myRanges[NUM_RAYS/2]: " << myRanges[NUM_RAYS/2] << std::endl;
      if ( i==NUM_RAYS-7)
         std::cout << "myRanges[NUM_RAYS-7]: " << myRanges[NUM_RAYS-7] << std::endl;
*/

      // If there is an empty space between the cones
      if(myRanges[i] < FIELD || i == NUM_RAYS-1)
      {
         point2 = i;
         for(int j = point2-1; point1 < j; j--)
         {
            myRanges[j] = (myRanges[point1]+myRanges[point2])/2;
         }
         point1 = point2+1;
      }

   }


   double fxcomponents[NUM_RAYS];
   double xcomponents[NUM_RAYS];
   double dx[NUM_RAYS];

   for(int i = 0; i < NUM_RAYS; i++)
   {
      if (i < NUM_RAYS/2)
      {
         //Geometry and Calculus skills!	
         fxcomponents[i] = myRanges[i] * cos((angle_min+ANGLE_MIN*angle_increment) + (i * angle_increment));
         xcomponents[i] = myRanges[i] * sin((angle_min+ANGLE_MIN*angle_increment) + (i * angle_increment));
      }
      else
      {
         fxcomponents[i] = myRanges[i] * cos((i-NUM_RAYS/2) * angle_increment);
         xcomponents[i] = myRanges[i] * sin((i-NUM_RAYS/2) * angle_increment);
      }
   }

   /* Calculating the area enclosed by the rays */
   double area = 0;
   for(int i = 0; i < NUM_RAYS; i++)
   {
      if ( i == 0)
         dx[i] = 0;
      else
      {
         if (((xcomponents[i] > 0) & (xcomponents[i-1] >= 0)) || ((xcomponents[i] < 0) & (xcomponents[i-1] <= 0)))
	    dx[i] = abs(xcomponents[i] - xcomponents[i-1]);
         else
	    dx[i] = abs(xcomponents[i] + xcomponents[i-1]);
      }

      area += (fxcomponents[i] * dx[i]);	
   }

   /* Finding x-comp of center of mass */
   double Gx = 0;
   for (int i = 0; i < NUM_RAYS; i++)
   {
      Gx += ((xcomponents[i]*fxcomponents[i]*dx[i])/area);
   }

   /* Finding y-comp of center of mass */
   double Gy = 0;
   for (int i = 0; i < NUM_RAYS; i++)
   {
      Gy += ((fxcomponents[i]*fxcomponents[i]*dx[i])/area/2);
   }

   /*Finding the angle from the center to center of mass */
   double theta = atan(Gy/Gx);
   double thetaDeg = theta*180/M_PI;

   double steering = calculateSteering(theta);

   /* Reverse the steering if using hardware*/
   // This is done because the sensor is physically up-side-down
//   if (SIMULATOR_ON==0)
//   	steering = -steering;

   std::cout << "Init steering: " << steering << std::endl;
   double steeringBck;
   double throttle = 0.5;

   steering = lidar_ptr->sharpenSteering(steering);

   /* Backing up */
   for (int i=NUM_RAYS*4/10; i < NUM_RAYS*6/10; i++)
   {
      if (myRanges[i] < 0.6)
      {
         steeringBck = -steering;
         throttle = -0.4;
         dataPub.publish(lidar_ptr->vectorCalculator(throttle, steeringBck));
         usleep(10000);
         throttle = 0;
         dataPub.publish(lidar_ptr->vectorCalculator(throttle, steeringBck));
         usleep(10000);
         throttle = -0.4;
         dataPub.publish(lidar_ptr->vectorCalculator(throttle, steeringBck));
         //ROS_INFO("I Published: throttle - %f, steering - %f", throttle, steering);
         usleep(100000);
         break;
      }
   }

   dataPub.publish(lidar_ptr->vectorCalculator(throttle, steering));
   ROS_INFO("I Published: throttle - %f, steering - %f", throttle, steering);

   double irFront = myRanges[NUM_RAYS/2];//laserScanMsg->ranges[NUM_RAYS/2];
   double irRight = myRanges[7];
   double irLeft  = myRanges[NUM_RAYS-7];

   /* Some useful stuff to look after */
   ROS_INFO("I received a new message!\nfront: %f\nright: %f\nleft: %f\n", irFront, irRight, irLeft);
   std::cout << "area: " << area << std::endl;
   std::cout << "Center of mass (x,y): (" << Gx << ", " << Gy << ")" << std::endl;
   std::cout << "theta (Gy/Gx): " << theta << std::endl;
   std::cout << "thetaDEG: " << thetaDeg << std::endl;
   std::cout << "Throttle: " << throttle << ", Steering: " << steering << std::endl;
   std::cout << "*****************************\n" << std::endl;
}
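For reference, the area and center-of-mass loops above are discrete (Riemann-sum) versions of the standard centroid of the region under a curve f(x), with f_i = fxcomponents[i], x_i = xcomponents[i] and Delta x_i = dx[i]:

\[
A \approx \sum_i f_i \,\Delta x_i, \qquad
G_x \approx \frac{1}{A}\sum_i x_i f_i \,\Delta x_i, \qquad
G_y \approx \frac{1}{A}\sum_i \frac{f_i^{2}}{2} \,\Delta x_i
\]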
Code Example #24
/*	if ( tcgetattr ( USB, &tty ) != 0 )
	{
		ROS_INFO("Error :%d , from tcgetattr:%s \n",errno,strerror(errno));
	}
	setParameters(tty,USB);
	return USB;
}*/
void handleRecievedData(char recv_buf[],Publisher config_pub,Publisher arm_pub)
{
	std_msgs::String msg;
	char type[4]={'\0'};
	strncpy(type,recv_buf,3);
	if(!strcmp(type,"CAM"))
	{
		char temp[20]={'\0'};
		strncpy(temp,recv_buf+4,sizeof(temp)-1);	// bound the copy to the destination buffer
		ROS_INFO("sent data : %s",temp);
		msg.data = temp;
		config_pub.publish(msg);
	}
	else if(!strcmp(type,"ARM"))
	{
		char temp[20]={'\0'};
		strncpy(temp,recv_buf+4,sizeof(temp)-1);
		ROS_INFO("sent data : %s",temp);
		msg.data = temp;
		arm_pub.publish(msg);
	}
	else if(!strcmp(type,"RLY"))
	{
		char temp[20]={'\0'};
		strncpy(temp,recv_buf+4,sizeof(temp)-1);
		msg.data = temp;
		rly_pub.publish(msg);
	}
	else if(!strcmp(type,"DRV"))
	{
		char temp[20]={'\0'};
		strncpy(temp,recv_buf+4,sizeof(temp)-1);
		msg.data = temp;
		drv_pub.publish(msg);
	}
	else if(!strcmp(type,"CCM"))
	{
		char temp[20]={'\0'};
		strncpy(temp,recv_buf+4,sizeof(temp)-1);
		msg.data = temp;
		cam_pub.publish(msg);
	}
	else if(!strcmp(type,"PTZ"))
	{
		char temp[20]={'\0'};
		int var_arr[7] = {0};
		int i = 0;
		strncpy(temp,recv_buf+4,sizeof(temp)-1);
		char *pch = strtok(temp,",");
		while(pch != NULL)
		{
			var_arr[i++] = atoi(pch);
			pch = strtok(NULL,",");
		}
//		Axis axis;
//		axis.pan = (float)var_arr[0];axis.tilt = (float)var_arr[1];axis.zoom = (float)var_arr[2];
//		axis.focus = (float)var_arr[3];axis.brightness = (float)var_arr[4];axis.autofocus =(bool)var_arr[6];
//		ptz_pub.publish(axis);
	}
}
Code Example #25
File: move_demo_node.cpp  Project: DHaylock/grid_map
int main(int argc, char** argv)
{
  // Initialize node and publisher.
  init(argc, argv, "move_demo");
  NodeHandle nh("~");
  Publisher publisher = nh.advertise<grid_map_msgs::GridMap>("grid_map", 1, true);

  // Create grid map.
  GridMap map({"layer"});
  map.setFrameId("map");
  map.setGeometry(Length(0.7, 0.7), 0.01, Position(0.0, 0.0));
  ROS_INFO("Created map with size %f x %f m (%i x %i cells).\n The center of the map is located at (%f, %f) in the %s frame.",
    map.getLength().x(), map.getLength().y(),
    map.getSize()(0), map.getSize()(1),
    map.getPosition().x(), map.getPosition().y(), map.getFrameId().c_str());
  map["layer"].setRandom();

  bool useMoveMethod = true;
  while (nh.ok()) {

    if (useMoveMethod) {
      ROS_INFO("Using the `move(...)` method.");
    } else {
      ROS_INFO("Using the `setPosition(...)` method.");
    }

    // Work with temporary map in a loop.
    GridMap tempMap(map);
    Rate rate(10.0);
    ros::Time startTime = ros::Time::now();
    ros::Duration duration(0.0);

    while (duration <= ros::Duration(10.0)) {
      ros::Time time = ros::Time::now();
      duration = time - startTime;

      // Change position of the map with either the `move` or `setPosition` method.
      const double t = duration.toSec();
      Position newPosition = 0.03 * t * Position(cos(t), sin(t));

      if (useMoveMethod) {
        tempMap.move(newPosition);
      } else {
        tempMap.setPosition(newPosition);
      }

      // Publish grid map.
      tempMap.setTimestamp(time.toNSec());
      grid_map_msgs::GridMap message;
      GridMapRosConverter::toMessage(tempMap, message);
      publisher.publish(message);
      ROS_DEBUG("Grid map (duration %f) published with new position [%f, %f].",
                duration.toSec(), tempMap.getPosition().x(), tempMap.getPosition().y());
      rate.sleep();
    }

    useMoveMethod = !useMoveMethod;
  }

  return 0;
}
Code Example #26
void depthCallback(const sensor_msgs::Image &msg)
{
  //Copy the depth data
  if(debugLevel>0){
    printf("Depth Message t:%f\n", msg.header.stamp.toSec());
  }
  const uint8_t* ptrSrc = msg.data.data();
  uint16_t depth[640*480];
  memcpy(depth, ptrSrc, 640*480*(sizeof(uint16_t)));
  
  if(!usePointCloud)
    return;
  
  tf::StampedTransform baseLinkToKinect;
  try{
    transformListener->lookupTransform("base_footprint", msg.header.frame_id, ros::Time(0), baseLinkToKinect);
  }
  catch(tf::TransformException ex){
    ROS_ERROR("%s",ex.what());
    return; // without a valid transform, the depth points cannot be placed in the robot frame
  }
  tf::Vector3 translationTf = baseLinkToKinect.getOrigin();
  tf::Quaternion rotationTf = baseLinkToKinect.getRotation();
  Quaternionf rotQuat3D(rotationTf.w(), rotationTf.x(), rotationTf.y(), rotationTf.z());
  Matrix3f rotMatrix(rotQuat3D);
  kinectToRobotTransform.m11 = rotMatrix(0,0);
  kinectToRobotTransform.m12 = rotMatrix(0,1);
  kinectToRobotTransform.m13 = rotMatrix(0,2);
  kinectToRobotTransform.m14 = translationTf.x();
  kinectToRobotTransform.m21 = rotMatrix(1,0);
  kinectToRobotTransform.m22 = rotMatrix(1,1);
  kinectToRobotTransform.m23 = rotMatrix(1,2);
  kinectToRobotTransform.m24 = translationTf.y();
  kinectToRobotTransform.m31 = rotMatrix(2,0);
  kinectToRobotTransform.m32 = rotMatrix(2,1);
  kinectToRobotTransform.m33 = rotMatrix(2,2);
  kinectToRobotTransform.m34 = translationTf.z();
  kinectToRobotTransform.m41 = 0;
  kinectToRobotTransform.m42 = 0;
  kinectToRobotTransform.m43 = 0;
  kinectToRobotTransform.m44 = 1;
  
  //Generate filtered point cloud  
  vector<vector3f> filteredPointCloud;
  vector<vector3f> pointCloudNormals;
  vector<vector3f> outlierCloud;
  vector<vector2i> pixelLocs;
  vector<PlanePolygon> planePolygons;
  
  planeFilter.GenerateFilteredPointCloud(depth, filteredPointCloud, pixelLocs, pointCloudNormals, outlierCloud, planePolygons);
  
  //Transform from kinect coordinates to robot coordinates
  
  for(unsigned int i=0; i<filteredPointCloud.size(); i++){
    filteredPointCloud[i] = filteredPointCloud[i].transform(kinectToRobotTransform);
    pointCloudNormals[i] = pointCloudNormals[i].transform(kinectToRobotTransform);
  }
  

  if(debugLevel>0){
    filteredPointCloudMsg.points.clear();
    filteredPointCloudMsg.header.stamp = ros::Time::now();
    
    vector<geometry_msgs::Point32>* points = &(filteredPointCloudMsg.points);
    geometry_msgs::Point32 p;
    for(int i=0; i<(int)filteredPointCloud.size(); i++){
      p.x = filteredPointCloud[i].x;
      p.y = filteredPointCloud[i].y;
      p.z = filteredPointCloud[i].z;
      points->push_back(p);
    }
    filteredPointCloudPublisher.publish(filteredPointCloudMsg);
  }
  
  //Call particle filter
  {
    vector<vector2f> pointCloud2D, pointCloudNormals2D;
    
    for(unsigned int i=0; i<filteredPointCloud.size(); i++){
      if(fabs(pointCloudNormals[i].z)>sin(RAD(30.0)))
        continue;
      
      vector2f curNormal(V2COMP(pointCloudNormals[i]));
      vector2f curPoint(V2COMP(filteredPointCloud[i]));
      curNormal.normalize();
      pointCloudNormals2D.push_back(curNormal);
      pointCloud2D.push_back(curPoint);
    }
    
    localization->refinePointCloud(pointCloud2D, pointCloudNormals2D, pointCloudParams);
    localization->updatePointCloud(pointCloud2D, pointCloudNormals2D, motionParams, pointCloudParams);
    localization->resample(VectorLocalization2D::LowVarianceResampling);
    localization->computeLocation(curLoc,curAngle);
  }
}
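The m11..m44 assignments above simply fill the 4x4 homogeneous transform that maps kinect-frame points into the robot (base_footprint) frame, built from the rotation matrix R and translation t obtained via tf:

\[
T_{kinect \to robot} = \begin{pmatrix} R & t \\ 0 & 1 \end{pmatrix}
\]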