Example #1
File: run.cpp  Project: 39M/Matrice100
int my_callback(int data_type, int data_len, char *content)
{
	printf("enter callback..\n");
	g_lock.enter();
	if (e_image == data_type && NULL != content)
	{
		printf("callback: type is image..\n");
		// copy the SDK payload into a local struct, then into the preallocated global image buffers
		image_data data;
		memcpy((char*)&data, content, sizeof(data));
		memcpy(g_imleft.data, data.m_greyscale_image_left[selected_vbus], IMAGE_SIZE);
		memcpy(g_imright.data, data.m_greyscale_image_right[selected_vbus], IMAGE_SIZE);
		memcpy(g_depth.data, data.m_depth_image[selected_vbus], IMAGE_SIZE * 2);	// depth is 16 bit, hence 2 bytes per pixel
	}
	g_lock.leave();
	g_event.set_event();
	return 0;
}
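Note: every example on this page uses the same Guidance SDK callback signature, registered with the SDK before transfer starts. The following is a minimal sketch of that setup, with function names taken from the DJI Guidance SDK sample code (init_transfer, select_*, set_sdk_event_handler, start_transfer); treat the exact signatures as assumptions to be checked against your DJI_guidance.h.

#include "DJI_guidance.h"

int main(int argc, char** argv)
{
    // open the link to the Guidance core; non-zero means failure in the SDK samples
    if (init_transfer() != 0) return -1;

    // subscribe only to the streams the callback handles
    select_imu();
    select_velocity();
    select_obstacle_distance();
    select_ultrasonic();
    select_greyscale_image(e_vbus1, true);     // left image of camera pair 1
    select_greyscale_image(e_vbus1, false);    // right image of camera pair 1
    select_depth_image(e_vbus1);

    set_sdk_event_handler(my_callback);        // register the callback shown above
    start_transfer();                          // data now arrives asynchronously in my_callback

    // ... run until done ...

    stop_transfer();
    release_transfer();
    return 0;
}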
Example #2
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
    /*
    if (e_image == data_type && NULL != content)
    {        
        image_data* data = (image_data*)content;

		if ( data->m_greyscale_image_left[CAMERA_ID] ){
			memcpy(g_greyscale_image_left.data, data->m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
			imshow("left",  g_greyscale_image_left);
			// publish left greyscale image
			cv_bridge::CvImage left_8;
			g_greyscale_image_left.copyTo(left_8.image);
			left_8.header.frame_id  = "guidance";
			left_8.header.stamp	= ros::Time::now();
			left_8.encoding		= sensor_msgs::image_encodings::MONO8;
			left_image_pub.publish(left_8.toImageMsg());
		}
		if ( data->m_greyscale_image_right[CAMERA_ID] ){
			memcpy(g_greyscale_image_right.data, data->m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
			imshow("right", g_greyscale_image_right);
			// publish right greyscale image
			cv_bridge::CvImage right_8;
			g_greyscale_image_right.copyTo(right_8.image);
			right_8.header.frame_id  = "guidance";
			right_8.header.stamp	 = ros::Time::now();
			right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
			right_image_pub.publish(right_8.toImageMsg());
		}
		if ( data->m_depth_image[CAMERA_ID] ){
			memcpy(g_depth.data, data->m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);
			g_depth.convertTo(depth8, CV_8UC1);
			imshow("depth", depth8);
			//publish depth image
			cv_bridge::CvImage depth_16;
			g_depth.copyTo(depth_16.image);
			depth_16.header.frame_id  = "guidance";
			depth_16.header.stamp	  = ros::Time::now();
			depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
			depth_image_pub.publish(depth_16.toImageMsg());
		}
		
        key = waitKey(1);
    }
    */

    /* imu */
/*
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
 	
    	// publish imu data
		geometry_msgs::TransformStamped g_imu;
		g_imu.header.frame_id = "guidance";
		g_imu.header.stamp    = ros::Time::now();
		g_imu.transform.translation.x = imu_data->acc_x;
		g_imu.transform.translation.y = imu_data->acc_y;
		g_imu.transform.translation.z = imu_data->acc_z;
		g_imu.transform.rotation.w = imu_data->q[0];
		g_imu.transform.rotation.x = imu_data->q[1];
		g_imu.transform.rotation.y = imu_data->q[2];
		g_imu.transform.rotation.z = imu_data->q[3];
		imu_pub.publish(g_imu);
    }
 */
    /* velocity */
/*
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
	
		// publish velocity
		geometry_msgs::Vector3Stamped g_vo;
		g_vo.header.frame_id = "guidance";
		g_vo.header.stamp    = ros::Time::now();
		g_vo.vector.x = 0.001f * vo->vx;
		g_vo.vector.y = 0.001f * vo->vy;
		g_vo.vector.z = 0.001f * vo->vz;
		velocity_pub.publish(g_vo);
    }
*/
    /* obstacle distance */
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
		printf( "\n" );

		// publish obstacle distance
		sensor_msgs::LaserScan g_oa;
		g_oa.ranges.resize(CAMERA_PAIR_NUM);
		g_oa.header.frame_id = "guidance";
		g_oa.header.stamp    = ros::Time::now();
		for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
			g_oa.ranges[i] = 0.01f * oa->distance[i];
		obstacle_distance_pub.publish(g_oa);
	}

    /* ultrasonic */
/*
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
	
		// publish ultrasonic data
		sensor_msgs::LaserScan g_ul;
		g_ul.ranges.resize(CAMERA_PAIR_NUM);
		g_ul.intensities.resize(CAMERA_PAIR_NUM);
		g_ul.header.frame_id = "guidance";
		g_ul.header.stamp    = ros::Time::now();
		for ( int d = 0; d < CAMERA_PAIR_NUM; ++d ){
			g_ul.ranges[d] = 0.001f * ultrasonic->ultrasonic[d];
			g_ul.intensities[d] = 1.0 * ultrasonic->reliability[d];
		}
		ultrasonic_pub.publish(g_ul);
    }
*/
    g_lock.leave();
    g_event.set_event();

    return 0;
}
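The callback only copies data, publishes, and signals completion; any blocking wait lives in the main thread. Below is a sketch of that consuming side, assuming the DJI_lock / DJI_event helpers from the SDK sample's DJI_utility code (enter/leave, wait_event/set_event); publishing could equally stay inside the callback, as it does in this example.

// hypothetical main loop paired with the g_lock / g_event pattern above
while (ros::ok())
{
    g_event.wait_event();            // blocks until my_callback calls g_event.set_event()

    g_lock.enter();                  // copy the shared buffers out while holding the lock
    cv::Mat left  = g_greyscale_image_left.clone();
    cv::Mat right = g_greyscale_image_right.clone();
    cv::Mat depth = g_depth.clone();
    g_lock.leave();

    // process or publish the local copies here, outside the lock
    ros::spinOnce();
}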
Example #3
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
    if (e_image == data_type && NULL != content)
    {        
        image_data* data = (image_data*)content;
        dji_guidance::multi_image msg;

        // forward facing guidance sensor is disabled for now...
        //msg.images.push_back(create_image_message(data, e_vbus1));
        msg.images.push_back(create_image_message(data, e_vbus2));
        msg.images.push_back(create_image_message(data, e_vbus3));
        msg.images.push_back(create_image_message(data, e_vbus4));
        //msg.images.push_back(create_image_message(data, e_vbus5));

        image_pub.publish(msg);
        std::cout << "published " << msg.images.size() << " images" << std::endl;
    }

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        // printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        // printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
    
        // publish imu data
        geometry_msgs::TransformStamped g_imu;
        g_imu.header.frame_id = "guidance";
        g_imu.header.stamp    = ros::Time::now();
        g_imu.transform.translation.x = imu_data->acc_x;
        g_imu.transform.translation.y = imu_data->acc_y;
        g_imu.transform.translation.z = imu_data->acc_z;
        g_imu.transform.rotation.w = imu_data->q[0];
        g_imu.transform.rotation.x = imu_data->q[1];
        g_imu.transform.rotation.y = imu_data->q[2];
        g_imu.transform.rotation.z = imu_data->q[3];
        imu_pub.publish(g_imu);
    }
    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        // printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        // printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
    
        // publish velocity
        geometry_msgs::Vector3Stamped g_vo;
        g_vo.header.frame_id = "guidance";
        g_vo.header.stamp    = ros::Time::now();
        g_vo.vector.x = 0.001f * vo->vx;
        g_vo.vector.y = 0.001f * vo->vy;
        g_vo.vector.z = 0.001f * vo->vz;
        velocity_pub.publish(g_vo);
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}
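Most of these examples squeeze the IMU sample into a geometry_msgs::TransformStamped; the next example fills a sensor_msgs::Imu instead, which is the more conventional ROS type. A compact, hedged sketch of that approach follows; it assumes the Guidance quaternion is ordered [w, x, y, z] as in Examples #2 and #3 (the next example maps q[0] to x, so verify the ordering against the SDK documentation), and imu_ros_pub is a hypothetical publisher of sensor_msgs::Imu.

sensor_msgs::Imu imu_msg;
imu_msg.header.stamp    = ros::Time::now();
imu_msg.header.frame_id = "guidance";

// quaternion assumed [w, x, y, z], matching the TransformStamped mapping above
imu_msg.orientation.w = imu_data->q[0];
imu_msg.orientation.x = imu_data->q[1];
imu_msg.orientation.y = imu_data->q[2];
imu_msg.orientation.z = imu_data->q[3];

// accelerations copied through unscaled, as the examples here do (check the SDK docs for units)
imu_msg.linear_acceleration.x = imu_data->acc_x;
imu_msg.linear_acceleration.y = imu_data->acc_y;
imu_msg.linear_acceleration.z = imu_data->acc_z;

imu_ros_pub.publish(imu_msg);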
Example #4
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();


    /* image data
    if (e_image == data_type && NULL != content)
    {        
        image_data data;
        memcpy((char*)&data, content, sizeof(data));

        memcpy(g_greyscale_image_left.data, data.m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
        memcpy(g_greyscale_image_right.data, data.m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
        memcpy(g_depth.data, data.m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);

        Mat depth8(HEIGHT, WIDTH, CV_8UC1);
        g_depth.convertTo(depth8, CV_8UC1);
        imshow("left",  g_greyscale_image_left);
	imshow("right", g_greyscale_image_right);
        imshow("depth", depth8);
        key = waitKey(1);

        //publish depth image
        cv_bridge::CvImage depth_16;
        g_depth.copyTo(depth_16.image);
        depth_16.header.frame_id  = "guidance";
        depth_16.header.stamp	  = ros::Time::now();
        depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
        depth_image_pub.publish(depth_16.toImageMsg());

        // publish left greyscale image
        cv_bridge::CvImage left_8;
        g_greyscale_image_left.copyTo(left_8.image);
        left_8.header.frame_id  = "guidance";
        left_8.header.stamp	= ros::Time::now();
        left_8.encoding		= sensor_msgs::image_encodings::MONO8;
        left_image_pub.publish(left_8.toImageMsg());

	// publish right greyscale image
        cv_bridge::CvImage right_8;
        g_greyscale_image_right.copyTo(right_8.image);
        right_8.header.frame_id  = "guidance";
        right_8.header.stamp	 = ros::Time::now();
        right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
        right_image_pub.publish(right_8.toImageMsg());
    }*/

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
       /* printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
 	
    	// publish imu data
	geometry_msgs::TransformStamped g_imu;
	g_imu.header.frame_id = "guidance";
	g_imu.header.stamp    = ros::Time::now();
	g_imu.transform.translation.x = imu_data->acc_x;
	g_imu.transform.translation.y = imu_data->acc_y;
	g_imu.transform.translation.z = imu_data->acc_z;
	g_imu.transform.rotation.x = imu_data->q[0];
	g_imu.transform.rotation.y = imu_data->q[1];
	g_imu.transform.rotation.z = imu_data->q[2];
	g_imu.transform.rotation.w = imu_data->q[3];
	imu_pub.publish(g_imu);
    }

    */

        // fill the pre-declared imu_p message with orientation and linear acceleration
        imu_p.header.stamp = ros::Time::now();
        imu_p.header.frame_id = "imu";
        imu_p.orientation.x = imu_data->q[0];
        imu_p.orientation.y = imu_data->q[1];
        imu_p.orientation.z = imu_data->q[2];
        imu_p.orientation.w = imu_data->q[3];
        imu_p.linear_acceleration.x = imu_data->acc_x;
        imu_p.linear_acceleration.y = imu_data->acc_y;
        imu_p.linear_acceleration.z = imu_data->acc_z;

        double roll, pitch, yaw;
        tf::Quaternion orientation;
        tf::quaternionMsgToTF(imu_p.orientation, orientation);
        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
        //printf( "roll:%f pitch:%f yaw:%f\n", roll, pitch, yaw );
        //printf( "imu_data->acc_x:%f imu_data->acc_y:%f imu_data->acc_z:%f\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z);
        /* Disabled experiments: axis remapping of the RPY angles and covariance settings.
        if (se=="abc") orientation.setRPY(roll, pitch, yaw);
        else
        {
            if(se=="acb") orientation.setRPY(roll, yaw, pitch);
            else if(se=="bac") orientation.setRPY(pitch, roll, yaw);
            else if(se=="bca") orientation.setRPY(pitch, yaw, roll);
            else if(se=="cab") orientation.setRPY(yaw, roll, pitch);
            else if(se=="cba") orientation.setRPY(yaw, pitch, roll);

            // sign-flipped variants:
            // yaw=0;
            // if(se=="acb") orientation.setRPY(roll, -pitch, yaw);
            // else if(se=="bac") orientation.setRPY(roll, pitch, -yaw);
            // else if(se=="bca") orientation.setRPY(roll, -pitch, -yaw);
            // else if(se=="cab") orientation.setRPY(-roll, -pitch, yaw);
            // else if(se=="cba") orientation.setRPY(-roll, pitch, -yaw);
            // else if(se=="aaa") orientation.setRPY(-roll, -pitch, -yaw);
            // else if(se=="bbb") orientation.setRPY(-roll, pitch, yaw);
            // else if(se=="ccc") orientation.setRPY(roll, pitch, yaw);

            tf::quaternionTFToMsg(orientation, imu_p.orientation);
        }

        imu_p.linear_acceleration.x = imu_data->acc_x;
        imu_p.linear_acceleration.y = imu_data->acc_y;
        imu_p.linear_acceleration.z = imu_data->acc_z;

        imu_p.orientation_covariance[0]=0.0012250000000000002;
        imu_p.orientation_covariance[4]=0.0012250000000000002;
        imu_p.orientation_covariance[8]=0.0012250000000000002;
        imu_p.linear_acceleration_covariance[0]=0.00031329000000000003;
        imu_p.linear_acceleration_covariance[4]=0.00031329000000000003;
        imu_p.linear_acceleration_covariance[8]=0.00031329000000000003;
        imu_p.angular_velocity_covariance[0]=6.25e-06;
        imu_p.angular_velocity_covariance[4]=6.25e-06;
        imu_p.angular_velocity_covariance[8]=6.25e-06;

        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
        */

        imu_m.publish(imu_p);
    }

    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        if (flag)
        {
            printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
            printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
        }

        // publish velocity
        geometry_msgs::Vector3Stamped g_vo;
        g_vo.header.frame_id = "guidance";
        g_vo.header.stamp    = ros::Time::now();
        g_vo.vector.x = 0.001f * vo->vx;
        g_vo.vector.y = 0.001f * vo->vy;
        g_vo.vector.z = 0.001f * vo->vz;
        velocity_pub.publish(g_vo);

        // stop printing once any x velocity has been observed
        if (abs(vo->vx) > 0) flag = false;
    }

    /* obstacle distance
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );

	// publish obstacle distance
	sensor_msgs::LaserScan g_oa;
        g_oa.ranges.resize(5);
	g_oa.header.frame_id = "guidance";
	g_oa.header.stamp    = ros::Time::now();
	g_oa.ranges[0] = 0.01f * oa->distance[0];
	g_oa.ranges[1] = 0.01f * oa->distance[1];
	g_oa.ranges[2] = 0.01f * oa->distance[2];
	g_oa.ranges[3] = 0.01f * oa->distance[3];
	g_oa.ranges[4] = 0.01f * oa->distance[4];
	obstacle_distance_pub.publish(g_oa);
    }*/

    /* ultrasonic
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
	
	// publish ultrasonic data
	sensor_msgs::LaserScan g_ul;
        g_ul.ranges.resize(5);
        g_ul.intensities.resize(5);
	g_ul.header.frame_id = "guidance";
	g_ul.header.stamp    = ros::Time::now();
	g_ul.ranges[0] = 0.001f * ultrasonic->ultrasonic[0];
	g_ul.ranges[1] = 0.001f * ultrasonic->ultrasonic[1];
	g_ul.ranges[2] = 0.001f * ultrasonic->ultrasonic[2];
	g_ul.ranges[3] = 0.001f * ultrasonic->ultrasonic[3];
	g_ul.ranges[4] = 0.001f * ultrasonic->ultrasonic[4];
	g_ul.intensities[0] = 1.0 * ultrasonic->reliability[0];
	g_ul.intensities[1] = 1.0 * ultrasonic->reliability[1];
	g_ul.intensities[2] = 1.0 * ultrasonic->reliability[2];
	g_ul.intensities[3] = 1.0 * ultrasonic->reliability[3];
	g_ul.intensities[4] = 1.0 * ultrasonic->reliability[4];
	ultrasonic_pub.publish(g_ul);
    }*/

    g_lock.leave();
    g_event.set_event();

    return 0;
}
Example #5
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
    if (e_image == data_type && NULL != content)
    {
        ros::Time time_in_this_loop = ros::Time::now();
        image_data* data = (image_data*)content;

        if ( data->m_greyscale_image_left[CAMERA_ID] ) {
            memcpy(g_greyscale_image_left.data, data->m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
            imshow("left",  g_greyscale_image_left);
            // publish left greyscale image
            cv_bridge::CvImage left_8;
            g_greyscale_image_left.copyTo(left_8.image);
            left_8.header.frame_id  = "guidance";
            left_8.header.stamp	= time_in_this_loop;
            left_8.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub.publish(left_8.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera_params_left, g_cam_info_left);
                cam_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID] ) {
            memcpy(g_greyscale_image_right.data, data->m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
            imshow("right", g_greyscale_image_right);
            // publish right greyscale image
            cv_bridge::CvImage right_8;
            g_greyscale_image_right.copyTo(right_8.image);
            right_8.header.frame_id  = "guidance";
            right_8.header.stamp	 = time_in_this_loop;
            right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub.publish(right_8.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera_params_right, g_cam_info_right);
                cam_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_depth_image[CAMERA_ID] ) {
            memcpy(g_depth.data, data->m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);
            g_depth.convertTo(depth8, CV_8UC1);
            imshow("depth", depth8);
            //publish depth image
            cv_bridge::CvImage depth_16;
            g_depth.copyTo(depth_16.image);
            depth_16.header.frame_id  = "guidance";
            depth_16.header.stamp	  = ros::Time::now();
            depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
            depth_image_pub.publish(depth_16.toImageMsg());
        }
        // left image pair (CAMERA_ID_2)
        if ( data->m_greyscale_image_left[CAMERA_ID_2] ) {
            memcpy(g_greyscale_image_left_2.data, data->m_greyscale_image_left[CAMERA_ID_2], IMAGE_SIZE);
            imshow("left_2",  g_greyscale_image_left_2);
            // publish left greyscale image
            cv_bridge::CvImage left_8_2;
            g_greyscale_image_left_2.copyTo(left_8_2.image);
            left_8_2.header.frame_id  = "guidance2";
            left_8_2.header.stamp	= time_in_this_loop;
            left_8_2.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_2.publish(left_8_2.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance2";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera2_params_left, g_cam_info_left);
                cam2_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_2] ) {
            memcpy(g_greyscale_image_right_2.data, data->m_greyscale_image_right[CAMERA_ID_2], IMAGE_SIZE);
            imshow("right_2", g_greyscale_image_right_2);
            // publish right greyscale image
            cv_bridge::CvImage right_8_2;
            g_greyscale_image_right_2.copyTo(right_8_2.image);
            right_8_2.header.frame_id  = "guidance2";
            right_8_2.header.stamp	 = time_in_this_loop;
            right_8_2.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_2.publish(right_8_2.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance2";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera2_params_right, g_cam_info_right);
                cam2_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
//        if ( data->m_depth_image[CAMERA_ID_2] ) {
//            memcpy(g_depth_2.data, data->m_depth_image[CAMERA_ID_2], IMAGE_SIZE * 2);
//            g_depth_2.convertTo(depth8_2, CV_8UC1);
//            imshow("depth_2", depth8_2);
//            //publish depth image
//            cv_bridge::CvImage depth_16_2;
//            g_depth_2.copyTo(depth_16_2.image);
//            depth_16_2.header.frame_id  = "guidance2";
//            depth_16_2.header.stamp	  = ros::Time::now();
//            depth_16_2.encoding	  = sensor_msgs::image_encodings::MONO16;
//            depth_image_pub_2.publish(depth_16_2.toImageMsg());
//        }
        //camera 3
        if ( data->m_greyscale_image_left[CAMERA_ID_3] ) {
            memcpy(g_greyscale_image_left_3.data, data->m_greyscale_image_left[CAMERA_ID_3], IMAGE_SIZE);
            imshow("left_3",  g_greyscale_image_left_3);
            // publish left greyscale image
            cv_bridge::CvImage left_8_3;
            g_greyscale_image_left_3.copyTo(left_8_3.image);
            left_8_3.header.frame_id  = "guidance3";
            left_8_3.header.stamp	= time_in_this_loop;
            left_8_3.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_3.publish(left_8_3.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance3";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera3_params_left, g_cam_info_left);
                cam3_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_3] ) {
            memcpy(g_greyscale_image_right_3.data, data->m_greyscale_image_right[CAMERA_ID_3], IMAGE_SIZE);
            imshow("right_3", g_greyscale_image_right_3);
            // publish right greyscale image
            cv_bridge::CvImage right_8_3;
            g_greyscale_image_right_3.copyTo(right_8_3.image);
            right_8_3.header.frame_id  = "guidance3";
            right_8_3.header.stamp	 = time_in_this_loop;
            right_8_3.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_3.publish(right_8_3.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance3";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera3_params_right, g_cam_info_right);
                cam3_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }

        //camera 5
        if ( data->m_greyscale_image_left[CAMERA_ID_5] ) {
            memcpy(g_greyscale_image_left_5.data, data->m_greyscale_image_left[CAMERA_ID_5], IMAGE_SIZE);
            imshow("left_5",  g_greyscale_image_left_5);
            // publish left greyscale image
            cv_bridge::CvImage left_8_5;
            g_greyscale_image_left_5.copyTo(left_8_5.image);
            left_8_5.header.frame_id  = "guidance5";
            left_8_5.header.stamp	= time_in_this_loop;
            left_8_5.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_5.publish(left_8_5.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance5";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera5_params_left, g_cam_info_left);
                cam5_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_5] ) {
            memcpy(g_greyscale_image_right_5.data, data->m_greyscale_image_right[CAMERA_ID_5], IMAGE_SIZE);
            imshow("right_5", g_greyscale_image_right_5);
            // publish right greyscale image
            cv_bridge::CvImage right_8_5;
            g_greyscale_image_right_5.copyTo(right_8_5.image);
            right_8_5.header.frame_id  = "guidance5";
            right_8_5.header.stamp	 = time_in_this_loop;
            right_8_5.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_5.publish(right_8_5.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance5";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera5_params_right, g_cam_info_right);
                cam5_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }

        key = waitKey(1);
    }

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );

        // publish imu data
        geometry_msgs::TransformStamped g_imu;
        g_imu.header.frame_id = "guidance";
        g_imu.header.stamp    = ros::Time::now();
        g_imu.transform.translation.x = imu_data->acc_x;
        g_imu.transform.translation.y = imu_data->acc_y;
        g_imu.transform.translation.z = imu_data->acc_z;
        g_imu.transform.rotation.w = imu_data->q[0];
        g_imu.transform.rotation.x = imu_data->q[1];
        g_imu.transform.rotation.y = imu_data->q[2];
        g_imu.transform.rotation.z = imu_data->q[3];
        imu_pub.publish(g_imu);
    }
    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );

        // publish velocity
        geometry_msgs::Vector3Stamped g_vo;
        g_vo.header.frame_id = "guidance";
        g_vo.header.stamp    = ros::Time::now();
        g_vo.vector.x = 0.001f * vo->vx;
        g_vo.vector.y = 0.001f * vo->vy;
        g_vo.vector.z = 0.001f * vo->vz;
        velocity_pub.publish(g_vo);
    }

    /* obstacle distance */
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );

        // publish obstacle distance
        sensor_msgs::LaserScan g_oa;
        g_oa.ranges.resize(CAMERA_PAIR_NUM);
        g_oa.header.frame_id = "guidance";
        g_oa.header.stamp    = ros::Time::now();
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
            g_oa.ranges[i] = 0.01f * oa->distance[i];
        obstacle_distance_pub.publish(g_oa);
    }

    /* ultrasonic */
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }

        // publish ultrasonic data
        sensor_msgs::LaserScan g_ul;
        g_ul.ranges.resize(CAMERA_PAIR_NUM);
        g_ul.intensities.resize(CAMERA_PAIR_NUM);
        g_ul.header.frame_id = "guidance";
        g_ul.header.stamp    = ros::Time::now();
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d ) {
            g_ul.ranges[d] = 0.001f * ultrasonic->ultrasonic[d];
            g_ul.intensities[d] = 1.0 * ultrasonic->reliability[d];
        }
        ultrasonic_pub.publish(g_ul);
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}
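Example #5 repeats the same copy-and-publish block for every camera pair. A hypothetical helper that factors it out might look like the following; the function name, the cv::Mat buffer parameter, and the assumption that the camera-parameter argument is a YAML path string are all illustrative, since read_params_from_yaml_and_fill_cam_info_msg is specific to that project.

static void publish_greyscale(const char* raw, cv::Mat& buf, const std::string& frame,
                              ros::Publisher& img_pub, ros::Publisher& info_pub,
                              const std::string& yaml_path, const ros::Time& stamp)
{
    if (!raw) return;                   // this camera pair was not selected
    memcpy(buf.data, raw, IMAGE_SIZE);  // copy the SDK buffer into the preallocated cv::Mat

    cv_bridge::CvImage img;
    buf.copyTo(img.image);
    img.header.frame_id = frame;
    img.header.stamp    = stamp;
    img.encoding        = sensor_msgs::image_encodings::MONO8;
    img_pub.publish(img.toImageMsg());

    sensor_msgs::CameraInfo info;
    info.header.stamp    = stamp;
    info.header.frame_id = frame;
    try {
        read_params_from_yaml_and_fill_cam_info_msg(yaml_path, info);
        info_pub.publish(info);
    } catch (...) {
        // if the YAML cannot be read, skip publishing camera info
    }
}

Each if ( data->m_greyscale_image_left[...] ) block in the callback would then collapse to a single call such as publish_greyscale(data->m_greyscale_image_left[CAMERA_ID], g_greyscale_image_left, "guidance", left_image_pub, cam_info_left_pub, camera_params_left, time_in_this_loop).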
Example #6
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();
    if (e_image == data_type && NULL != content)
    {
        image_data data;
        memcpy( (char*)&data, content, sizeof(data) );
        printf( "frame index:%d,stamp:%d\n", data.frame_index, data.time_stamp );
#if !USE_GUIDANCE_ASSISTANT_CONFIG
#ifdef HAVE_OPENCV
        memcpy( g_greyscale_image_left.data, data.m_greyscale_image_left[sensor_id], IMAGE_SIZE );
        memcpy( g_greyscale_image_right.data, data.m_greyscale_image_right[sensor_id], IMAGE_SIZE );
        memcpy( g_depth.data, data.m_depth_image[sensor_id], IMAGE_SIZE * 2 );
        imshow("left", g_greyscale_image_left);
        imshow("right", g_greyscale_image_right);
#endif
#else
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            string stmps;
            if ( data.m_greyscale_image_left[d] )
            {
#ifdef HAVE_OPENCV
                memcpy( g_greyscale_image_left.data, data.m_greyscale_image_left[d], IMAGE_SIZE ); // several pairs may be selected; the single buffer is simply overwritten
                stmps = "left";
                stmps = stmps + (char)('0'+d);
                imshow(stmps.c_str(), g_greyscale_image_left);
#endif
            }
            if ( data.m_greyscale_image_right[d] )
            {
#ifdef HAVE_OPENCV
                memcpy( g_greyscale_image_right.data, data.m_greyscale_image_right[d], IMAGE_SIZE );
                stmps = "right";
                stmps = stmps + (char)('0'+d);
                imshow(stmps, g_greyscale_image_right);
#endif
            }
            if ( data.m_depth_image[d] )
            {
#ifdef HAVE_OPENCV
                Mat depthmap(HEIGHT, WIDTH, CV_16SC1);
                Mat depthmap8(HEIGHT, WIDTH, CV_8UC1);
                memcpy( depthmap.data, data.m_depth_image[d], IMAGE_SIZE * 2 );
                depthmap.convertTo(depthmap8, CV_8UC1);
                stmps = "depthmap";
                stmps = stmps + (char)('0'+d);
                imshow(stmps, depthmap8);
#endif
            }
        }
#endif

#ifdef HAVE_OPENCV
        waitKey(1);
#endif

    }

    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "imu:%f %f %f,%f %f %f %f\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
        printf( "frame index:%d,stamp:%d\n", imu_data->frame_index, imu_data->time_stamp );
    }

    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
        printf( "frame index:%d,stamp:%d\n", vo->frame_index, vo->time_stamp );
    }

    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );
        printf( "frame index:%d,stamp:%d\n", oa->frame_index, oa->time_stamp );
    }

    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance:%f,reliability:%d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
        printf( "frame index:%d,stamp:%d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}