예제 #1
0
파일: run.cpp 프로젝트: 39M/Matrice100
int my_callback(int data_type, int data_len, char *content)
{
	printf("enter callback..\n");
	g_lock.enter();
	if (e_image == data_type && NULL != content)
	{
		printf("callback: type is image..\n");
		image_data data;
		memcpy((char*)&data, content, sizeof(data));
		memcpy(g_imleft.data, data.m_greyscale_image_left[selected_vbus], IMAGE_SIZE);
		memcpy(g_imright.data, data.m_greyscale_image_right[selected_vbus], IMAGE_SIZE);
		memcpy(g_depth.data, data.m_depth_image[selected_vbus], IMAGE_SIZE * 2);
	}
	g_lock.leave();
	g_event.set_event();
	return 0;
}
예제 #2
0
/**
 * Demo entry point: brings up the Guidance data transfer, subscribes the
 * sensor streams, pumps 1000 callback events, then tears everything down.
 */
int main(int argc, const char** argv)
{
    // Start from a clean transfer configuration.
    reset_config();
    int err = init_transfer();
    RETURN_IF_ERR( err );

#if !USE_GUIDANCE_ASSISTANT_CONFIG
    // Subscribe each stream by hand when the assistant-software
    // configuration is not being used.
    err = select_greyscale_image( sensor_id, true );
    RETURN_IF_ERR( err );
    err = select_greyscale_image( sensor_id, false );
    RETURN_IF_ERR( err );
    err = select_depth_image( sensor_id );
    RETURN_IF_ERR( err );
    select_imu();
    select_ultrasonic();
    select_obstacle_distance();
    select_velocity();
#endif

    // Install the callback and begin streaming.
    err = set_sdk_event_handler( my_callback );
    RETURN_IF_ERR( err );
    err = start_transfer();
    RETURN_IF_ERR( err );

    // Block until the callback has signalled 1000 events.
    int remaining = 1000;
    while ( remaining-- > 0 )
    {
        g_event.wait_event();
    }

    err = stop_transfer();
    RETURN_IF_ERR( err );
    //make sure the ack packet from GUIDANCE is received
    // NOTE(review): the unit of this project-defined sleep() is unclear --
    // other samples in this codebase pass 1; confirm seconds vs microseconds.
    sleep( 1000000 );
    err = release_transfer();
    RETURN_IF_ERR( err );

    return 0;
}
예제 #3
0
// SDK event callback: everything except the obstacle-distance branch is
// currently disabled by block comments.
//
// NOTE(review): this function depends on C block comments NOT nesting.
// The "/*" that opens before the image branch below is terminated by the
// "*/" inside the "/* imu */" marker comment; likewise the imu and
// velocity branches are each wrapped in their own "/* ... */" pair.
// Do not reformat or re-balance these comments casually.
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
/*    if (e_image == data_type && NULL != content)
    {        
        image_data* data = (image_data*)content;

		if ( data->m_greyscale_image_left[CAMERA_ID] ){
			memcpy(g_greyscale_image_left.data, data->m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
			imshow("left",  g_greyscale_image_left);
			// publish left greyscale image
			cv_bridge::CvImage left_8;
			g_greyscale_image_left.copyTo(left_8.image);
			left_8.header.frame_id  = "guidance";
			left_8.header.stamp	= ros::Time::now();
			left_8.encoding		= sensor_msgs::image_encodings::MONO8;
			left_image_pub.publish(left_8.toImageMsg());
		}
		if ( data->m_greyscale_image_right[CAMERA_ID] ){
			memcpy(g_greyscale_image_right.data, data->m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
			imshow("right", g_greyscale_image_right);
			// publish right greyscale image
			cv_bridge::CvImage right_8;
			g_greyscale_image_right.copyTo(right_8.image);
			right_8.header.frame_id  = "guidance";
			right_8.header.stamp	 = ros::Time::now();
			right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
			right_image_pub.publish(right_8.toImageMsg());
		}
		if ( data->m_depth_image[CAMERA_ID] ){
			memcpy(g_depth.data, data->m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);
			g_depth.convertTo(depth8, CV_8UC1);
			imshow("depth", depth8);
			//publish depth image
			cv_bridge::CvImage depth_16;
			g_depth.copyTo(depth_16.image);
			depth_16.header.frame_id  = "guidance";
			depth_16.header.stamp	  = ros::Time::now();
			depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
			depth_image_pub.publish(depth_16.toImageMsg());
		}
		
        key = waitKey(1);
    }
    /* imu */
/*
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
 	
    	// publish imu data
		geometry_msgs::TransformStamped g_imu;
		g_imu.header.frame_id = "guidance";
		g_imu.header.stamp    = ros::Time::now();
		g_imu.transform.translation.x = imu_data->acc_x;
		g_imu.transform.translation.y = imu_data->acc_y;
		g_imu.transform.translation.z = imu_data->acc_z;
		g_imu.transform.rotation.w = imu_data->q[0];
		g_imu.transform.rotation.x = imu_data->q[1];
		g_imu.transform.rotation.y = imu_data->q[2];
		g_imu.transform.rotation.z = imu_data->q[3];
		imu_pub.publish(g_imu);
    }
 */
    /* velocity */
/*
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
	
		// publish velocity
		geometry_msgs::Vector3Stamped g_vo;
		g_vo.header.frame_id = "guidance";
		g_vo.header.stamp    = ros::Time::now();
		g_vo.vector.x = 0.001f * vo->vx;
		g_vo.vector.y = 0.001f * vo->vy;
		g_vo.vector.z = 0.001f * vo->vz;
		velocity_pub.publish(g_vo);
    }
*/
    /* obstacle distance */
    // The only active branch: log the per-pair distances (raw units are
    // centimetres, scaled to metres by 0.01f) and republish them as a
    // sensor_msgs::LaserScan with one range entry per camera pair.
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
		printf( "\n" );

		// publish obstacle distance
		sensor_msgs::LaserScan g_oa;
		g_oa.ranges.resize(CAMERA_PAIR_NUM);
		g_oa.header.frame_id = "guidance";
		g_oa.header.stamp    = ros::Time::now();
		for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
			g_oa.ranges[i] = 0.01f * oa->distance[i];
		obstacle_distance_pub.publish(g_oa);
	}

    /* ultrasonic */
/*
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
	
		// publish ultrasonic data
		sensor_msgs::LaserScan g_ul;
		g_ul.ranges.resize(CAMERA_PAIR_NUM);
		g_ul.intensities.resize(CAMERA_PAIR_NUM);
		g_ul.header.frame_id = "guidance";
		g_ul.header.stamp    = ros::Time::now();
		for ( int d = 0; d < CAMERA_PAIR_NUM; ++d ){
			g_ul.ranges[d] = 0.001f * ultrasonic->ultrasonic[d];
			g_ul.intensities[d] = 1.0 * ultrasonic->reliability[d];
		}
		ultrasonic_pub.publish(g_ul);
    }
*/
    // Release the lock before signalling so the waiter can proceed.
    g_lock.leave();
    g_event.set_event();

    return 0;
}
예제 #4
0
// ROS node entry point ("GuidanceNode"): connects to the DJI Guidance,
// prints calibration/online status, subscribes the data streams, then
// loops handling keyboard-driven exposure changes and sensor-direction
// switches until ros::ok() fails or 'q' is pressed.
int main(int argc, char** argv)
{
// NOTE(review): the block comment opened on the next line is terminated
// by the "*/" inside the "/* initialize ros */" marker below -- C block
// comments do not nest. The help() banner is therefore disabled.
/*	if(argc>1){
		printf("This is demo program showing data from Guidance.\n\t" 
			" 'a','d','w','s','x' to select sensor direction.\n\t"
			" 'j','k' to change the exposure parameters.\n\t"
			" 'm' to switch between AEC and constant exposure modes.\n\t"
			" 'n' to return to default exposure mode and parameters.\n\t"
			" 'q' to quit.");
		return 0;
	}
	
    /* initialize ros */
    ros::init(argc, argv, "GuidanceNode");
    ros::NodeHandle my_node;
  //  depth_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/depth_image",1);
   // left_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/left_image",1);
   // right_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/right_image",1);
    //imu_pub  				= my_node.advertise<geometry_msgs::TransformStamped>("/guidance/imu",1);
    //velocity_pub  			= my_node.advertise<geometry_msgs::Vector3Stamped>("/guidance/velocity",1);
    obstacle_distance_pub	= my_node.advertise<sensor_msgs::LaserScan>("/guidance/obstacle_distance",1);
    //ultrasonic_pub			= my_node.advertise<sensor_msgs::LaserScan>("/guidance/ultrasonic",1);

    /* initialize guidance */
    reset_config();
    int err_code = init_transfer();
    RETURN_IF_ERR(err_code);

	// Report which camera pairs the Guidance sees as online.
	int online_status[CAMERA_PAIR_NUM];
	err_code = get_online_status(online_status);
	RETURN_IF_ERR(err_code);
    std::cout<<"Sensor online status: ";
	for (int i=0; i<CAMERA_PAIR_NUM; i++)
        std::cout<<online_status[i]<<" ";
    std::cout<<std::endl;

	// get cali param
	stereo_cali cali[CAMERA_PAIR_NUM];
	err_code = get_stereo_cali(cali);
	RETURN_IF_ERR(err_code);
    std::cout<<"cu\tcv\tfocal\tbaseline\n";
	for (int i=0; i<CAMERA_PAIR_NUM; i++)
	{
        std::cout<<cali[i].cu<<"\t"<<cali[i].cv<<"\t"<<cali[i].focal<<"\t"<<cali[i].baseline<<std::endl;
	}
	
    /* select data */
    err_code = select_greyscale_image(CAMERA_ID, true);
	RETURN_IF_ERR(err_code);
    err_code = select_greyscale_image(CAMERA_ID, false);
	RETURN_IF_ERR(err_code);
    err_code = select_depth_image(CAMERA_ID);
	RETURN_IF_ERR(err_code);
    select_imu();
    select_ultrasonic();
    select_obstacle_distance();
    select_velocity();
    /* start data transfer */
    err_code = set_sdk_event_handler(my_callback);
    RETURN_IF_ERR(err_code);
    err_code = start_transfer();
    RETURN_IF_ERR(err_code);
	
	// for setting exposure
	exposure_param para;
	para.m_is_auto_exposure = 1;
	para.m_step = 10;
	para.m_expected_brightness = 120;
    para.m_camera_pair_index = CAMERA_ID;
	
	std::cout << "start_transfer" << std::endl;

	// Main loop: wait for each callback event, then react to the last key
	// pressed in the OpenCV windows (key is set inside my_callback).
	while (ros::ok())
	{
		g_event.wait_event();
		if (key > 0)
			// set exposure parameters
			if(key=='j' || key=='k' || key=='m' || key=='n'){
				if(key=='j')
					if(para.m_is_auto_exposure) para.m_expected_brightness += 20;
					else para.m_exposure_time += 3;
				else if(key=='k')
					if(para.m_is_auto_exposure) para.m_expected_brightness -= 20;
					else para.m_exposure_time -= 3;
				else if(key=='m'){
					para.m_is_auto_exposure = !para.m_is_auto_exposure;
                    std::cout<<"exposure is "<<para.m_is_auto_exposure<<std::endl;
				}
				else if(key=='n'){//return to default
					para.m_expected_brightness = para.m_exposure_time = 0;
				}

                std::cout<<"Setting exposure parameters....SensorId="<<CAMERA_ID<<std::endl;
                para.m_camera_pair_index = CAMERA_ID;
				set_exposure_param(&para);
				key = 0;
			}
			else {// switch image direction
				// Any other key: stop streaming, re-select the camera pair
				// indicated by w/d/x/a/s (or quit on 'q'), then restart.
				err_code = stop_transfer();
				RETURN_IF_ERR(err_code);
				reset_config();

				if (key == 'q') break;
				if (key == 'w') CAMERA_ID = e_vbus1;
				if (key == 'd') CAMERA_ID = e_vbus2;
				if (key == 'x') CAMERA_ID = e_vbus3;
				if (key == 'a') CAMERA_ID = e_vbus4;	   
				if (key == 's') CAMERA_ID = e_vbus5;

				select_greyscale_image(CAMERA_ID, true);
				select_greyscale_image(CAMERA_ID, false);
				select_depth_image(CAMERA_ID);

				err_code = start_transfer();
				RETURN_IF_ERR(err_code);
				key = 0;
			}
			// NOTE(review): despite the indentation, this runs on EVERY
			// loop iteration -- it is outside the `if (key > 0)` body.
			ros::spinOnce();
	}

	/* release data transfer */
	err_code = stop_transfer();
	RETURN_IF_ERR(err_code);
	//make sure the ack packet from GUIDANCE is received
	sleep(1);
	std::cout << "release_transfer" << std::endl;
	err_code = release_transfer();
	RETURN_IF_ERR(err_code);

    return 0;
}
예제 #5
0
// Guidance image callback: for each camera pair, tracks sparse features
// with pyramidal Lucas-Kanade optical flow on the left and right greyscale
// images (drawing the flow vectors), then runs StereoBM block matching to
// display a disparity map. Wakes the main loop when done.
//
// NOTE(review): the stereo-matching section at the bottom executes once
// per iteration of the CAMERA_PAIR_NUM loop, even for pairs that delivered
// no image this frame -- presumably intentional only for the active pair;
// verify against the transfer configuration.
int guidance_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();
    if(e_image == data_type && NULL!=content)
    {
        printf("Success..\n");
        image_data image;
        memcpy((char*)&image,content,sizeof(image));
        printf("frame index:%d, time stamp:%d\n",image.frame_index,image.time_stamp);
        
        for(int d=0;d<CAMERA_PAIR_NUM;d++)
        {
            
            if(image.m_greyscale_image_left[d])
            {
                memcpy(g_greyscale_image_left.data, image.m_greyscale_image_left[d],IMAGE_SIZE);
                
                vector<Point2f> nextleftpts,kpts;
                // Too few tracked features: skip flow this frame (features
                // are re-detected in the frame_num>0 block below).
                if(prevleftpts.size()<MIN_FEATURES)
                {
                    //control input
                }
                else if(frame_num>0)
                {
                    //vector<CvPoint2D32f> nextleftpts;
                    vector<uchar> status;
                    vector<float> error;
                    vector<Mat> nextleftpyr,prevleftpyr;
                    buildOpticalFlowPyramid(prevleftimg, prevleftpyr, Size(7,7), 4);
                    buildOpticalFlowPyramid(g_greyscale_image_left, nextleftpyr, Size(7,7), 4);
                    CvTermCriteria optical_flow_termination_criteria
                    = cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.1 );
                    calcOpticalFlowPyrLK(prevleftpyr, nextleftpyr, prevleftpts, nextleftpts, status, error,Size(7,7),4,optical_flow_termination_criteria,OPTFLOW_LK_GET_MIN_EIGENVALS);
                    
                    //drawing featues
                    
                    Mat left_mask(g_greyscale_image_left.size(),CV_8UC3);
                    cvtColor(g_greyscale_image_left, left_mask, CV_GRAY2BGR);
                    
                    // Draw a flow arrow and endpoint dot for every tracked
                    // point that landed inside the image; keep it in kpts.
                    for(size_t i=0;i<nextleftpts.size();i++)
                    {
                        if(nextleftpts[i].x>=0 && nextleftpts[i].x<=WIDTH && nextleftpts[i].y>=0 && nextleftpts[i].y<=HEIGHT)
                        {
                            CvScalar line_color; line_color = CV_RGB(255,0,0);
                            //int line_thickness = 1;
                            CvPoint p,q;
                            p.x = (int) prevleftpts[i].x;
                            p.y = (int) prevleftpts[i].y;
                            q.x = (int) nextleftpts[i].x;
                            q.y = (int) nextleftpts[i].y;
//                            double angle; angle = atan2( (double) p.y - q.y, (double) p.x - q.x );
//                            double hypotenuse; hypotenuse = sqrt( pow((p.y - q.y),2) + pow((p.x - q.x),2) );
//                            
//                            //scale the line by a factor of 3
//                            q.x = (int) (p.x - 2 * hypotenuse * cos(angle));
//                            q.y = (int) (p.y - 2 * hypotenuse * sin(angle));
                            
                            arrowedLine(left_mask, p, q, line_color);
                            //cvLine( &left_mask, p, q, line_color, line_thickness, CV_AA, 0 );
                           
//                            p.x = (int) (q.x + 9 * cos(angle + pi / 4));
//                            p.y = (int) (q.y + 9 * sin(angle + pi / 4));
//                            cvLine( &prevleftimg, p, q, line_color, line_thickness, CV_AA, 0 );
//                            p.x = (int) (q.x + 9 * cos(angle - pi / 4));
//                            p.y = (int) (q.y + 9 * sin(angle - pi / 4));
//                            cvLine( &prevleftimg, p, q, line_color, line_thickness, CV_AA, 0 );
                            
                            circle(left_mask, nextleftpts[i], 2, Scalar(0,255,0),-1);
                            
                            kpts.push_back(nextleftpts[i]);
                        }
                    }
                    
                    imshow("left", left_mask);
                    moveWindow("left", 0, 0);
                    
                    }
                
                
                
                // Refresh the tracked point set: re-detect when too few
                // survived, otherwise carry the surviving points forward.
                if(frame_num>0)
                {
                    if(nextleftpts.size()<=MIN_FEATURES)
                    {
                        goodFeaturesToTrack(g_greyscale_image_left, prevleftpts, MAX_FEATURES, QUALITY_EIGVALUE, MIN_DISTANCE);
                    }
                    else
                    {
                        prevleftpts.assign(kpts.begin(), kpts.end());
                    }
                }
                
                g_greyscale_image_left.copyTo(prevleftimg);
                
            }
            
            if(image.m_greyscale_image_right[d])
            {
                memcpy(g_greyscale_image_right.data, image.m_greyscale_image_right[d],IMAGE_SIZE);
                
                vector<Point2f> nextrightpts,kpts;
                if(prevrightpts.size()<MIN_FEATURES)
                {
                    //control input
                }
                else if(frame_num>0)
                {
                    //vector<CvPoint2D32f> nextleftpts;
                    vector<uchar> status;
                    vector<float> error;
                    vector<Mat> nextrightpyr,prevrightpyr;
                    buildOpticalFlowPyramid(prevrightimg, prevrightpyr, Size(7,7), 4);
                    buildOpticalFlowPyramid(g_greyscale_image_right, nextrightpyr, Size(7,7), 4);
                    CvTermCriteria optical_flow_termination_criteria
                    = cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.1 );
                    calcOpticalFlowPyrLK(prevrightpyr, nextrightpyr, prevrightpts, nextrightpts, status, error,Size(7,7),4,optical_flow_termination_criteria,OPTFLOW_LK_GET_MIN_EIGENVALS);
                    
                    //drawing featues
                    
                    Mat right_mask(g_greyscale_image_right.size(),CV_8UC3);
                    cvtColor(g_greyscale_image_right, right_mask, CV_GRAY2BGR);
                    
                    for(size_t i=0;i<nextrightpts.size();i++)
                    {
                        if(nextrightpts[i].x>=0 && nextrightpts[i].x<=WIDTH && nextrightpts[i].y>=0 && nextrightpts[i].y<=HEIGHT)
                        {
                            CvScalar line_color; line_color = CV_RGB(255,0,0);
                            //int line_thickness = 1;
                            CvPoint p,q;
                            p.x = (int) prevrightpts[i].x;
                            p.y = (int) prevrightpts[i].y;
                            q.x = (int) nextrightpts[i].x;
                            q.y = (int) nextrightpts[i].y;
//                            double angle; angle = atan2( (double) p.y - q.y, (double) p.x - q.x );
//                            double hypotenuse; hypotenuse = sqrt( pow((p.y - q.y),2) + pow((p.x - q.x),2) );
//                            
////                            //scale the line by a factor of 3
//                            q.x = (int) (p.x - 2 * hypotenuse * cos(angle));
//                            q.y = (int) (p.y - 2 * hypotenuse * sin(angle));
//
//                            cvLine( &prevrightimg, p, q, line_color, line_thickness, CV_AA, 0 );
//                            
//                            p.x = (int) (q.x + 9 * cos(angle + pi / 4));
//                            p.y = (int) (q.y + 9 * sin(angle + pi / 4));
//                            cvLine( &prevrightimg, p, q, line_color, line_thickness, CV_AA, 0 );
//                            p.x = (int) (q.x + 9 * cos(angle - pi / 4));
//                            p.y = (int) (q.y + 9 * sin(angle - pi / 4));
//                            cvLine( &prevrightimg, p, q, line_color, line_thickness, CV_AA, 0 );
//
                            arrowedLine(right_mask, p, q, line_color);
                            circle(right_mask, nextrightpts[i], 2, Scalar(0,255,0),-1);
                            
                            kpts.push_back(nextrightpts[i]);
                        }
                    }
                    
                    imshow("right", right_mask);
                    moveWindow("right", 500, 0);
                    
                }
                
                
                
                if(frame_num>0)
                {
                    if(nextrightpts.size()<=MIN_FEATURES)
                    {
                        goodFeaturesToTrack(g_greyscale_image_right, prevrightpts, MAX_FEATURES, QUALITY_EIGVALUE, MIN_DISTANCE);
                    }
                    else
                    {
                        prevrightpts.assign(kpts.begin(), kpts.end());
                    }
                }
                
                g_greyscale_image_right.copyTo(prevrightimg);
                // frame_num is only advanced here, i.e. when a RIGHT image
                // arrived for this pair.
                frame_num++;
            }
            
            
            //Stero matching
            
//            Mat g_l,g_r;
//            
//            cvtColor(g_greyscale_image_left, g_l, CV_BGR2GRAY);
//            cvtColor(g_greyscale_image_right,g_r, CV_BGR2GRAY);
            
            Mat imgDisparity16S = Mat( g_greyscale_image_left.rows, g_greyscale_image_left.cols, CV_16S );
            Mat imgDisparity8U = Mat( g_greyscale_image_left.rows, g_greyscale_image_left.cols, CV_8UC1 );
            
            int ndisparities = 16*4;
            int SADWindowSize = 7;
            
            Ptr<StereoBM> sbm = StereoBM::create( ndisparities, SADWindowSize );
            
            sbm->compute( g_greyscale_image_left, g_greyscale_image_right, imgDisparity16S );
            
            double minVal; double maxVal;
            
            minMaxLoc( imgDisparity16S, &minVal, &maxVal );
            
            printf("Min disp: %f Max value: %f \n", minVal, maxVal);
            
            // Normalise the 16-bit disparity into 0..255 for display.
            imgDisparity16S.convertTo( imgDisparity8U, CV_8UC1, 255/(maxVal - minVal));
            
            namedWindow( "windowDisparity", WINDOW_AUTOSIZE );
            imshow( "windowDisparity", imgDisparity8U );
            moveWindow("windowDisparity", 500, 500);

            
        }
        waitKey(1);
        
    }
    
    g_lock.leave();
    g_event.set_DJI_event();
    
    
    return 0;
}
예제 #6
0
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
    if (e_image == data_type && NULL != content)
    {        
        image_data* data = (image_data*)content;
        dji_guidance::multi_image msg;

        // forward facing guidance sensor is disabled for now...
        //msg.images.push_back(create_image_message(data, e_vbus1));
        msg.images.push_back(create_image_message(data, e_vbus2));
        msg.images.push_back(create_image_message(data, e_vbus3));
        msg.images.push_back(create_image_message(data, e_vbus4));
        //msg.images.push_back(create_image_message(data, e_vbus5));

        image_pub.publish(msg);
        std::cout << "published " << msg.images.size() << " images" << std::endl;
    }

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        // printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        // printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
    
        // publish imu data
        geometry_msgs::TransformStamped g_imu;
        g_imu.header.frame_id = "guidance";
        g_imu.header.stamp    = ros::Time::now();
        g_imu.transform.translation.x = imu_data->acc_x;
        g_imu.transform.translation.y = imu_data->acc_y;
        g_imu.transform.translation.z = imu_data->acc_z;
        g_imu.transform.rotation.w = imu_data->q[0];
        g_imu.transform.rotation.x = imu_data->q[1];
        g_imu.transform.rotation.y = imu_data->q[2];
        g_imu.transform.rotation.z = imu_data->q[3];
        imu_pub.publish(g_imu);
    }
    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        // printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        // printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
    
        // publish velocity
        geometry_msgs::Vector3Stamped g_vo;
        g_vo.header.frame_id = "guidance";
        g_vo.header.stamp    = ros::Time::now();
        g_vo.vector.x = 0.001f * vo->vx;
        g_vo.vector.y = 0.001f * vo->vy;
        g_vo.vector.z = 0.001f * vo->vz;
        velocity_pub.publish(g_vo);
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}
// SDK event callback: only the IMU branch (republished as sensor_msgs-style
// imu_p via imu_m) and the velocity branch are active; image, obstacle
// distance and ultrasonic handling are commented out.
//
// NOTE(review): this function compiles only because of a delicate comment
// layout. The closing "}" of the imu `if` that appears inside the first
// "/* printf ... */" comment is commented out; the "/*" opened after the
// RPY printout is NOT closed by the deliberate "* /" (with a space) and
// runs until the "*/" after the covariance block, swallowing the
// quaternionTFToMsg call and one "}". Overall brace balance is restored
// by the "}" just after imu_m.publish(). Do not reflow these comments.
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();


    /* image data
    if (e_image == data_type && NULL != content)
    {        
        image_data data;
        memcpy((char*)&data, content, sizeof(data));

        memcpy(g_greyscale_image_left.data, data.m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
        memcpy(g_greyscale_image_right.data, data.m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
        memcpy(g_depth.data, data.m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);

        Mat depth8(HEIGHT, WIDTH, CV_8UC1);
        g_depth.convertTo(depth8, CV_8UC1);
        imshow("left",  g_greyscale_image_left);
	imshow("right", g_greyscale_image_right);
        imshow("depth", depth8);
        key = waitKey(1);

        //publish depth image
        cv_bridge::CvImage depth_16;
        g_depth.copyTo(depth_16.image);
        depth_16.header.frame_id  = "guidance";
        depth_16.header.stamp	  = ros::Time::now();
        depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
        depth_image_pub.publish(depth_16.toImageMsg());

        // publish left greyscale image
        cv_bridge::CvImage left_8;
        g_greyscale_image_left.copyTo(left_8.image);
        left_8.header.frame_id  = "guidance";
        left_8.header.stamp	= ros::Time::now();
        left_8.encoding		= sensor_msgs::image_encodings::MONO8;
        left_image_pub.publish(left_8.toImageMsg());

	// publish right greyscale image
        cv_bridge::CvImage right_8;
        g_greyscale_image_left.copyTo(right_8.image);
        right_8.header.frame_id  = "guidance";
        right_8.header.stamp	 = ros::Time::now();
        right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
        right_image_pub.publish(right_8.toImageMsg());
    }*/

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
       /* printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
 	
    	// publish imu data
	geometry_msgs::TransformStamped g_imu;
	g_imu.header.frame_id = "guidance";
	g_imu.header.stamp    = ros::Time::now();
	g_imu.transform.translation.x = imu_data->acc_x;
	g_imu.transform.translation.y = imu_data->acc_y;
	g_imu.transform.translation.z = imu_data->acc_z;
	g_imu.transform.rotation.x = imu_data->q[0];
	g_imu.transform.rotation.y = imu_data->q[1];
	g_imu.transform.rotation.z = imu_data->q[2];
	g_imu.transform.rotation.w = imu_data->q[3];
	imu_pub.publish(g_imu);
    }

    */




    // Still inside the imu `if` here (see NOTE above). Fill the outgoing
    // IMU message; imu_p / imu_m are presumably file-scope globals --
    // TODO confirm. Quaternion order assumed [w? x?]: q[0]->x here, which
    // conflicts with other samples mapping q[0]->w -- verify.
    imu_p.header.stamp = ros::Time::now();
    imu_p.header.frame_id = "imu";
    imu_p.orientation.x = imu_data->q[0];
    imu_p.orientation.y = imu_data->q[1];
    imu_p.orientation.z = imu_data->q[2];
    imu_p.orientation.w = imu_data->q[3];
imu_p.linear_acceleration.x = imu_data->acc_x;
imu_p.linear_acceleration.y = imu_data->acc_y;
imu_p.linear_acceleration.z = imu_data->acc_z;
  double roll, pitch, yaw;

  // Derive RPY from the just-published orientation (debug only below).
  tf::Quaternion orientation;
  tf::quaternionMsgToTF(imu_p.orientation, orientation);
  tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
  //printf( "roll:%f pitch:%f yaw:%f\n", roll, pitch, yaw );
 //printf( "imu_data->acc_x:%f imu_data->acc_y:%f imu_data->acc_z:%f\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z);
/*
if (se=="abc") orientation.setRPY(roll, pitch, yaw);
else 
{


if(se=="acb") orientation.setRPY(roll, yaw, pitch);
 else if(se=="bac") orientation.setRPY(pitch, roll, yaw);
  else if(se=="bca") orientation.setRPY(pitch, yaw, roll);
   else if(se=="cab") orientation.setRPY(yaw, roll, pitch);
    else if(se=="cba") orientation.setRPY(yaw, pitch, roll);

/*
yaw=0;

if(se=="acb") orientation.setRPY(roll, -pitch, yaw);
 else if(se=="bac") orientation.setRPY(roll, pitch, -yaw);
  else if(se=="bca") orientation.setRPY(roll, -pitch, -yaw);
   else if(se=="cab") orientation.setRPY(-roll, -pitch, yaw);
    else if(se=="cba") orientation.setRPY(-roll, pitch, -yaw);
else if(se=="aaa") orientation.setRPY(-roll, -pitch, -yaw);
else if(se=="bbb") orientation.setRPY(-roll, pitch, yaw);

else if(se=="ccc") orientation.setRPY(roll, pitch, yaw);

* /
 tf::quaternionTFToMsg(orientation, imu_p.orientation);
}
/*
imu_p.linear_acceleration.x = imu_data->acc_x;
imu_p.linear_acceleration.y = imu_data->acc_y;
imu_p.linear_acceleration.z = imu_data->acc_z;

imu_p.orientation_covariance[0]=0.0012250000000000002;
imu_p.orientation_covariance[4]=0.0012250000000000002;
imu_p.orientation_covariance[8]=0.0012250000000000002;
imu_p.linear_acceleration_covariance[0]=0.00031329000000000003;
imu_p.linear_acceleration_covariance[4]=0.00031329000000000003;
imu_p.linear_acceleration_covariance[8]=0.00031329000000000003;
imu_p.angular_velocity_covariance[0]=6.25e-06;
imu_p.angular_velocity_covariance[4]=6.25e-06;
imu_p.angular_velocity_covariance[8]=6.25e-06;


  tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
    
*/

// Publish the assembled IMU message; the "}" below closes the imu `if`.
imu_m.publish(imu_p);
    }










    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
// `flag` gates the one-shot debug printout; cleared once motion is seen.
if(flag)
{
printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
       printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
}
       // printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        //printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
	
	// publish velocity
	geometry_msgs::Vector3Stamped g_vo;
	g_vo.header.frame_id = "guidance";
	g_vo.header.stamp    = ros::Time::now();
	g_vo.vector.x = 0.001f * vo->vx;
	g_vo.vector.y = 0.001f * vo->vy;
	g_vo.vector.z = 0.001f * vo->vz;
	velocity_pub.publish(g_vo);
if(abs(vo->vx) >  0) flag=false;
    }

    /* obstacle distance
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );

	// publish obstacle distance
	sensor_msgs::LaserScan g_oa;
        g_oa.ranges.resize(5);
	g_oa.header.frame_id = "guidance";
	g_oa.header.stamp    = ros::Time::now();
	g_oa.ranges[0] = 0.01f * oa->distance[0];
	g_oa.ranges[1] = 0.01f * oa->distance[1];
	g_oa.ranges[2] = 0.01f * oa->distance[2];
	g_oa.ranges[3] = 0.01f * oa->distance[3];
	g_oa.ranges[4] = 0.01f * oa->distance[4];
	obstacle_distance_pub.publish(g_oa);
    }*/

    /* ultrasonic
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
	
	// publish ultrasonic data
	sensor_msgs::LaserScan g_ul;
        g_ul.ranges.resize(5);
        g_ul.intensities.resize(5);
	g_ul.header.frame_id = "guidance";
	g_ul.header.stamp    = ros::Time::now();
	g_ul.ranges[0] = 0.001f * ultrasonic->ultrasonic[0];
	g_ul.ranges[1] = 0.001f * ultrasonic->ultrasonic[1];
	g_ul.ranges[2] = 0.001f * ultrasonic->ultrasonic[2];
	g_ul.ranges[3] = 0.001f * ultrasonic->ultrasonic[3];
	g_ul.ranges[4] = 0.001f * ultrasonic->ultrasonic[4];
	g_ul.intensities[0] = 1.0 * ultrasonic->reliability[0];
	g_ul.intensities[1] = 1.0 * ultrasonic->reliability[1];
	g_ul.intensities[2] = 1.0 * ultrasonic->reliability[2];
	g_ul.intensities[3] = 1.0 * ultrasonic->reliability[3];
	g_ul.intensities[4] = 1.0 * ultrasonic->reliability[4];
	ultrasonic_pub.publish(g_ul);
    }*/

    g_lock.leave();
    g_event.set_event();

    return 0;
}
예제 #8
0
파일: run.cpp 프로젝트: 39M/Matrice100
int main(int argc, const char** argv)
{
	if (argc>1)
		help();

	// Create the KCFTracker object with one of the available options
	// HOG FIXEDWINDOW MULTISCALE LAB
	KCFTracker tracker(false, true, true, false);
	
	VideoWriter	vWriter("result.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25, Size(WIDTH, HEIGHT), false);
	Rect trackWindow;
	int hsize[] = { 16, 16 };
	float hranges[] = { 0, 255 };
	float dranges[] = { 0, 255 };

	const float* phranges[] = { hranges, dranges };

	selection = Rect(10, 10, 100, 100);
	trackObject = 0;

	namedWindow(winDemoName, CV_WINDOW_AUTOSIZE);
	setMouseCallback(winDemoName, onMouse, 0);

	bool paused = false;

	// Connect to Guidance and subscribe data
	reset_config();
	int err_code = init_transfer();
	RETURN_IF_ERR(err_code);

	err_code = select_greyscale_image(selected_vbus, true);
	RELEASE_IF_ERR(err_code);

	err_code = select_greyscale_image(selected_vbus, false);
	RELEASE_IF_ERR(err_code);

	err_code = select_depth_image(selected_vbus);
	RELEASE_IF_ERR(err_code);

	err_code = set_sdk_event_handler(my_callback);
	RELEASE_IF_ERR(err_code);

	err_code = start_transfer();
	RELEASE_IF_ERR(err_code);

	Mat depth(HEIGHT, WIDTH, CV_8UC1);

	for (int times = 0; times < 30000; ++times) {
		g_event.wait_event();

		// filter depth image
		filterSpeckles(g_depth, -16, 50, 20);
		// convert 16 bit depth image to 8 bit
		g_depth.convertTo(depth, CV_8UC1);

		imshow("depth", depth);
		waitKey(1);
		g_imleft.copyTo(image);
		im.clear();
		for (int i = 0; i < 3; i++)
			im.push_back(image);
		merge(im, image);

		Rect new_rect;
		if (!paused) {
			if (trackObject) {
				if (select_frame_count > 0) {
					new_rect = tracker.update(image);
					rectangle(image, new_rect, Scalar(255), 3);
				}
				else {
					tracker.init(selection, image);
					select_frame_count++;
				}

				trackObject = 1;
			}
			else if (trackObject < 0) {
				paused = false;
			}
		}

		if (selectObject && selection.width > 0 && selection.height > 0) {
			rectangle(image, selection, Scalar(255), 3);
		}

		imshow(winDemoName, image);
		vWriter << image;

		char c = (char)waitKey(10);
		if (c == 27 || c == 'q') {
			break;
		}
		switch (c) {
		case 'c':
			trackObject = 0;
			break;
		case 'p':
		case ' ':
			paused = !paused;
			break;
		default:
			;
		}
	}

	err_code = stop_transfer();
	RELEASE_IF_ERR(err_code);

	sleep(100000);

	err_code = release_transfer();
	RETURN_IF_ERR(err_code);

	return 0;
}
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();

    /* image data */
    if (e_image == data_type && NULL != content)
    {
        ros::Time time_in_this_loop = ros::Time::now();
        image_data* data = (image_data*)content;

        if ( data->m_greyscale_image_left[CAMERA_ID] ) {
            memcpy(g_greyscale_image_left.data, data->m_greyscale_image_left[CAMERA_ID], IMAGE_SIZE);
            imshow("left",  g_greyscale_image_left);
            // publish left greyscale image
            cv_bridge::CvImage left_8;
            g_greyscale_image_left.copyTo(left_8.image);
            left_8.header.frame_id  = "guidance";
            left_8.header.stamp	= time_in_this_loop;
            left_8.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub.publish(left_8.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera_params_left, g_cam_info_left);
                cam_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID] ) {
            memcpy(g_greyscale_image_right.data, data->m_greyscale_image_right[CAMERA_ID], IMAGE_SIZE);
            imshow("right", g_greyscale_image_right);
            // publish right greyscale image
            cv_bridge::CvImage right_8;
            g_greyscale_image_right.copyTo(right_8.image);
            right_8.header.frame_id  = "guidance";
            right_8.header.stamp	 = time_in_this_loop;
            right_8.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub.publish(right_8.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera_params_right, g_cam_info_right);
                cam_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_depth_image[CAMERA_ID] ) {
            memcpy(g_depth.data, data->m_depth_image[CAMERA_ID], IMAGE_SIZE * 2);
            g_depth.convertTo(depth8, CV_8UC1);
            imshow("depth", depth8);
            //publish depth image
            cv_bridge::CvImage depth_16;
            g_depth.copyTo(depth_16.image);
            depth_16.header.frame_id  = "guidance";
            depth_16.header.stamp	  = ros::Time::now();
            depth_16.encoding	  = sensor_msgs::image_encodings::MONO16;
            depth_image_pub.publish(depth_16.toImageMsg());
        }
//left image pair(ID=2)
        if ( data->m_greyscale_image_left[CAMERA_ID_2] ) {
            memcpy(g_greyscale_image_left_2.data, data->m_greyscale_image_left[CAMERA_ID_2], IMAGE_SIZE);
            imshow("left_2",  g_greyscale_image_left_2);
            // publish left greyscale image
            cv_bridge::CvImage left_8_2;
            g_greyscale_image_left_2.copyTo(left_8_2.image);
            left_8_2.header.frame_id  = "guidance2";
            left_8_2.header.stamp	= time_in_this_loop;
            left_8_2.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_2.publish(left_8_2.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance2";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera2_params_left, g_cam_info_left);
                cam2_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_2] ) {
            memcpy(g_greyscale_image_right_2.data, data->m_greyscale_image_right[CAMERA_ID_2], IMAGE_SIZE);
            imshow("right_2", g_greyscale_image_right_2);
            // publish right greyscale image
            cv_bridge::CvImage right_8_2;
            g_greyscale_image_right_2.copyTo(right_8_2.image);
            right_8_2.header.frame_id  = "guidance2";
            right_8_2.header.stamp	 = time_in_this_loop;
            right_8_2.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_2.publish(right_8_2.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance2";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera2_params_right, g_cam_info_right);
                cam2_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
//        if ( data->m_depth_image[CAMERA_ID_2] ) {
//            memcpy(g_depth_2.data, data->m_depth_image[CAMERA_ID_2], IMAGE_SIZE * 2);
//            g_depth_2.convertTo(depth8_2, CV_8UC1);
//            imshow("depth_2", depth8_2);
//            //publish depth image
//            cv_bridge::CvImage depth_16_2;
//            g_depth_2.copyTo(depth_16_2.image);
//            depth_16_2.header.frame_id  = "guidance2";
//            depth_16_2.header.stamp	  = ros::Time::now();
//            depth_16_2.encoding	  = sensor_msgs::image_encodings::MONO16;
//            depth_image_pub_2.publish(depth_16_2.toImageMsg());
//        }
        //camera 3
        if ( data->m_greyscale_image_left[CAMERA_ID_3] ) {
            memcpy(g_greyscale_image_left_3.data, data->m_greyscale_image_left[CAMERA_ID_3], IMAGE_SIZE);
            imshow("left_3",  g_greyscale_image_left_3);
            // publish left greyscale image
            cv_bridge::CvImage left_8_3;
            g_greyscale_image_left_3.copyTo(left_8_3.image);
            left_8_3.header.frame_id  = "guidance3";
            left_8_3.header.stamp	= time_in_this_loop;
            left_8_3.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_3.publish(left_8_3.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance3";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera3_params_left, g_cam_info_left);
                cam3_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_3] ) {
            memcpy(g_greyscale_image_right_3.data, data->m_greyscale_image_right[CAMERA_ID_3], IMAGE_SIZE);
            imshow("right_3", g_greyscale_image_right_3);
            // publish right greyscale image
            cv_bridge::CvImage right_8_3;
            g_greyscale_image_right_3.copyTo(right_8_3.image);
            right_8_3.header.frame_id  = "guidance3";
            right_8_3.header.stamp	 = time_in_this_loop;
            right_8_3.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_3.publish(right_8_3.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance3";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera3_params_right, g_cam_info_right);
                cam3_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }

        //camera 5
        if ( data->m_greyscale_image_left[CAMERA_ID_5] ) {
            memcpy(g_greyscale_image_left_5.data, data->m_greyscale_image_left[CAMERA_ID_5], IMAGE_SIZE);
            imshow("left_5",  g_greyscale_image_left_5);
            // publish left greyscale image
            cv_bridge::CvImage left_8_5;
            g_greyscale_image_left_5.copyTo(left_8_5.image);
            left_8_5.header.frame_id  = "guidance5";
            left_8_5.header.stamp	= time_in_this_loop;
            left_8_5.encoding		= sensor_msgs::image_encodings::MONO8;
            left_image_pub_5.publish(left_8_5.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_left;
            g_cam_info_left.header.stamp = time_in_this_loop;
            g_cam_info_left.header.frame_id = "guidance5";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera5_params_left, g_cam_info_left);
                cam5_info_left_pub.publish(g_cam_info_left);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }
        if ( data->m_greyscale_image_right[CAMERA_ID_5] ) {
            memcpy(g_greyscale_image_right_5.data, data->m_greyscale_image_right[CAMERA_ID_5], IMAGE_SIZE);
            imshow("right_5", g_greyscale_image_right_5);
            // publish right greyscale image
            cv_bridge::CvImage right_8_5;
            g_greyscale_image_right_5.copyTo(right_8_5.image);
            right_8_5.header.frame_id  = "guidance5";
            right_8_5.header.stamp	 = time_in_this_loop;
            right_8_5.encoding  	 = sensor_msgs::image_encodings::MONO8;
            right_image_pub_5.publish(right_8_5.toImageMsg());

            sensor_msgs::CameraInfo g_cam_info_right;
            g_cam_info_right.header.stamp = time_in_this_loop;
            g_cam_info_right.header.frame_id = "guidance5";

            try {
                read_params_from_yaml_and_fill_cam_info_msg(camera5_params_right, g_cam_info_right);
                cam5_info_right_pub.publish(g_cam_info_right);
            } catch(...) {
                // if yaml fails to read data, don't try to publish
            }
        }

        key = waitKey(1);
    }

    /* imu */
    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "frame index: %d, stamp: %d\n", imu_data->frame_index, imu_data->time_stamp );
        printf( "imu: [%f %f %f %f %f %f %f]\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );

        // publish imu data
        geometry_msgs::TransformStamped g_imu;
        g_imu.header.frame_id = "guidance";
        g_imu.header.stamp    = ros::Time::now();
        g_imu.transform.translation.x = imu_data->acc_x;
        g_imu.transform.translation.y = imu_data->acc_y;
        g_imu.transform.translation.z = imu_data->acc_z;
        g_imu.transform.rotation.w = imu_data->q[0];
        g_imu.transform.rotation.x = imu_data->q[1];
        g_imu.transform.rotation.y = imu_data->q[2];
        g_imu.transform.rotation.z = imu_data->q[3];
        imu_pub.publish(g_imu);
    }
    /* velocity */
    if ( e_velocity == data_type && NULL != content )
    {
        velocity *vo = (velocity*)content;
        printf( "frame index: %d, stamp: %d\n", vo->frame_index, vo->time_stamp );
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );

        // publish velocity
        geometry_msgs::Vector3Stamped g_vo;
        g_vo.header.frame_id = "guidance";
        g_vo.header.stamp    = ros::Time::now();
        g_vo.vector.x = 0.001f * vo->vx;
        g_vo.vector.y = 0.001f * vo->vy;
        g_vo.vector.z = 0.001f * vo->vz;
        velocity_pub.publish(g_vo);
    }

    /* obstacle distance */
    if ( e_obstacle_distance == data_type && NULL != content )
    {
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "frame index: %d, stamp: %d\n", oa->frame_index, oa->time_stamp );
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );

        // publish obstacle distance
        sensor_msgs::LaserScan g_oa;
        g_oa.ranges.resize(CAMERA_PAIR_NUM);
        g_oa.header.frame_id = "guidance";
        g_oa.header.stamp    = ros::Time::now();
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
            g_oa.ranges[i] = 0.01f * oa->distance[i];
        obstacle_distance_pub.publish(g_oa);
    }

    /* ultrasonic */
    if ( e_ultrasonic == data_type && NULL != content )
    {
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        printf( "frame index: %d, stamp: %d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance: %f, reliability: %d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }

        // publish ultrasonic data
        sensor_msgs::LaserScan g_ul;
        g_ul.ranges.resize(CAMERA_PAIR_NUM);
        g_ul.intensities.resize(CAMERA_PAIR_NUM);
        g_ul.header.frame_id = "guidance";
        g_ul.header.stamp    = ros::Time::now();
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d ) {
            g_ul.ranges[d] = 0.001f * ultrasonic->ultrasonic[d];
            g_ul.intensities[d] = 1.0 * ultrasonic->reliability[d];
        }
        ultrasonic_pub.publish(g_ul);
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}
예제 #10
0
/**
 * Guidance SDK event handler (console demo).
 *
 * Copies each payload out of the SDK buffer, prints it, and — when OpenCV
 * is available — shows the greyscale/depth images.  Runs on the SDK
 * transfer thread; serialized by g_lock, and g_event wakes the waiting
 * main loop after every packet.  Always returns 0 (the SDK ignores it).
 */
int my_callback(int data_type, int data_len, char *content)
{
    g_lock.enter();
    if (e_image == data_type && NULL != content)
    {
        // Copy the payload into a local struct; `content` is only valid
        // for the duration of this callback.
        image_data data;
        memcpy( (char*)&data, content, sizeof(data) );
        printf( "frame index:%d,stamp:%d\n", data.frame_index, data.time_stamp );
#if !USE_GUIDANCE_ASSISTANT_CONFIG
        // Manual-subscription build: only the single pair `sensor_id` was
        // selected, so copy and show just that one.
#ifdef HAVE_OPENCV
        memcpy( g_greyscale_image_left.data, data.m_greyscale_image_left[sensor_id], IMAGE_SIZE );
        memcpy( g_greyscale_image_right.data, data.m_greyscale_image_right[sensor_id], IMAGE_SIZE );
        memcpy( g_depth.data, data.m_depth_image[sensor_id], IMAGE_SIZE * 2 );
        imshow("left", g_greyscale_image_left);
        imshow("right", g_greyscale_image_right);
#endif
#else
        // Assistant-config build: any subset of the pairs may be selected,
        // so scan all of them and show whatever arrived.
#ifdef HAVE_OPENCV
        // Depth scratch buffers hoisted out of the loop so they are
        // allocated once per callback instead of once per camera pair.
        Mat depthmap(HEIGHT, WIDTH, CV_16SC1);
        Mat depthmap8(HEIGHT, WIDTH, CV_8UC1);
#endif
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            string stmps;
            if ( data.m_greyscale_image_left[d] )
            {
#ifdef HAVE_OPENCV
                memcpy( g_greyscale_image_left.data, data.m_greyscale_image_left[d], IMAGE_SIZE );//maybe select too many image so just overwrite it
                stmps = "left";
                stmps = stmps + (char)('0'+d);
                // Unified with the other calls: pass the string directly
                // (was stmps.c_str()).
                imshow(stmps, g_greyscale_image_left);
#endif
            }
            if ( data.m_greyscale_image_right[d] )
            {
#ifdef HAVE_OPENCV
                memcpy( g_greyscale_image_right.data, data.m_greyscale_image_right[d], IMAGE_SIZE );
                stmps = "right";
                stmps = stmps + (char)('0'+d);
                imshow(stmps, g_greyscale_image_right);
#endif
            }
            if ( data.m_depth_image[d] )
            {
#ifdef HAVE_OPENCV
                // Depth is 16-bit (IMAGE_SIZE * 2 bytes); convert to 8-bit
                // purely for display.
                memcpy( depthmap.data, data.m_depth_image[d], IMAGE_SIZE * 2 );
                depthmap.convertTo(depthmap8, CV_8UC1);
                stmps = "depthmap";
                stmps = stmps + (char)('0'+d);
                imshow(stmps, depthmap8);
#endif
            }
        }
#endif

#ifdef HAVE_OPENCV
        // Pump the HighGUI event loop so the windows refresh.
        waitKey(1);
#endif

    }

    if ( e_imu == data_type && NULL != content )
    {
        imu *imu_data = (imu*)content;
        printf( "imu:%f %f %f,%f %f %f %f\n", imu_data->acc_x, imu_data->acc_y, imu_data->acc_z, imu_data->q[0], imu_data->q[1], imu_data->q[2], imu_data->q[3] );
        printf( "frame index:%d,stamp:%d\n", imu_data->frame_index, imu_data->time_stamp );
    }

    if ( e_velocity == data_type && NULL != content )
    {
        // SDK reports velocity in mm/s; print as m/s.
        velocity *vo = (velocity*)content;
        printf( "vx:%f vy:%f vz:%f\n", 0.001f * vo->vx, 0.001f * vo->vy, 0.001f * vo->vz );
        printf( "frame index:%d,stamp:%d\n", vo->frame_index, vo->time_stamp );
    }

    if ( e_obstacle_distance == data_type && NULL != content )
    {
        // SDK reports obstacle distance in cm; print as m.
        obstacle_distance *oa = (obstacle_distance*)content;
        printf( "obstacle distance:" );
        for ( int i = 0; i < CAMERA_PAIR_NUM; ++i )
        {
            printf( " %f ", 0.01f * oa->distance[i] );
        }
        printf( "\n" );
        printf( "frame index:%d,stamp:%d\n", oa->frame_index, oa->time_stamp );
    }

    if ( e_ultrasonic == data_type && NULL != content )
    {
        // SDK reports ultrasonic range in mm; print as m with reliability.
        ultrasonic_data *ultrasonic = (ultrasonic_data*)content;
        for ( int d = 0; d < CAMERA_PAIR_NUM; ++d )
        {
            printf( "ultrasonic distance:%f,reliability:%d\n", ultrasonic->ultrasonic[d] * 0.001f, (int)ultrasonic->reliability[d] );
        }
        printf( "frame index:%d,stamp:%d\n", ultrasonic->frame_index, ultrasonic->time_stamp );
    }

    g_lock.leave();
    g_event.set_event();

    return 0;
}