Example #1
int main(int argc, const char** argv)
{
    reset_config();
    int err_code = init_transfer();
    RETURN_IF_ERR( err_code );

#if !USE_GUIDANCE_ASSISTANT_CONFIG
    err_code = select_greyscale_image( sensor_id, true );
    RETURN_IF_ERR( err_code );
    err_code = select_greyscale_image( sensor_id, false );
    RETURN_IF_ERR( err_code );
    err_code = select_depth_image( sensor_id );
    RETURN_IF_ERR( err_code );
    select_imu();
    select_ultrasonic();
    select_obstacle_distance();
    select_velocity();
#endif

    err_code = set_sdk_event_handler( my_callback );
    RETURN_IF_ERR( err_code );
    err_code = start_transfer();
    RETURN_IF_ERR( err_code );

    for ( int j = 0; j < 1000; ++j )
    {
        g_event.wait_event();
    }

    err_code = stop_transfer();
    RETURN_IF_ERR( err_code );
    //make sure the ack packet from GUIDANCE is received
    sleep( 1000000 );
    err_code = release_transfer();
    RETURN_IF_ERR( err_code );

    return 0;
}
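
All three examples rely on scaffolding that is defined elsewhere in the DJI Guidance SDK sample code and is not shown here: the RETURN_IF_ERR / RELEASE_IF_ERR error macros, the my_callback handler registered with set_sdk_event_handler(), and the g_event object the main loop blocks on. The sketch below only illustrates what that scaffolding typically looks like in the SDK samples; the exact definitions, header names, and the callback body are assumptions, not part of the quoted examples.

#include <cstdio>
#include "DJI_guidance.h"   // Guidance SDK API header (assumed include name)
#include "DJI_utility.h"    // DJI_event helper shipped with the SDK samples (assumed)

// Assumed error-handling macro: report the failing call site and bail out of main().
#define RETURN_IF_ERR(err_code)  do { if (err_code) { \
    printf("SDK error %d in %s at line %d\n", (int)(err_code), __FILE__, __LINE__); \
    return -1; } } while (0)

// Assumed variant used in Example #3: also release the transfer before returning.
#define RELEASE_IF_ERR(err_code) do { if (err_code) { \
    release_transfer(); \
    printf("SDK error %d in %s at line %d\n", (int)(err_code), __FILE__, __LINE__); \
    return -1; } } while (0)

DJI_event g_event;   // signalled once per received data packet

// Assumed shape of the data callback registered with set_sdk_event_handler():
// the SDK invokes it once per packet, and `content` is interpreted according to
// `data_type` (greyscale/depth image, IMU, ultrasonic, obstacle distance, velocity).
int my_callback(int data_type, int data_len, char* content)
{
    // ...copy the payload somewhere the main thread can read it...
    g_event.set_event();   // wake the g_event.wait_event() call in main()
    return 0;
}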
Example #2
int main(int argc, char** argv)
{
    /*
    if (argc > 1) {
        printf("This is a demo program showing data from Guidance.\n\t"
            " 'a','d','w','s','x' to select sensor direction.\n\t"
            " 'j','k' to change the exposure parameters.\n\t"
            " 'm' to switch between AEC and constant exposure modes.\n\t"
            " 'n' to return to default exposure mode and parameters.\n\t"
            " 'q' to quit.");
        return 0;
    }
    */

    /* initialize ros */
    ros::init(argc, argv, "GuidanceNode");
    ros::NodeHandle my_node;
  //  depth_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/depth_image",1);
   // left_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/left_image",1);
   // right_image_pub			= my_node.advertise<sensor_msgs::Image>("/guidance/right_image",1);
    //imu_pub  				= my_node.advertise<geometry_msgs::TransformStamped>("/guidance/imu",1);
    //velocity_pub  			= my_node.advertise<geometry_msgs::Vector3Stamped>("/guidance/velocity",1);
    obstacle_distance_pub	= my_node.advertise<sensor_msgs::LaserScan>("/guidance/obstacle_distance",1);
    //ultrasonic_pub			= my_node.advertise<sensor_msgs::LaserScan>("/guidance/ultrasonic",1);

    /* initialize guidance */
    reset_config();
    int err_code = init_transfer();
    RETURN_IF_ERR(err_code);

    int online_status[CAMERA_PAIR_NUM];
    err_code = get_online_status(online_status);
    RETURN_IF_ERR(err_code);
    std::cout << "Sensor online status: ";
    for (int i = 0; i < CAMERA_PAIR_NUM; i++)
        std::cout << online_status[i] << " ";
    std::cout << std::endl;

    // get calibration parameters
    stereo_cali cali[CAMERA_PAIR_NUM];
    err_code = get_stereo_cali(cali);
    RETURN_IF_ERR(err_code);
    std::cout << "cu\tcv\tfocal\tbaseline\n";
    for (int i = 0; i < CAMERA_PAIR_NUM; i++)
    {
        std::cout << cali[i].cu << "\t" << cali[i].cv << "\t" << cali[i].focal << "\t" << cali[i].baseline << std::endl;
    }
	
    /* select data */
    err_code = select_greyscale_image(CAMERA_ID, true);
    RETURN_IF_ERR(err_code);
    err_code = select_greyscale_image(CAMERA_ID, false);
    RETURN_IF_ERR(err_code);
    err_code = select_depth_image(CAMERA_ID);
    RETURN_IF_ERR(err_code);
    select_imu();
    select_ultrasonic();
    select_obstacle_distance();
    select_velocity();
    /* start data transfer */
    err_code = set_sdk_event_handler(my_callback);
    RETURN_IF_ERR(err_code);
    err_code = start_transfer();
    RETURN_IF_ERR(err_code);
	
    // for setting exposure
    exposure_param para;
    para.m_is_auto_exposure = 1;
    para.m_step = 10;
    para.m_expected_brightness = 120;
    para.m_camera_pair_index = CAMERA_ID;

    std::cout << "start_transfer" << std::endl;

    while (ros::ok())
    {
        g_event.wait_event();
        if (key > 0)
        {
            // set exposure parameters
            if (key == 'j' || key == 'k' || key == 'm' || key == 'n')
            {
                if (key == 'j')
                {
                    if (para.m_is_auto_exposure) para.m_expected_brightness += 20;
                    else para.m_exposure_time += 3;
                }
                else if (key == 'k')
                {
                    if (para.m_is_auto_exposure) para.m_expected_brightness -= 20;
                    else para.m_exposure_time -= 3;
                }
                else if (key == 'm')
                {
                    para.m_is_auto_exposure = !para.m_is_auto_exposure;
                    std::cout << "exposure is " << para.m_is_auto_exposure << std::endl;
                }
                else if (key == 'n')
                {
                    // return to default
                    para.m_expected_brightness = para.m_exposure_time = 0;
                }

                std::cout << "Setting exposure parameters....SensorId=" << CAMERA_ID << std::endl;
                para.m_camera_pair_index = CAMERA_ID;
                set_exposure_param(&para);
                key = 0;
            }
            else
            {
                // switch image direction
                err_code = stop_transfer();
                RETURN_IF_ERR(err_code);
                reset_config();

                if (key == 'q') break;
                if (key == 'w') CAMERA_ID = e_vbus1;
                if (key == 'd') CAMERA_ID = e_vbus2;
                if (key == 'x') CAMERA_ID = e_vbus3;
                if (key == 'a') CAMERA_ID = e_vbus4;
                if (key == 's') CAMERA_ID = e_vbus5;

                select_greyscale_image(CAMERA_ID, true);
                select_greyscale_image(CAMERA_ID, false);
                select_depth_image(CAMERA_ID);

                err_code = start_transfer();
                RETURN_IF_ERR(err_code);
                key = 0;
            }
        }
        ros::spinOnce();
    }

    /* release data transfer */
    err_code = stop_transfer();
    RETURN_IF_ERR(err_code);
    // make sure the ack packet from GUIDANCE is received
    sleep(1);
    std::cout << "release_transfer" << std::endl;
    err_code = release_transfer();
    RETURN_IF_ERR(err_code);

    return 0;
}
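
Example #2 drives the camera exposure through an exposure_param value passed to set_exposure_param(). The struct itself comes from the Guidance SDK header and is not shown above; the sketch below reconstructs it from the fields the snippet actually touches, so the exact types, ordering, and any further members are assumptions to be checked against DJI_guidance.h.

// Assumed layout of exposure_param, reconstructed only from the fields used in Example #2.
typedef struct _exposure_param
{
    float        m_step;                 // step size for manual exposure adjustment
    float        m_exposure_time;        // manual exposure time; 0 = SDK default
    float        m_expected_brightness;  // target brightness for auto exposure; 0 = SDK default
    unsigned int m_is_auto_exposure;     // 1 = auto exposure (AEC), 0 = constant exposure
    int          m_camera_pair_index;    // vbus/camera pair the setting applies to
} exposure_param;

// Usage mirroring the example: put the selected camera pair into auto-exposure mode.
//   exposure_param para;
//   para.m_is_auto_exposure    = 1;
//   para.m_step                = 10;
//   para.m_expected_brightness = 120;
//   para.m_camera_pair_index   = CAMERA_ID;
//   set_exposure_param(&para);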
Example #3
File: run.cpp  Project: 39M/Matrice100
int main(int argc, const char** argv)
{
	if (argc>1)
		help();

	// Create the KCFTracker object; its constructor flags are (HOG, FIXEDWINDOW, MULTISCALE, LAB)
	KCFTracker tracker(false, true, true, false);
	
	VideoWriter	vWriter("result.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25, Size(WIDTH, HEIGHT), false);
	Rect trackWindow;
	int hsize[] = { 16, 16 };
	float hranges[] = { 0, 255 };
	float dranges[] = { 0, 255 };

	const float* phranges[] = { hranges, dranges };

	selection = Rect(10, 10, 100, 100);
	trackObject = 0;

	namedWindow(winDemoName, CV_WINDOW_AUTOSIZE);
	setMouseCallback(winDemoName, onMouse, 0);

	bool paused = false;

	// Connect to Guidance and subscribe data
	reset_config();
	int err_code = init_transfer();
	RETURN_IF_ERR(err_code);

	err_code = select_greyscale_image(selected_vbus, true);
	RELEASE_IF_ERR(err_code);

	err_code = select_greyscale_image(selected_vbus, false);
	RELEASE_IF_ERR(err_code);

	err_code = select_depth_image(selected_vbus);
	RELEASE_IF_ERR(err_code);

	err_code = set_sdk_event_handler(my_callback);
	RELEASE_IF_ERR(err_code);

	err_code = start_transfer();
	RELEASE_IF_ERR(err_code);

	Mat depth(HEIGHT, WIDTH, CV_8UC1);

	for (int times = 0; times < 30000; ++times) {
		g_event.wait_event();

		// filter depth image
		filterSpeckles(g_depth, -16, 50, 20);
		// convert 16 bit depth image to 8 bit
		g_depth.convertTo(depth, CV_8UC1);

		imshow("depth", depth);
		waitKey(1);
		g_imleft.copyTo(image);
		im.clear();
		for (int i = 0; i < 3; i++)
			im.push_back(image);
		merge(im, image);

		Rect new_rect;
		if (!paused) {
			if (trackObject) {
				if (select_frame_count > 0) {
					new_rect = tracker.update(image);
					rectangle(image, new_rect, Scalar(255), 3);
				}
				else {
					tracker.init(selection, image);
					select_frame_count++;
				}

				trackObject = 1;
			}
			else if (trackObject < 0) {
				paused = false;
			}
		}

		if (selectObject && selection.width > 0 && selection.height > 0) {
			rectangle(image, selection, Scalar(255), 3);
		}

		imshow(winDemoName, image);
		vWriter << image;

		char c = (char)waitKey(10);
		if (c == 27 || c == 'q') {
			break;
		}
		switch (c) {
		case 'c':
			trackObject = 0;
			break;
		case 'p':
		case ' ':
			paused = !paused;
			break;
		default:
			;
		}
	}

	err_code = stop_transfer();
	RELEASE_IF_ERR(err_code);

	sleep(100000);

	err_code = release_transfer();
	RETURN_IF_ERR(err_code);

	return 0;
}
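
Example #3 reads the selection, selectObject, and trackObject globals that its onMouse handler (registered via setMouseCallback) maintains, but the handler itself is not part of the snippet. Below is a plausible sketch modelled on the classic OpenCV mouse-selection demos; the globals and the exact behaviour are assumptions that merely mirror what the example appears to expect.

#include <algorithm>
#include <cstdlib>
#include <opencv2/opencv.hpp>
using namespace cv;

// Hypothetical globals mirroring the ones Example #3 reads (defined elsewhere in run.cpp).
Rect  selection;
Point origin;
bool  selectObject = false;
int   trackObject  = 0;

// Plausible mouse callback for setMouseCallback(winDemoName, onMouse, 0):
// drag with the left button to draw `selection`; releasing the button arms the tracker.
static void onMouse(int event, int x, int y, int /*flags*/, void* /*userdata*/)
{
    if (selectObject)
    {
        selection.x      = std::min(x, origin.x);
        selection.y      = std::min(y, origin.y);
        selection.width  = std::abs(x - origin.x);
        selection.height = std::abs(y - origin.y);
    }

    switch (event)
    {
    case CV_EVENT_LBUTTONDOWN:
        origin       = Point(x, y);
        selection    = Rect(x, y, 0, 0);
        selectObject = true;
        break;
    case CV_EVENT_LBUTTONUP:
        selectObject = false;
        if (selection.width > 0 && selection.height > 0)
            trackObject = -1;   // non-zero: the main loop starts/continues tracking
        break;
    }
}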