Example #1
// Copy basic properties from one VideoStream to another
void CopyGeneralProperties( const VideoStream& rSource, VideoStream& rTarget )
{
	rTarget.setVideoMode( rSource.getVideoMode() );

	// assign basic properties
	rTarget.setProperty( ONI_STREAM_PROPERTY_VERTICAL_FOV,		rSource.getVerticalFieldOfView() );
	rTarget.setProperty( ONI_STREAM_PROPERTY_HORIZONTAL_FOV,	rSource.getHorizontalFieldOfView() );
	rTarget.setProperty( ONI_STREAM_PROPERTY_MIRRORING,			rSource.getMirroringEnabled() );

	// assign depth-only properties
	rTarget.setProperty( ONI_STREAM_PROPERTY_MIN_VALUE,			rSource.getMinPixelValue() );
	rTarget.setProperty( ONI_STREAM_PROPERTY_MAX_VALUE,			rSource.getMaxPixelValue() );
}
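A minimal usage sketch for CopyGeneralProperties, not part of the original listing: it configures a depth stream on a second device to match a live one before starting it. The second-device URI and the overall setup are assumptions for illustration; error checking is omitted.

// Hypothetical usage sketch: mirror the settings of a live depth stream onto a
// depth stream created on a second device. Device URIs are assumptions.
#include <OpenNI.h>

int main()
{
	openni::OpenNI::initialize();

	openni::Device sourceDevice, targetDevice;
	sourceDevice.open( openni::ANY_DEVICE );
	targetDevice.open( "freenect2://1" );	// hypothetical URI of a second sensor

	openni::VideoStream sourceDepth, targetDepth;
	sourceDepth.create( sourceDevice, openni::SENSOR_DEPTH );
	targetDepth.create( targetDevice, openni::SENSOR_DEPTH );

	// copy the source configuration onto the target before starting it
	CopyGeneralProperties( sourceDepth, targetDepth );
	targetDepth.start();

	// ... use the streams ...

	targetDepth.destroy();
	sourceDepth.destroy();
	targetDevice.close();
	sourceDevice.close();
	openni::OpenNI::shutdown();
	return 0;
}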
void* camera_thread(void*){
  VideoFrameRef frame;
  VideoFrameRef rgbframe;
  int changed_stream;
  VideoStream* pStream;
  VideoStream* pRgbStream;
  sensor_msgs::CameraInfo rgb_info;
  sensor_msgs::CameraInfo depth_info;
  sensor_msgs::Image image;
  image.header.frame_id=frame_id;
  image.is_bigendian=0;

  int count = 0;
  int num_frames_stat=64;
  struct timeval previous_time;
  gettimeofday(&previous_time, 0);
  uint64_t last_stamp = 0;
  while(run){
    bool new_frame = false;
    uint64_t current_stamp = 0;
    // block until either the depth stream (index 0) or the RGB stream (index 1) has data
    openni::OpenNI::waitForAnyStream(streams, 2, &changed_stream);
    switch (changed_stream)
      {
	//DEPTH
      case 0:
	depth.readFrame(&frame);
	depth_info.header.stamp=ros::Time::now();
	image.header.stamp=ros::Time::now();
	if(!frame.isValid()) break;
	current_stamp=frame.getTimestamp();

	// frame timestamps are in microseconds; count a new frame only if it is >5 ms newer
	if (current_stamp-last_stamp>5000){
	  count++;
	  new_frame = true;
	}
	last_stamp = current_stamp;

	if (! (count % (_frame_skip)) ){
	  image.header.seq = count;
	  if (! _registration){
	    image.header.frame_id=frame_id+"_depth";
	  } else {
	    image.header.frame_id=frame_id+"_rgb";
	  }

	  depth_info.width=frame.getWidth();
	  depth_info.height=frame.getHeight();

	  depth_info.header.seq = count;

	  depth_info.distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
	  depth_info.D.resize(5, 0.0);

	  depth_info.K[0]=depth_info.width/(2*tan(depth.getHorizontalFieldOfView()/2)); //fx
	  depth_info.K[4]=depth_info.height/(2*tan(depth.getVerticalFieldOfView()/2)); //fy
	  depth_info.K[2]=depth_info.width/2; //cx
	  depth_info.K[5]=depth_info.height/2; //cy
	  depth_info.K[8]=1;

	  depth_info.header.frame_id=image.header.frame_id;
	  image.height=frame.getHeight();
	  image.width=frame.getWidth();
	  image.encoding="mono16";
	  image.step=frame.getWidth()*2;
	  image.data.resize(image.step*image.height);
	  memcpy((char*)(&image.data[0]),frame.getData(),image.height*image.width*2);
	  pub_depth.publish(image);
	  pub_camera_info_depth.publish(depth_info);
	}
	break;
	//RGB
      case 1:
	rgb.readFrame(&rgbframe);
	image.header.stamp=ros::Time::now();
	rgb_info.header.stamp=ros::Time::now();
	if(!rgbframe.isValid()) break;
	current_stamp=rgbframe.getTimestamp();

	if (current_stamp-last_stamp>5000){
	  count++;
	  new_frame = true;
	}
	last_stamp = current_stamp;

	// manual exposure/gain requested: first lock the auto settings with a fixed
	// low exposure, then after a few more frames apply the requested values
	if (_gain>=0) {
	  if (count > 64 && gain_status==0){
	    rgb.getCameraSettings()->setAutoExposureEnabled(false);
	    rgb.getCameraSettings()->setAutoWhiteBalanceEnabled(false);
	    rgb.getCameraSettings()->setExposure(1);
	    rgb.getCameraSettings()->setGain(100);
	    gain_status=1;
	  } else if (count > 128 && gain_status==1){
	    rgb.getCameraSettings()->setExposure(_exposure);
	    rgb.getCameraSettings()->setGain(_gain);
	    gain_status=2;
	  }
	}

	if (! (count%(_frame_skip)) ){
	  rgb_info.header.frame_id=frame_id+"_rgb";
	  rgb_info.width=rgbframe.getWidth();
	  rgb_info.height=rgbframe.getHeight();
	  rgb_info.header.seq = count;
	  rgb_info.K[0]=rgb_info.width/(2*tan(rgb.getHorizontalFieldOfView()/2)); //fx
	  rgb_info.K[4]=rgb_info.height/(2*tan(rgb.getVerticalFieldOfView()/2)); //fy
	  rgb_info.K[2]=rgb_info.width/2; //cx
	  rgb_info.K[5]=rgb_info.height/2; //cy
	  rgb_info.K[8]=1;
	  
	  image.header.seq = count;
	  image.header.frame_id=frame_id+"_rgb";
	  image.height=rgbframe.getHeight();
	  image.width=rgbframe.getWidth();
	  image.encoding="rgb8";
	  image.step=rgbframe.getWidth()*3;
	  image.data.resize(image.step*image.height);
	  memcpy((char*)(&image.data[0]),rgbframe.getData(),image.height*image.width*3);
	  pub_rgb.publish(image);
	  pub_camera_info_rgb.publish(rgb_info);
	}
	break;
      default:
	printf("Error in wait\n");
      }


    // report the effective frame rate every num_frames_stat frames
    if (!(count%num_frames_stat) && new_frame){
      struct timeval current_time, interval;
      gettimeofday(&current_time, 0);
      timersub(&current_time, &previous_time, &interval);
      previous_time = current_time;
      double fps = num_frames_stat/(interval.tv_sec +1e-6*interval.tv_usec);
      printf("running at %lf fps\n", fps);
      fflush(stdout);
    }
  }
  return 0;
}
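A minimal sketch of how camera_thread might be started and stopped, assuming `run` is the global flag checked by the loop above and that the streams and publishers it uses are already initialized. The pthread usage and the exact type of `run` are assumptions, not taken from the original listing.

// Hypothetical launch/shutdown sketch (assumes the globals used above exist).
#include <pthread.h>

extern volatile bool run;		// loop flag checked by camera_thread (type assumed)
void* camera_thread(void*);

void startCameraThread( pthread_t& thread )
{
	run = true;
	pthread_create( &thread, NULL, camera_thread, NULL );
}

void stopCameraThread( pthread_t& thread )
{
	run = false;			// camera_thread leaves its while(run) loop
	pthread_join( thread, NULL );	// wait for the thread to finish
}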