Example No. 1
/*!
  Acquire a color image from the active camera.

  \param I : Image data structure (RGBa image).

  \param timestamp : The acquisition timestamp.
*/
void vpFlyCaptureGrabber::acquire(vpImage<vpRGBa> &I, FlyCapture2::TimeStamp &timestamp)
{
  this->open();

  FlyCapture2::Error error;
  // Retrieve an image
  error = m_camera.RetrieveBuffer( &m_rawImage );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot retrieve image for camera with guid 0x%lx",
                       m_guid) );
  }
  timestamp = m_rawImage.GetTimeStamp();

  // Create a converted image
  FlyCapture2::Image convertedImage;

  // Convert the raw image
  error = m_rawImage.Convert( FlyCapture2::PIXEL_FORMAT_RGBU, &convertedImage );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot convert image for camera with guid 0x%lx",
                       m_guid) );
  }
  height = convertedImage.GetRows();
  width = convertedImage.GetCols();
  unsigned char *data = convertedImage.GetData();
  I.resize(height, width);
  unsigned int bps = convertedImage.GetBitsPerPixel();
  memcpy(I.bitmap, data, width*height*bps/8);
}
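A minimal usage sketch for the method above, written in the same style as the documentation examples on this page; it assumes the open(vpImage<vpRGBa> &) overload and the FlyCapture2::TimeStamp fields used here (seconds, microSeconds) behave as in the FlyCapture2 SDK.
#include <iostream>
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpImage<vpRGBa> I;
  vpFlyCaptureGrabber g;
  FlyCapture2::TimeStamp timestamp;

  g.setCameraIndex(0);
  g.open(I);               // connect and start capture
  g.acquire(I, timestamp); // grab one RGBa frame and its acquisition timestamp
  std::cout << "Acquired a " << I.getWidth() << "x" << I.getHeight() << " image at "
            << timestamp.seconds << "." << timestamp.microSeconds << " s" << std::endl;
#endif
}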
Example No. 2
/*!
   Connect the active camera.

   \sa disconnect()
 */
void vpFlyCaptureGrabber::connect()
{
  if (m_connected == false) {
    FlyCapture2::Error error;
    m_numCameras = this->getNumCameras();
    if (m_numCameras == 0) {
      throw (vpException(vpException::fatalError, "No camera found on the bus"));
    }

    FlyCapture2::BusManager busMgr;

    error = busMgr.GetCameraFromIndex(m_index, &m_guid);
    if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError,
                         "Cannot retrieve guid of camera with index %u.",
                         m_index) );
    }
    // Connect to a camera
    error = m_camera.Connect(&m_guid);
    if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError,
                         "Cannot connect to camera with guid 0x%lx", m_guid));
    }
    m_connected = true;
  }
  if (m_connected && m_capture)
    init = true;
  else
    init = false;
}
Example No. 3
/*!
  Power on/off the camera.

  \param on : true to power on the camera, false to power off the camera.

  The following example shows how to turn off a camera.
  \code
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpFlyCaptureGrabber g;

  g.setCameraIndex(0);
  g.connect();

  bool power = g.getCameraPower();
  std::cout << "Camera is powered: " << ((power == true) ? "on" : "off") << std::endl;

  if (power)
    g.setCameraPower(false); // Power off the camera
#endif
}
  \endcode

  \sa getCameraPower()
 */
void vpFlyCaptureGrabber::setCameraPower(bool on)
{
  this->connect();

  if ( ! isCameraPowerAvailable() ) {
    throw (vpException(vpException::badValue,
                       "Cannot power on camera. Feature not available") );
  }

  // Power the camera on or off by writing to the power register
  const unsigned int powerReg = 0x610;
  unsigned int powerRegVal = 0;

  powerRegVal = (on == true) ? 0x80000000 : 0x0;

  FlyCapture2::Error error;
  error  = m_camera.WriteRegister( powerReg, powerRegVal );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot power on the camera.") );
  }

  const unsigned int millisecondsToSleep = 100;
  unsigned int regVal = 0;
  unsigned int retries = 10;

  // Wait for camera to complete power-up
  do
  {
    vpTime::wait(millisecondsToSleep);
    error = m_camera.ReadRegister(powerReg, &regVal);
    if (error == FlyCapture2::PGRERROR_TIMEOUT) {
      // ignore timeout errors, camera may not be responding to
      // register reads during power-up
    }
    else if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError, "Cannot power on the camera.") );
    }

    retries--;
  } while ((regVal & powerRegVal) == 0 && retries > 0);

  // Check for timeout errors after retrying
  if (error == FlyCapture2::PGRERROR_TIMEOUT) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot power on the camera. Timeout occur") );
  }
}
Example No. 4
/*!
  Set format7 video mode.
  \param format7_mode : Format 7 mode.
  \param pixel_format : Pixel format.
  \param width,height : Size of the centered ROI. If set to 0, the maximum allowed size is used.

  If the Format7 video mode or pixel format is not supported, an exception is thrown.

  The following example shows how to use this function to capture a 640x480 ROI:

  \code
#include <iomanip>
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpImage<unsigned char> I;

  vpFlyCaptureGrabber g;
  g.setCameraIndex(0);

  g.setFormat7VideoMode(FlyCapture2::MODE_0, FlyCapture2::PIXEL_FORMAT_MONO8, 640, 480);

  g.open(I);
  ...
#endif
}
  \endcode
 */
void vpFlyCaptureGrabber::setFormat7VideoMode(FlyCapture2::Mode format7_mode,
                                              FlyCapture2::PixelFormat pixel_format,
                                              int width, int height)
{
  this->connect();

  FlyCapture2::Format7Info fmt7_info;
  bool fmt7_supported;
  FlyCapture2::Error error;

  fmt7_info.mode = format7_mode;
  error = m_camera.GetFormat7Info(&fmt7_info, &fmt7_supported);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot get format7 info.") );
  }
  if (! fmt7_supported) {
    throw (vpException(vpException::fatalError, "Format7 mode %d not supported.", (int)format7_mode) );
  }

  FlyCapture2::Format7ImageSettings fmt7_settings;
  fmt7_settings.mode = format7_mode;
  fmt7_settings.pixelFormat = pixel_format;
  // Set centered roi
  std::pair<int, int> roi_w = this->centerRoi(width, fmt7_info.maxWidth, fmt7_info.imageHStepSize);
  std::pair<int, int> roi_h = this->centerRoi(height, fmt7_info.maxHeight, fmt7_info.imageVStepSize);
  fmt7_settings.width   = roi_w.first;
  fmt7_settings.offsetX = roi_w.second;
  fmt7_settings.height  = roi_h.first;
  fmt7_settings.offsetY = roi_h.second;

  // Validate the settings
  FlyCapture2::Format7PacketInfo fmt7_packet_info;
  bool valid = false;
  error = m_camera.ValidateFormat7Settings(&fmt7_settings, &valid, &fmt7_packet_info);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot validate format7 settings.") );
  }
  if (! valid) {
    throw (vpException(vpException::fatalError, "Format7 settings are not valid.") );
  }
  error = m_camera.SetFormat7Configuration(&fmt7_settings, fmt7_packet_info.recommendedBytesPerPacket);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot set format7 settings.") );
  }
}
Example No. 5
  static bool frameToImage (FlyCapture2::Image * frame, sensor_msgs::Image & image)
  {
    // Get the raw image dimensions
    FlyCapture2::PixelFormat pixFormat;
    uint32_t rows, cols, stride;

    FlyCapture2::Image convertedImage;
    FlyCapture2::Error error;

    error = frame->Convert(FlyCapture2::PIXEL_FORMAT_RGB, &convertedImage );

    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return false;
    }

    // Get the converted image dimensions
    convertedImage.GetDimensions( &rows, &cols, &stride, &pixFormat );

    return sensor_msgs::fillImage (image, sensor_msgs::image_encodings::RGB8,
                                   rows, cols, stride,
                                   convertedImage.GetData ());

  }
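A hedged usage sketch for the helper above; cam and img_pub are hypothetical names for an already connected FlyCapture2::Camera and an advertised ros::Publisher.
  // Hypothetical calling code inside a ROS node.
  FlyCapture2::Image rawFrame;
  FlyCapture2::Error err = cam.RetrieveBuffer(&rawFrame);
  if (err == FlyCapture2::PGRERROR_OK)
  {
    sensor_msgs::Image msg;
    if (frameToImage(&rawFrame, msg))
      img_pub.publish(msg);
  }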
Example No. 6
// Print point grey errors.
void TrackerDataSource::ptgr_err( FlyCapture2::Error error ){
  if(error != FlyCapture2::PGRERROR_OK){
    error.PrintErrorTrace();
    std::cout << "Press Enter to continue" << std::endl;
    getchar();
  }
}
Example No. 7
void _pgrSafeCall(FlyCapture2::Error err, std::string fileName, int lineNum)
{
	if(err != FlyCapture2::PGRERROR_OK)
	{	
		std::cout << "File: " << fileName << " Line: " << lineNum << std::endl;
		err.PrintErrorTrace();
		std::cout << std::endl;
	}
}
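The file/line parameters suggest this helper is meant to be invoked through a wrapper macro; a plausible sketch (the macro name is an assumption, not taken from the snippet above):
// Hypothetical convenience macro that records the call site automatically.
#define PGR_SAFE_CALL(call) _pgrSafeCall((call), __FILE__, __LINE__)

// Usage sketch:
//   PGR_SAFE_CALL(camera.StartCapture());
//   PGR_SAFE_CALL(camera.RetrieveBuffer(&rawImage));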
Example No. 8
/*!
  Set video mode and framerate of the active camera.
  \param video_mode : Camera video mode.
  \param frame_rate : Camera frame rate.

  The following example shows how to use this function to set the camera image resolution to
  1280 x 960, the pixel format to Y8, and the capture framerate to 60 fps.
  \code
#include <visp3/core/vpImage.h>
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  int nframes = 100;
  vpImage<unsigned char> I;
  vpFlyCaptureGrabber g;

  g.setCameraIndex(0); // Default camera is the first on the bus
  g.setVideoModeAndFrameRate(FlyCapture2::VIDEOMODE_1280x960Y8, FlyCapture2::FRAMERATE_60);
  g.open(I);
  g.getCameraInfo(std::cout);

  for(int i=0; i< nframes; i++) {
    g.acquire(I);
  }
#endif
}
  \endcode
 */
void vpFlyCaptureGrabber::setVideoModeAndFrameRate(FlyCapture2::VideoMode video_mode,
                                                   FlyCapture2::FrameRate frame_rate)
{
  this->connect();

  FlyCapture2::Error error;
  error = m_camera.SetVideoModeAndFrameRate(video_mode, frame_rate);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot set video mode and framerate.") );
  }
}
Example No. 9
int VideoHandler::start() {

  if (m_stopped) {
    if ( m_camera) {
      FlyCapture2::BusManager busMgr;
      FlyCapture2::Error error;
      FlyCapture2::PGRGuid guid;
      
      cout << m_camIdx << endl;
      error = busMgr.GetCameraFromIndex( m_camIdx, &guid );
      if (error != FlyCapture2::PGRERROR_OK)
      {
	error.PrintErrorTrace();
	return -1;
      }
      cout << "got camera from index" << endl;
      pVideoSaver = new VideoSaver();
      cout << "created VideoSaver" << endl;
      if (pVideoSaver->init(guid)!=0){
	cout << "Warning: Error in initializing the camera\n" ;
	return -1;
      }
      if (fname.empty()) {
	cout << "start capture thread" << endl;
	if (pVideoSaver->startCapture()!=0) {
	  cout << "Warning: Error in starting the capture thread the camera \n";
	  return -1;
	}
      }
      else {
	cout << "start capture and write threads" << endl;
	if (pVideoSaver->startCaptureAndWrite(fname,string("X264"))!=0) {
	  cout << "Warning: Error in starting the writing thread the camera \n";
	  return -1;
	}
      }
    }
    else {
      pVideoCapture = new VideoCapture(fname);
      //pVideoCapture->set(cv::CAP_PROP_CONVERT_RGB,true); // output RGB
    }

    if (knnMethod)
      pBackgroundSubtractor =  cv::createBackgroundSubtractorKNN(250,400,false);
    else
      pBackgroundThresholder =  new BackgroundThresholder();

    m_stopped=false;
    startThreads();
  }
  return 0;
}
Example No. 10
/*!
  Return true if the given video mode and framerate combination is supported.
  */
bool vpFlyCaptureGrabber::isVideoModeAndFrameRateSupported(FlyCapture2::VideoMode video_mode,
                                                           FlyCapture2::FrameRate frame_rate)
{
  this->connect();

  FlyCapture2::Error error;
  bool supported = false;
  error = m_camera.GetVideoModeAndFrameRateInfo(video_mode, frame_rate, &supported);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot get video mode and framerate.") );
  }
  return supported;
}
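A short sketch showing how this query can guard the setVideoModeAndFrameRate() call documented earlier on this page; the chosen mode/framerate pair is only an example.
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpFlyCaptureGrabber g;
  g.setCameraIndex(0);
  if (g.isVideoModeAndFrameRateSupported(FlyCapture2::VIDEOMODE_1280x960Y8, FlyCapture2::FRAMERATE_60)) {
    g.setVideoModeAndFrameRate(FlyCapture2::VIDEOMODE_1280x960Y8, FlyCapture2::FRAMERATE_60);
  }
#endif
}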
Example No. 11
int VideoSaverFlyCapture::stopCamera() {

  if (isFlyCapture()) {
    if (m_Camera.IsConnected()) {    
      FlyCapture2::Error error = m_Camera.Disconnect();
      if ( error != FlyCapture2::PGRERROR_OK ) {
	error.PrintErrorTrace();
	return -1;
      }
    }
    return 0;
  } else {
    return VideoSaver::stopCamera();
  }
}
Example No. 12
/*!
  Return the values of the given property.
  \param prop_type : Property type.
 */
FlyCapture2::Property vpFlyCaptureGrabber::getProperty(FlyCapture2::PropertyType prop_type)
{
  this->connect();

  FlyCapture2::Property prop;
  prop.type = prop_type;
  FlyCapture2::Error error;
  error = m_camera.GetProperty( &prop );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot get property %d value.", (int)prop_type));
  }
  return prop;
}
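A minimal sketch reading the shutter property; FlyCapture2::SHUTTER and the Property fields used below (absValue, autoManualMode) come from the FlyCapture2 SDK, while the surrounding program is illustrative.
#include <iostream>
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpFlyCaptureGrabber g;
  g.setCameraIndex(0);
  FlyCapture2::Property prop = g.getProperty(FlyCapture2::SHUTTER);
  std::cout << "Shutter value: " << prop.absValue
            << " (auto mode: " << (prop.autoManualMode ? "on" : "off") << ")" << std::endl;
#endif
}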
Example No. 13
/*!
  Return true if format7 mode is supported.
  */
bool vpFlyCaptureGrabber::isFormat7Supported(FlyCapture2::Mode format7_mode)
{
  this->connect();

  FlyCapture2::Format7Info fmt7_info;
  bool supported = false;
  FlyCapture2::Error error;

  fmt7_info.mode = format7_mode;
  error = m_camera.GetFormat7Info(&fmt7_info, &supported);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError, "Cannot get format7 info.") );
  }

  return supported;
}
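A short sketch combining this query with setFormat7VideoMode() shown earlier on this page; mode, pixel format and ROI size are example values.
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpFlyCaptureGrabber g;
  g.setCameraIndex(0);
  if (g.isFormat7Supported(FlyCapture2::MODE_0)) {
    g.setFormat7VideoMode(FlyCapture2::MODE_0, FlyCapture2::PIXEL_FORMAT_MONO8, 640, 480);
  }
#endif
}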
Example No. 14
/*!
   Disconnect the active camera.

   \sa connect()
 */
void vpFlyCaptureGrabber::disconnect()
{
  if (m_connected == true) {

    FlyCapture2::Error error;
    error = m_camera.Disconnect();
    if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError, "Cannot stop capture.") );
    }
    m_connected = false;
  }
  if (m_connected && m_capture)
    init = true;
  else
    init = false;
}
Example No. 15
/*!
  Return the serial id of a camera with \e index.
  \param index : Camera index.

  The following code shows how to retrieve the serial id of all the cameras that
  are connected on the bus.
  \code
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpFlyCaptureGrabber g;
  unsigned int num_cameras = vpFlyCaptureGrabber::getNumCameras();
  for (unsigned int i=0; i<num_cameras; i++) {
    unsigned int serial_id = vpFlyCaptureGrabber::getCameraSerial(i);
    std::cout << "Camera with index " << i << " has serial id: " << serial_id << std::endl;
  }
#endif
}
  \endcode

  When two cameras are connected (PGR Flea3 in our case), we get the following:
  \code
Camera with index 0 has serial id: 15372913
Camera with index 1 has serial id: 15290004
  \endcode

  \sa setCameraSerial()
 */
unsigned int vpFlyCaptureGrabber::getCameraSerial(unsigned int index)
{
  unsigned int num_cameras = vpFlyCaptureGrabber::getNumCameras();
  if(index >= num_cameras) {
    throw (vpException(vpException::badValue,
                       "The camera with index %u is not present. Only %d cameras connected.",
                       index, num_cameras) );
  }
  unsigned int serial_id;
  FlyCapture2::BusManager busMgr;
  FlyCapture2::Error error;
  error = busMgr.GetCameraSerialNumberFromIndex(index, &serial_id);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot get camera with index %d serial id.", index) );
  }
  return serial_id;
}
Example No. 16
/*!
   Start active camera capturing images.

   \sa stopCapture()
 */
void vpFlyCaptureGrabber::startCapture()
{
  this->connect();

  if (m_capture == false) {

    FlyCapture2::Error error;
    error = m_camera.StartCapture();
    if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError,
                         "Cannot start capture for camera with guid 0x%lx", m_guid));
    }
    m_capture = true;
  }
  if (m_connected && m_capture)
    init = true;
  else
    init = false;
}
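A minimal usage sketch, assuming acquire(vpImage<unsigned char> &) and stopCapture() behave as described elsewhere on this page.
#include <visp3/flycapture/vpFlyCaptureGrabber.h>

int main()
{
#if defined(VISP_HAVE_FLYCAPTURE)
  vpImage<unsigned char> I;
  vpFlyCaptureGrabber g;

  g.setCameraIndex(0);
  g.connect();
  g.startCapture();
  g.acquire(I);    // grab frames while capture is running
  g.stopCapture();
#endif
}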
Example No. 17
/*!
  Print the active camera information (serial number, camera model,
  camera vendor, sensor, resolution, firmware version, ...) to the output stream.
  */
std::ostream &vpFlyCaptureGrabber::getCameraInfo(std::ostream & os)
{
  this->connect();

  FlyCapture2::CameraInfo camInfo;
  FlyCapture2::Error error = m_camera.GetCameraInfo(&camInfo);
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
  }

  os << "Camera information:   " << std::endl;
  os << " Serial number      : " << camInfo.serialNumber << std::endl;
  os << " Camera model       : " << camInfo.modelName << std::endl;
  os << " Camera vendor      : " << camInfo.vendorName << std::endl;
  os << " Sensor             : " << camInfo.sensorInfo << std::endl;
  os << " Resolution         : " << camInfo.sensorResolution << std::endl;
  os << " Firmware version   : " << camInfo.firmwareVersion << std::endl;
  os << " Firmware build time: " << camInfo.firmwareBuildTime << std::endl;
  return os;
}
Example No. 18
/*!
  Set camera property.

  \param prop_type : Property type.
  \param on : true to turn property on.
  \param auto_on : true to turn auto mode on, false to turn manual mode on.
  \param value : Value to set.
  \param prop_value : Selector indicating which property field (absolute or integer value) \e value is assigned to.
 */
void vpFlyCaptureGrabber::setProperty(const FlyCapture2::PropertyType &prop_type,
                                      bool on, bool auto_on,
                                      float value, PropertyValue prop_value)
{
  this->connect();

  FlyCapture2::PropertyInfo propInfo;
  propInfo = this->getPropertyInfo(prop_type);

  if (propInfo.present) {
    FlyCapture2::Property prop;
    prop.type = prop_type;
    prop.onOff = on && propInfo.onOffSupported;
    prop.autoManualMode = auto_on && propInfo.autoSupported;
    prop.absControl = propInfo.absValSupported;
    switch(prop_value) {
    case ABS_VALUE: {
      float value_ = std::max<float>(std::min<float>(value, propInfo.absMax), propInfo.absMin);
      prop.absValue = value_;
      break;
    }
    case VALUE_A: {
      unsigned int value_ = std::max<unsigned int>(std::min<unsigned int>((unsigned int)value, propInfo.max), propInfo.min);
      prop.valueA = value_;
      break;
    }
    }

    FlyCapture2::Error error;
    error = m_camera.SetProperty(&prop);
    if (error != FlyCapture2::PGRERROR_OK) {
      error.PrintErrorTrace();
      throw (vpException(vpException::fatalError,
                         "Cannot set property %d.",
                         (int)prop_type) );
    }
  }
}
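An illustrative internal call, assuming ABS_VALUE selects the floating-point (absolute) field; this is roughly how a public shutter-setting wrapper could delegate to this helper.
  // Hypothetical call from inside the class: enable the shutter property,
  // switch to manual mode and request a 20 ms absolute value.
  this->setProperty(FlyCapture2::SHUTTER, true, false, 20.f, ABS_VALUE);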
Example No. 19
void PrintError(FlyCapture2::Error error){
    error.PrintErrorTrace();
}
  static bool frameToImage (FlyCapture2::Image * frame, sensor_msgs::Image & image)
  {

    std::string encoding;

    // Get the raw image dimensions
    FlyCapture2::PixelFormat pixFormat;
    uint32_t rows, cols, stride;


    // NOTE: 16-bit and Yuv formats not supported
    static const char *BAYER_ENCODINGS[] = { "none", "bayer_rggb8",
      "bayer_grbg8", "bayer_gbrg8", "bayer_bggr8", "unknown"
    };
    //unsigned int bpp = frame->GetBitsPerPixel();
    FlyCapture2::BayerTileFormat bayerFmt;
    bayerFmt = frame->GetBayerTileFormat ();
    //ROS_INFO("bayer is %u", bayerFmt);
    frame->GetDimensions( &rows, &cols, &stride, &pixFormat );
    //ROS_INFO("Frame WxH = %d x %d", cols, rows); // FIXME: Dynamic resizing of camera is now working
    //ROS_INFO("Pixel Format (from Raw Image): 0x%08x", pixFormat);
    //ROS_INFO("Stride of Raw Image (The number of bytes between rows of the image): %d", stride);

    if (bayerFmt == FlyCapture2::NONE)
    {
      //ROS_INFO("Pixel Format (from Raw Image): 0x%08x", pixFormat);
      encoding = sensor_msgs::image_encodings::MONO8;
      return sensor_msgs::fillImage (image, encoding, rows,
                                     cols,
                                     stride,
                                     frame->GetData ());
    }
    else
    {
      // Create a converted image
      FlyCapture2::Image convertedImage;
      FlyCapture2::Error error;

      //encoding = BAYER_ENCODINGS[bayerFmt];
      switch ((FlyCapture2::PixelFormat) pixFormat) {
        case FlyCapture2::PIXEL_FORMAT_RAW8:
          // Convert the raw image
          error = frame->Convert(FlyCapture2::PIXEL_FORMAT_RGB, &convertedImage );
          encoding = sensor_msgs::image_encodings::RGB8; // Arbitrary encoding based on pixel format

          if (error != FlyCapture2::PGRERROR_OK)
          {
            error.PrintErrorTrace();
            return false;
          }
          // Get the raw image dimensions
          convertedImage.GetDimensions( &rows, &cols, &stride, &pixFormat );
          break;
        default:
          return false; // FIXME: Now, it's arbitrarily converting Raw images to RGB8 encoding
          // TODO: Handle encoding directly from Bayer
          break;
      } // switch
      //ROS_INFO("Stride of Converted Image (The number of bytes between rows of the image): %d", stride);

      return sensor_msgs::fillImage (image, encoding, rows,
                                     cols,
                                     stride,
                                     convertedImage.GetData ());
    } //else

  }
Example No. 21
int VideoSaverFlyCapture::init(FlyCapture2::PGRGuid camIdx)
{
  FlyCapture2::Error error;
  FlyCapture2::CameraInfo camInfo;
  
  // Connect the camera
  error = m_Camera.Connect( &camIdx );
  if ( error != FlyCapture2::PGRERROR_OK )
    {
      std::cout << "Failed to connect to camera" << std::endl;     
      return -1;
    }
    
  // Get the camera info and print it out
  error = m_Camera.GetCameraInfo( &camInfo );
  if ( error != FlyCapture2::PGRERROR_OK )
    {
      std::cout << "Failed to get camera info from camera" << std::endl;     
      return -1;
    }
  std::cout << camInfo.vendorName << " "
	    << camInfo.modelName << " " 
	    << camInfo.serialNumber << std::endl;
	
	
  //-----------------
  // get frame rate
  // Check if the camera supports the FRAME_RATE property
  FlyCapture2::PropertyInfo propInfo;
  propInfo.type = FlyCapture2::FRAME_RATE;
  error = m_Camera.GetPropertyInfo( &propInfo );
  if (error != FlyCapture2::PGRERROR_OK)
    {
      error.PrintErrorTrace();
      return -1;
    }

  m_FrameRateToUse = 15.0f;
  if ( propInfo.present == true )
    {
      // Get the frame rate
      FlyCapture2::Property prop;
      prop.type = FlyCapture2::FRAME_RATE;
      error = m_Camera.GetProperty( &prop );
      if (error != FlyCapture2::PGRERROR_OK)
        {
	  error.PrintErrorTrace();
	  return -1;
        }
      else
	{
	  // Set the frame rate.
	  // Note that the actual recording frame rate may be slower,
	  // depending on the bus speed and disk writing speed.
	  m_FrameRateToUse = prop.absValue;
	}
    }
  printf("Using frame rate of %3.1f\n", m_FrameRateToUse);

  //get the width and height
  FlyCapture2::Format7ImageSettings settings;
  unsigned int packetSize;
  float percentage;
  error = m_Camera.GetFormat7Configuration( &settings,&packetSize,&percentage );
  if ( error != FlyCapture2::PGRERROR_OK ) {
    error.PrintErrorTrace();
    return -1;
  }
  m_FrameSize =  cv::Size(settings.width,settings.height);

  settings.pixelFormat = FlyCapture2::PIXEL_FORMAT_RAW8;
  bool valid;
  FlyCapture2::Format7PacketInfo pinfo;
  error = m_Camera.ValidateFormat7Settings( &settings,&valid,&pinfo);
  if ( error != FlyCapture2::PGRERROR_OK ) {
    error.PrintErrorTrace();
    return -1;
  }

  if (!valid) {
    std::cout  << "Could not validate Format 7."  << std::endl;
    return -1;
  }

  error = m_Camera.SetFormat7Configuration( &settings,pinfo.recommendedBytesPerPacket);
  if ( error != FlyCapture2::PGRERROR_OK ) {
    error.PrintErrorTrace();
    return -1;
  }


  // set time stamping on
  FlyCapture2::EmbeddedImageInfo info;

  // Get configuration    
  error = m_Camera.GetEmbeddedImageInfo( &info );
  if ( error != FlyCapture2::PGRERROR_OK ) 
    {
      error.PrintErrorTrace();
      return -1;
    }

  info.timestamp.onOff = true;

  // Set configuration
  error = m_Camera.SetEmbeddedImageInfo( &info );
  if ( error != FlyCapture2::PGRERROR_OK ) 
    {
      error.PrintErrorTrace();
      return -1;
    }


  m_isFlyCapture = true;
  m_isRGB = true;
  return 0;
}
Example No. 22
int main()
{
    FlyCapture2::Error error;
    FlyCapture2::PGRGuid guid;
    FlyCapture2::BusManager busMgr;
    FlyCapture2::Camera cam;
    FlyCapture2::VideoMode vm;
    FlyCapture2::FrameRate fr;
    FlyCapture2::Image rawImage;

    //Initializing camera
    error = busMgr.GetCameraFromIndex(0, &guid);
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    vm = FlyCapture2::VIDEOMODE_640x480Y8;
    fr = FlyCapture2::FRAMERATE_60;

    error = cam.Connect(&guid);
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    cam.SetVideoModeAndFrameRate(vm, fr);
    //Starting the capture
    error = cam.StartCapture();
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

        //Image Variables
        Mat frame (Size(640,480),CV_8UC1);
        Mat img_bgr (Size(640,480),CV_8UC3);
        Mat img_hsv (Size(640,480),CV_8UC3);                        //Image in HSV color space
        Mat threshy (Size(640,480),CV_8UC1);                                //Threshed Image
        // Mat labelImg (Size(640,480),CV_8UC1);        //Image Variable for blobs
        IplImage* histogram= cvCreateImage(cvSize(640,480),8,3);                        //Image histograms
        Mat histograms (Size(640,480),CV_8UC1);         //greyscale image for histogram
        Mat empty (Size(640,480),CV_8UC3);        

        

        // CvBlobs blobs;
        
        while(1)
        {
                cam.RetrieveBuffer(&rawImage);
                memcpy(frame.data, rawImage.GetData(), rawImage.GetDataSize());
                // histogram = empty.clone();
                cvZero(histogram);
                // cvAddS(frame, cvScalar(70,70,70), frame);
                cvtColor(frame,img_bgr,CV_BayerBG2BGR);
                // cout<<"\n1";
                cvtColor(img_bgr,img_hsv,CV_BGR2HSV);        
                // cout<<"\n2";                                        
                //Thresholding the frame for yellow
                inRange(img_hsv, Scalar(20, 100, 20), Scalar(70, 255, 255), threshy);                                        
                // cvInRangeS(img_hsv, cvScalar(0, 120, 100), cvScalar(255, 255, 255), threshy);
                //Filtering the frame - subsampling??
                // smooth(threshy,threshy,CV_MEDIAN,7,7);

                //Finding the blobs
                // unsigned int result=cvLabel(threshy,labelImg,blobs);
                //Filtering the blobs
                // cvFilterByArea(blobs,500,1000);
                //Rendering the blobs
                // cvRenderBlobs(labelImg,blobs,img_bgr,img_bgr);

                CvPoint prev = cvPoint(0,0);

                for(int x=0;x<640;++x)
                {
                        Mat col = threshy.col(x);
                        int y = 480 - countNonZero(col);
                        for(int i=0 ; i<480 ; ++i)
                             histograms.at<uchar>(i,x) = y;   // (row, col) order; (x,i) would index out of bounds
                        CvPoint next = cvPoint(x,y);
                        cvLine(histogram,prev,next,cColor);
                        // cvCircle(histogram,next,2,cColor,3);
                        prev=next;
                }

                //Showing the images
                imshow("Live",img_bgr);
                imshow("Threshed",threshy);
                cvShowImage("Histogram",histogram);

                int c= cvWaitKey(10);

                if(c == 27)
                        break;
        }

        //Cleanup
        // cvReleaseImage(&frame);
        // cvReleaseImage(&threshy);
        // cvReleaseImage(&img_hsv);
        // cvReleaseImage(&labelImg);
        // cvReleaseImage(&histogram);
        cvDestroyAllWindows();
        return 0;
}
Example No. 23
int main()
{
    FlyCapture2::Error error;
    FlyCapture2::PGRGuid guid;
    FlyCapture2::BusManager busMgr;
    FlyCapture2::Camera cam;
    FlyCapture2::VideoMode vm;
    FlyCapture2::FrameRate fr;
    FlyCapture2::Image rawImage;

    //Initializing camera
    error = busMgr.GetCameraFromIndex(0, &guid);
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    vm = FlyCapture2::VIDEOMODE_640x480Y8;
    fr = FlyCapture2::FRAMERATE_60;

    error = cam.Connect(&guid);
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    cam.SetVideoModeAndFrameRate(vm, fr);
    //Starting the capture
    error = cam.StartCapture();
    if (error != FlyCapture2::PGRERROR_OK)
    {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    while(1)
    {
        Mat frame (Size(640,480),CV_8UC1);
        // Mat img_scene (Size(640,480),CV_8UC3);
        Mat img_object = imread( "bitslogo.png", CV_LOAD_IMAGE_GRAYSCALE );
        cam.RetrieveBuffer(&rawImage);
        memcpy(frame.data, rawImage.GetData(), rawImage.GetDataSize());
        cvtColor(frame,frame,CV_BayerBG2RGB);
        imwrite("temp.bmp",frame);
        // cvtColor(img_scene,img_scene,CV_RGB2GRAY);
        Mat img_scene = imread( "temp.bmp", CV_LOAD_IMAGE_GRAYSCALE );

        if( !img_object.data || !img_scene.data )
        { std::cout<< " --(!) Error reading images " << std::endl; return -1; }

        //-- Step 1: Detect the keypoints using SURF Detector
        int minHessian = 400;

        SurfFeatureDetector detector( minHessian );

        std::vector<KeyPoint> keypoints_object, keypoints_scene;

        detector.detect( img_object, keypoints_object );
        detector.detect( img_scene, keypoints_scene );

        //-- Step 2: Calculate descriptors (feature vectors)
        SurfDescriptorExtractor extractor;

        Mat descriptors_object, descriptors_scene;

        extractor.compute( img_object, keypoints_object, descriptors_object );
        extractor.compute( img_scene, keypoints_scene, descriptors_scene );

        //-- Step 3: Matching descriptor vectors using FLANN matcher
        FlannBasedMatcher matcher;
        std::vector< DMatch > matches;
        matcher.match( descriptors_object, descriptors_scene, matches );

        double max_dist = 0; double min_dist = 100;

        //-- Quick calculation of max and min distances between keypoints
        for( int i = 0; i < descriptors_object.rows; i++ )
        { double dist = matches[i].distance;
          if( dist < min_dist ) min_dist = dist;
          if( dist > max_dist ) max_dist = dist;
        }

        printf("-- Max dist : %f \n", max_dist );
        printf("-- Min dist : %f \n", min_dist );

        //-- Draw only "good" matches (i.e. whose distance is less than 3*min_dist )
        std::vector< DMatch > good_matches;

        for( int i = 0; i < descriptors_object.rows; i++ )
        { if( matches[i].distance < 3*min_dist )
           { good_matches.push_back( matches[i]); }
        }

        Mat img_matches;
        drawMatches( img_object, keypoints_object, img_scene, keypoints_scene,
                     good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
                     vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

        //-- Localize the object
        std::vector<Point2f> obj;
        std::vector<Point2f> scene;

        for( int i = 0; i < good_matches.size(); i++ )
        {
          //-- Get the keypoints from the good matches
          obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt );
          scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );
        }

        Mat H = findHomography( obj, scene, CV_RANSAC );

        //-- Get the corners from the image_1 ( the object to be "detected" )
        std::vector<Point2f> obj_corners(4);
        obj_corners[0] = cvPoint(0,0); obj_corners[1] = cvPoint( img_object.cols, 0 );
        obj_corners[2] = cvPoint( img_object.cols, img_object.rows ); obj_corners[3] = cvPoint( 0, img_object.rows );
        std::vector<Point2f> scene_corners(4);

        perspectiveTransform( obj_corners, scene_corners, H);

        //-- Draw lines between the corners (the mapped object in the scene - image_2 )
        line( img_matches, scene_corners[0] + Point2f( img_object.cols, 0), scene_corners[1] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
        line( img_matches, scene_corners[1] + Point2f( img_object.cols, 0), scene_corners[2] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );
        line( img_matches, scene_corners[2] + Point2f( img_object.cols, 0), scene_corners[3] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );
        line( img_matches, scene_corners[3] + Point2f( img_object.cols, 0), scene_corners[0] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );

        //-- Show detected matches
        imshow( "Good Matches & Object detection", img_matches );
        int c = cvWaitKey(100);
        if(c == 27)
            break;
    }

    return 0;
}
Example No. 24
void VideoSaverFlyCapture::captureThread()
{
  if (!isFlyCapture()) {
    VideoSaver::captureThread();
  } else {

  
    m_GrabbingFinished = false;
    m_KeepThreadAlive = true;
    m_frameNumber = 0;
    m_newFrameAvailable = false;
    FlyCapture2::Image rgbImage;
    FlyCapture2::Image rawImage;
    FlyCapture2::Image rawImage2;

    m_timer= std::clock();
  
    while (m_KeepThreadAlive) 
    {

      FlyCapture2::Error error = m_Camera.RetrieveBuffer( &rawImage );
      if ( error != FlyCapture2::PGRERROR_OK )
      {
	error.PrintErrorTrace();
      }
      
      //get the time stamp
      const double localtimestamp = M_TIMER_ELAPSED;

      // convert to bgr
      rawImage2.DeepCopy(&rawImage); // not sure if really needed since we convert below...
      rawImage2.Convert(FlyCapture2::PIXEL_FORMAT_RGB, &rgbImage );

      // convert to Mat
      unsigned int rowBytes = (double) rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();       

      // copy to frame variable and update times
      { std::unique_lock<std::mutex> lock(VideoSaver::m_FrameMutex); 

	m_Frame.release();
	m_Frame = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(), CV_8UC3, rgbImage.GetData(),rowBytes);
	// could this happen ?
	if (m_Frame.size().width==0) 
	  m_Frame = cv::Mat::zeros(m_FrameSize,CV_8UC3);

	m_TimeStamp =  localtimestamp;
	m_frameNumber++;
	m_newFrameAvailable = true;
	VideoSaver::m_newFrameAvailableCond.notify_one();
      }


    } 
    rawImage.ReleaseBuffer();
    rawImage2.ReleaseBuffer();
    rgbImage.ReleaseBuffer();

    m_newFrameAvailableCond.notify_one();
    std::cout << "Finished grabbing." << std::endl;    
    m_GrabbingFinished  = true;

  }
}