void CCflycap_grab_next_frame_blocking_with_stride( CCflycap *ccntxt,
                                                    unsigned char *out_bytes,
                                                    intptr_t stride0, float timeout ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;
  cam_iface_backend_extras* backend_extras = (cam_iface_backend_extras*)(ccntxt->inherited.backend_extras);

  // The main frame grabbing code goes here.
  FlyCapture2::Image rawImage;
  CIPGRCHK(cam->RetrieveBuffer( &rawImage ));

  if (stride0 < (intptr_t)rawImage.GetStride()) {
    CAM_IFACE_THROW_ERROR("stride too small for image");
  }

  if (stride0 == (intptr_t)rawImage.GetStride()) {
    // Same stride: copy the whole buffer in one call.
    memcpy((void*)out_bytes,                 /* dest */
           (const void*)rawImage.GetData(),  /* src  */
           rawImage.GetDataSize());          /* size */
  } else {
    // Different strides: copy row by row.
    for (int row=0; row<(int)rawImage.GetRows(); row++) {
      memcpy((void*)(out_bytes + row*stride0),                             /* dest */
             (const void*)(rawImage.GetData() + row*rawImage.GetStride()), /* src  */
             rawImage.GetStride());                                        /* size */
    }
  }
}
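/*
 * A minimal caller sketch for the stride-aware grab above. It assumes a
 * CCflycap context that was already constructed (e.g. via CCflycap_CCflycap
 * below) and a started camera; the width, height, depth and stride values
 * are illustrative placeholders, not queried from a real device.
 */
static void example_grab_with_padding(CCflycap *ccntxt) {
  const intptr_t width  = 640;   // assumed current image width in pixels
  const intptr_t depth  = 8;     // assumed bits per pixel (MONO8)
  const intptr_t stride = 1024;  // padded row stride; must be >= width*depth/8
  const int height      = 480;   // assumed current image height
  std::vector<unsigned char> buf(stride * height);
  // Blocks until a frame arrives or the backend-defined timeout elapses.
  CCflycap_grab_next_frame_blocking_with_stride(ccntxt, buf.data(), stride, 0.5f);
}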
CameraFrame CameraPointGrey::getFrame(){

    FlyCapture2::Error error;

    if (triggerMode == triggerModeSoftware) {
        // Fire software trigger
        // (broadcasting is not supported on some platforms)
        cam.FireSoftwareTrigger(false);
    }

    // Retrieve the image
    FlyCapture2::Image rawImage;
    error = cam.RetrieveBuffer(&rawImage);
    if (error != FlyCapture2::PGRERROR_OK)
        PrintError(error);

    CameraFrame frame;
    frame.timeStamp = rawImage.GetTimeStamp().cycleCount;
    frame.height = rawImage.GetRows();
    frame.width = rawImage.GetCols();
    frame.memory = rawImage.GetData();

    return frame;
}
/*!
  Acquire a color image from the active camera.

  \param I : Image data structure (RGBa image).
  \param timestamp : The acquisition timestamp.
 */
void vpFlyCaptureGrabber::acquire(vpImage<vpRGBa> &I, FlyCapture2::TimeStamp &timestamp)
{
  this->open();

  FlyCapture2::Error error;
  // Retrieve an image
  error = m_camera.RetrieveBuffer( &m_rawImage );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot retrieve image for camera with guid 0x%lx", m_guid) );
  }
  timestamp = m_rawImage.GetTimeStamp();

  // Create a converted image
  FlyCapture2::Image convertedImage;

  // Convert the raw image
  error = m_rawImage.Convert( FlyCapture2::PIXEL_FORMAT_RGBU, &convertedImage );
  if (error != FlyCapture2::PGRERROR_OK) {
    error.PrintErrorTrace();
    throw (vpException(vpException::fatalError,
                       "Cannot convert image for camera with guid 0x%lx", m_guid) );
  }
  height = convertedImage.GetRows();
  width  = convertedImage.GetCols();
  unsigned char *data = convertedImage.GetData();
  I.resize(height, width);
  unsigned int bps = convertedImage.GetBitsPerPixel();
  memcpy(I.bitmap, data, width*height*bps/8);
}
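/*
 * A minimal usage sketch for the acquire() overload above. The grabber and
 * image types come from ViSP; the include path, default camera selection and
 * the 10-frame loop are assumptions for illustration. As shown above,
 * acquire() calls this->open() itself, so no explicit open() is required
 * before the first frame.
 */
#include <visp3/sensor/vpFlyCaptureGrabber.h>

void example_grab_color_frames()
{
  vpFlyCaptureGrabber g;      // assumed to use the first camera on the bus
  vpImage<vpRGBa> I;          // RGBa image filled by acquire()
  FlyCapture2::TimeStamp ts;  // hardware timestamp of each frame
  for (int i = 0; i < 10; ++i) {
    g.acquire(I, ts);         // opens the camera on the first call, then grabs
  }
}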
void CCflycap_CCflycap( CCflycap * ccntxt, int device_number, int NumImageBuffers,
                        int mode_number) {

  // call parent
  CamContext_CamContext((CamContext*)ccntxt,device_number,NumImageBuffers,mode_number);
  // XXX cast error?
  ccntxt->inherited.vmt = (CamContext_functable*)&CCflycap_vmt;

  CAM_IFACE_CHECK_DEVICE_NUMBER(device_number);

  std::vector<CamMode> result;
  int myerr = get_mode_list(device_number, result);

  ccntxt->inherited.device_number = device_number;

  ccntxt->inherited.backend_extras = new cam_iface_backend_extras;
  memset(ccntxt->inherited.backend_extras,0,sizeof(cam_iface_backend_extras));

  FlyCapture2::Camera *cam = new FlyCapture2::Camera;
  FlyCapture2::PGRGuid guid;
  CIPGRCHK( BACKEND_GLOBAL(busMgr_ptr)->GetCameraFromIndex(device_number, &guid));
  CIPGRCHK(cam->Connect(&guid));

  FlyCapture2::FC2Config cfg;
  CIPGRCHK(cam->GetConfiguration(&cfg));
  cfg.numBuffers = NumImageBuffers;
  CIPGRCHK(cam->SetConfiguration(&cfg));

  // Set the settings to the camera
  CamMode target_mode = result[mode_number];
  if (target_mode.videomode == FlyCapture2::VIDEOMODE_FORMAT7) {
    CIPGRCHK(cam->SetFormat7Configuration(&target_mode.fmt7ImageSettings,
                                          target_mode.fmt7PacketInfo.recommendedBytesPerPacket ));
  } else {
    CIPGRCHK(cam->SetVideoModeAndFrameRate(target_mode.videomode, target_mode.framerate));
  }

  ccntxt->inherited.cam = (void*)cam;

  // XXX move this to start camera and query camera for settings
  CIPGRCHK(cam->StartCapture());

  // Retrieve an image to get width, height. XXX change to query later.
  FlyCapture2::Image rawImage;
  CIPGRCHK( cam->RetrieveBuffer( &rawImage ));

  cam_iface_backend_extras *extras = (cam_iface_backend_extras *)ccntxt->inherited.backend_extras;
  ccntxt->inherited.depth = rawImage.GetBitsPerPixel();
  extras->buf_size = rawImage.GetDataSize();
  extras->current_height = rawImage.GetRows();
  extras->current_width = rawImage.GetCols();
  extras->max_height = rawImage.GetRows();
  extras->max_width = rawImage.GetCols();

  CIPGRCHK( cam->GetTriggerModeInfo( &extras->trigger_mode_info ));

  switch (rawImage.GetPixelFormat()) {
  case FlyCapture2::PIXEL_FORMAT_MONO8:
    ccntxt->inherited.coding = CAM_IFACE_MONO8;
    if (rawImage.GetBayerTileFormat()!=FlyCapture2::NONE) {
      NOT_IMPLEMENTED;
    }
    break;
  case FlyCapture2::PIXEL_FORMAT_RAW8:
    switch (rawImage.GetBayerTileFormat()) {
    case FlyCapture2::NONE:
      ccntxt->inherited.coding = CAM_IFACE_RAW8;
      break;
    case FlyCapture2::RGGB:
      ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_RGGB;
      break;
    case FlyCapture2::GRBG:
      ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_GRBG;
      break;
    case FlyCapture2::GBRG:
      ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_GBRG;
      break;
    case FlyCapture2::BGGR:
      ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_BGGR;
      break;
    default:
      NOT_IMPLEMENTED;
    }
    break;
  }

  CIPGRCHK(cam->StopCapture());
}
/*!
 @brief open camera
 @param[out] w image width
 @param[out] h image height
 @param[out] c num of channels
 @retval succeeded or not
 */
bool CCam::Open( int &w, int &h, int &c )
{
#ifdef USEPGR
    if (!mPGRCap.IsConnected()){
        FlyCapture2::Error error;

        // connect to a camera
        error = mPGRCap.Connect(0);
        if (error != FlyCapture2::PGRERROR_OK){
            return false;
        }

        FlyCapture2::CameraInfo camInfo;
        error = mPGRCap.GetCameraInfo(&camInfo);
        if (error != FlyCapture2::PGRERROR_OK) {
            return false;
        }

        // start capturing
        error = mPGRCap.StartCapture();
        if (error != FlyCapture2::PGRERROR_OK){
            return false;
        }

        // get image
        FlyCapture2::Image raw;
        error = mPGRCap.RetrieveBuffer(&raw);
        if (error != FlyCapture2::PGRERROR_OK){
            return false;
        }

        // VGA
        w = mWidth = raw.GetCols() / 2;   // image width
        h = mHeight = raw.GetRows() / 2;  // image height
        c = mChannel = 3;                 // color camera
        mSize = mWidth*mHeight*mChannel;
        mImg = cv::Mat(cv::Size(mWidth, mHeight), CV_8UC3);
        return true;
    }
    else{
        return false;
    }
#else
    if (!mCap.isOpened()){
        if (!mCap.open(mDeviceID)){
            // if opening failed
            printf("Cam ID %d not found\n", mDeviceID);
            return false;
        }
        // VGA
        mCap.set(cv::CAP_PROP_FRAME_WIDTH, 640);
        mCap.set(cv::CAP_PROP_FRAME_HEIGHT, 480);

        // check image
        int count = 0;
        while (mImg.data == NULL){
            mCap >> mImg;
            ++count;
            if (count > 10){
                // if retrieval failed
                printf("Cannot retrieve images\n");
                return false;
            }
        }
        mWidth = w = mImg.cols;
        mHeight = h = mImg.rows;
        mChannel = c = mImg.channels();
        mSize = mHeight*(int)mImg.step;
        return true;
    }
    else{
        return false;
    }
#endif
}
void VideoSaverFlyCapture::captureThread()
{
  if (!isFlyCapture()) {
    VideoSaver::captureThread();
  } else {
    m_GrabbingFinished = false;
    m_KeepThreadAlive = true;
    m_frameNumber = 0;
    m_newFrameAvailable = false;

    FlyCapture2::Image rgbImage;
    FlyCapture2::Image rawImage;
    FlyCapture2::Image rawImage2;

    m_timer = std::clock();

    while (m_KeepThreadAlive) {

      FlyCapture2::Error error = m_Camera.RetrieveBuffer( &rawImage );
      if ( error != FlyCapture2::PGRERROR_OK ) {
        error.PrintErrorTrace();
      }

      // get the time stamp
      const double localtimestamp = M_TIMER_ELAPSED;

      // convert to bgr
      rawImage2.DeepCopy(&rawImage); // not sure if really needed since we convert below...
      rawImage2.Convert(FlyCapture2::PIXEL_FORMAT_RGB, &rgbImage );

      // convert to Mat
      unsigned int rowBytes =
        (double) rgbImage.GetReceivedDataSize()/(double)rgbImage.GetRows();

      // copy to frame variable and update times
      {
        std::unique_lock<std::mutex> lock(VideoSaver::m_FrameMutex);

        m_Frame.release();
        m_Frame = cv::Mat(rgbImage.GetRows(), rgbImage.GetCols(),
                          CV_8UC3, rgbImage.GetData(), rowBytes);

        // could this happen ?
        if (m_Frame.size().width == 0)
          m_Frame = cv::Mat::zeros(m_FrameSize, CV_8UC3);

        m_TimeStamp = localtimestamp;
        m_frameNumber++;
        m_newFrameAvailable = true;
        VideoSaver::m_newFrameAvailableCond.notify_one();
      }
    }

    rawImage.ReleaseBuffer();
    rawImage2.ReleaseBuffer();
    rgbImage.ReleaseBuffer();

    m_newFrameAvailableCond.notify_one();

    std::cout << "Finished grabbing." << std::endl;
    m_GrabbingFinished = true;
  }
}