void CCflycap_grab_next_frame_blocking_with_stride( CCflycap *ccntxt,
                                                    unsigned char *out_bytes,
                                                    intptr_t stride0, float timeout ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;
  cam_iface_backend_extras* backend_extras =
    (cam_iface_backend_extras*)(ccntxt->inherited.backend_extras);

  // The main frame grabbing code goes here.
  FlyCapture2::Image rawImage;
  CIPGRCHK(cam->RetrieveBuffer( &rawImage ));

  if (stride0 < (intptr_t)rawImage.GetStride()) {
    CAM_IFACE_THROW_ERROR("stride too small for image");
  }

  if (stride0 == (intptr_t)rawImage.GetStride()) {
    // Same stride: copy the whole buffer in one call.
    memcpy((void*)out_bytes,                 /*dest*/
           (const void*)rawImage.GetData(),  /*src*/
           rawImage.GetDataSize());          /*size*/
  } else {
    // Different strides: copy row by row.
    for (int row=0; row<(int)rawImage.GetRows(); row++) {
      memcpy((void*)(out_bytes + row*stride0),                             /*dest*/
             (const void*)(rawImage.GetData() + row*rawImage.GetStride()), /*src*/
             rawImage.GetStride());                                        /*size*/
    }
  }
}
void CCflycap_get_num_framebuffers( CCflycap *ccntxt, int *num_framebuffers ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;
  FlyCapture2::FC2Config cfg;
  CIPGRCHK(cam->GetConfiguration(&cfg));
  *num_framebuffers = cfg.numBuffers;
}
void RawMultiTestApp::setup() {
    setFrameRate(60);
    setWindowSize(1280, 480);

    PGRGuid guid;

    //////////////////
    // setup camera 1
    mErr = mBusManager.GetCameraFromIndex(0, &guid);
    if (mErr != PGRERROR_OK) { showError(); quit(); }

    mCamera1 = new FlyCapture2::Camera();
    mErr = mCamera1->Connect(&guid);
    if (mErr != PGRERROR_OK) { showError(); mCamera1 = 0; quit(); }

    mCam1Data.targetSurface = Surface8u(640, 480, false);
    mErr = mCamera1->StartCapture(onImageGrabbed, &mCam1Data);
    if (mErr != PGRERROR_OK) { showError(); quit(); }

    //////////////////
    // setup camera 2
    mErr = mBusManager.GetCameraFromIndex(1, &guid);
    if (mErr != PGRERROR_OK) { showError(); quit(); }

    mCamera2 = new FlyCapture2::Camera();
    mErr = mCamera2->Connect(&guid);
    if (mErr != PGRERROR_OK) { showError(); mCamera2 = 0; quit(); }

    mCam2Data.targetSurface = Surface8u(640, 480, false);
    mErr = mCamera2->StartCapture(onImageGrabbed, &mCam2Data);
    if (mErr != PGRERROR_OK) { showError(); quit(); }
}
void CCflycap_set_num_framebuffers( CCflycap *ccntxt, int num_framebuffers ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;
  FlyCapture2::FC2Config cfg;

  CIPGRCHK(cam->StopCapture());
  CIPGRCHK(cam->GetConfiguration(&cfg));
  cfg.numBuffers = num_framebuffers;
  CIPGRCHK(cam->SetConfiguration(&cfg));
  CIPGRCHK(cam->StartCapture());
}
void CCflycap_close(CCflycap *ccntxt) {
  if (!ccntxt) { CAM_IFACE_THROW_ERROR("no CCflycap specified (NULL argument)"); }
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;

  CIPGRCHK(cam->StopCapture());
  CIPGRCHK(cam->Disconnect());

  cam_iface_backend_extras* backend_extras =
    (cam_iface_backend_extras*)(ccntxt->inherited.backend_extras);
  if (backend_extras!=NULL) {
    delete backend_extras;
    ccntxt->inherited.backend_extras = (void*)NULL;
  }

  delete cam;
  ccntxt->inherited.cam = (void*)NULL;
}
int main(int argc, char *argv[]) {
    get_param();

    std::string t_stamp = get_timestamp();
    sprintf(data_filename, "%s/data_%s.out", save_dir, t_stamp.c_str());
    std::cout << "data_filename: " << data_filename << std::endl;

    FlyCapture2::BusManager busMgr;
    PGRGuid guid;
    err = busMgr.GetCameraFromSerialNumber(cam_serial, &guid);
    if (err != PGRERROR_OK) {
        std::cout << "Index error" << std::endl;
        return 1;
    }

    // Point Grey cam init
    init_cam_capture(pt_grey, guid);

    if (use_calib) load_calib_param();

    imageCallback(&pt_grey);

    printf("Stopping...\n");
    err = pt_grey.StopCapture();
    if (err != PGRERROR_OK) {
        // This may fail when the camera was removed, so don't show
        // an error message
    }

    pt_grey.Disconnect();
    return 0;
}
int set_cam_param( FlyCapture2::Camera &camera ) {
    Property prop;

    prop.type = SHUTTER;
    // Ensure the property is on.
    prop.onOff = true;
    // Ensure auto-adjust mode is off.
    prop.autoManualMode = false;
    // Ensure the property is set up to use absolute value control.
    prop.absControl = true;
    // Set the absolute value of shutter to 20 ms.
    // prop.absValue = 39.3;
    // Set the property.
    err = camera.SetProperty( &prop );

    prop.type = AUTO_EXPOSURE;
    prop.onOff = true;
    prop.autoManualMode = true;
    prop.absControl = true;
    prop.absValue = 2.0;
    err = camera.SetProperty( &prop );

    // Enable metadata (embedded timestamps). Read the current settings first
    // so the other embedded-info fields are not written uninitialized.
    FlyCapture2::EmbeddedImageInfo info;
    err = camera.GetEmbeddedImageInfo(&info);
    info.timestamp.onOff = true;
    err = camera.SetEmbeddedImageInfo(&info);
    if (err == PGRERROR_PROPERTY_FAILED) {
        std::cout << "Property failed" << std::endl;
        return false;
    } else if (err == PGRERROR_PROPERTY_NOT_PRESENT) {
        std::cout << "Property not present" << std::endl;
        return false;
    } else if (err != PGRERROR_OK) {
        std::cout << "Properties not set" << std::endl;
        return false;
    }

    return true;
}
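// Illustrative sketch (not part of the original sources; the helper name is
// hypothetical): with FlyCapture2 a Property is usually populated via
// GetProperty() before fields are changed and written back with SetProperty(),
// so that untouched fields keep their current values. Assumes a connected
// FlyCapture2::Camera.
bool set_shutter_ms( FlyCapture2::Camera &camera, float shutter_ms ) {
    FlyCapture2::Property prop;
    prop.type = FlyCapture2::SHUTTER;
    // Read the current shutter settings from the camera.
    FlyCapture2::Error err = camera.GetProperty(&prop);
    if (err != FlyCapture2::PGRERROR_OK) return false;
    // Switch to manual, absolute-value control and set the new exposure time.
    prop.onOff = true;
    prop.autoManualMode = false;
    prop.absControl = true;
    prop.absValue = shutter_ms;
    err = camera.SetProperty(&prop);
    return (err == FlyCapture2::PGRERROR_OK);
}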
vector<CameraInfo> CameraPointGrey::getCameraList(){

    FlyCapture2::Error error;
    FlyCapture2::BusManager busManager;

    unsigned int numCameras;
    error = busManager.GetNumOfCameras(&numCameras);

    vector<CameraInfo> ret;

    if (error != FlyCapture2::PGRERROR_OK){
        PrintError(error);
        return ret;
    }

    for (unsigned int i=0; i < numCameras; i++){
        FlyCapture2::PGRGuid guid;
        error = busManager.GetCameraFromIndex(i, &guid);
        if (error != FlyCapture2::PGRERROR_OK)
            PrintError(error);

        // Connect to camera
        FlyCapture2::Camera cam;
        error = cam.Connect(&guid);
        if (error != FlyCapture2::PGRERROR_OK)
            PrintError(error);

        // Get the camera information
        FlyCapture2::CameraInfo camInfo;
        error = cam.GetCameraInfo(&camInfo);
        if (error != FlyCapture2::PGRERROR_OK)
            PrintError(error);

        CameraInfo camera;
        camera.busID = camInfo.nodeNumber;
        camera.model = camInfo.modelName;
        camera.vendor = "Point Grey Research";

        ret.push_back(camera);

        // Release the temporary connection before moving to the next camera.
        cam.Disconnect();
    }

    return ret;
}
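// Illustrative usage sketch (not part of the original sources): enumerate the
// attached Point Grey cameras through getCameraList() above and print what was
// found. The CameraInfo field types are assumed from how they are filled in above.
void printCameraList() {
    CameraPointGrey camDriver;
    vector<CameraInfo> cameras = camDriver.getCameraList();
    std::cout << "Found " << cameras.size() << " camera(s)" << std::endl;
    for (size_t i = 0; i < cameras.size(); i++) {
        std::cout << "  bus " << cameras[i].busID << ": "
                  << cameras[i].vendor << " " << cameras[i].model << std::endl;
    }
}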
void CCflycap_get_camera_property(CCflycap *ccntxt,
                                  int property_number,
                                  long* Value,
                                  int* Auto ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;

  if ((property_number<0) || (property_number>=NUM_CAM_PROPS)) {
    BACKEND_GLOBAL(cam_iface_error) = CAM_IFACE_GENERIC_ERROR;
    CAM_IFACE_ERROR_FORMAT("property invalid");
  }

  FlyCapture2::Property prop;
  prop.type = propno2prop(property_number);
  CIPGRCHK(cam->GetProperty( &prop ));

  *Value = prop.valueA;
  *Auto = prop.autoManualMode;
}
void BACKEND_METHOD(cam_iface_get_camera_info)(int device_number, Camwire_id *out_camid) {
  CAM_IFACE_CHECK_DEVICE_NUMBER(device_number);
  if (out_camid==NULL) { CAM_IFACE_THROW_ERROR("return structure NULL"); }

  FlyCapture2::PGRGuid guid;
  CIPGRCHK( BACKEND_GLOBAL(busMgr_ptr)->GetCameraFromIndex(device_number, &guid));

  FlyCapture2::CameraInfo camInfo;
  FlyCapture2::Camera *cam = new FlyCapture2::Camera;
  CIPGRCHK(cam->Connect(&guid));
  CIPGRCHK(cam->GetCameraInfo(&camInfo));
  CIPGRCHK(cam->Disconnect());
  delete cam;

  cam_iface_snprintf(out_camid->vendor, CAMWIRE_ID_MAX_CHARS, camInfo.vendorName);
  cam_iface_snprintf(out_camid->model,  CAMWIRE_ID_MAX_CHARS, camInfo.modelName);
  cam_iface_snprintf(out_camid->chip,   CAMWIRE_ID_MAX_CHARS, "GUID %x %x %x %x",
                     guid.value[0], guid.value[1], guid.value[2], guid.value[3]);
}
int init_cam_capture( FlyCapture2::Camera &camera, PGRGuid guid ) {
    CameraInfo camInfo;

    // Connect the camera
    err = camera.Connect( &guid );
    if ( err != PGRERROR_OK ) {
        std::cout << "Failed to connect to camera" << std::endl;
        return false;
    }

    // Get the camera info and print it out
    err = camera.GetCameraInfo( &camInfo );
    if ( err != PGRERROR_OK ) {
        std::cout << "Failed to get camera info from camera" << std::endl;
        return false;
    }
    std::cout << camInfo.vendorName << " "
              << camInfo.modelName << " "
              << camInfo.serialNumber << std::endl;

    err = camera.StartCapture();
    if ( err == PGRERROR_ISOCH_BANDWIDTH_EXCEEDED ) {
        std::cout << "Bandwidth exceeded" << std::endl;
        return false;
    } else if ( err != PGRERROR_OK ) {
        std::cout << "Failed to start image capture" << std::endl;
        return false;
    }

    return true;
}
int main() {
    FlyCapture2::Error error;
    FlyCapture2::PGRGuid guid;
    FlyCapture2::BusManager busMgr;
    FlyCapture2::Camera cam;
    FlyCapture2::VideoMode vm;
    FlyCapture2::FrameRate fr;
    FlyCapture2::Image rawImage;

    //Initializing camera
    error = busMgr.GetCameraFromIndex(0, &guid);
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    vm = FlyCapture2::VIDEOMODE_640x480Y8;
    fr = FlyCapture2::FRAMERATE_60;

    error = cam.Connect(&guid);
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    cam.SetVideoModeAndFrameRate(vm, fr);

    //Starting the capture
    error = cam.StartCapture();
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    //Image Variables
    Mat frame      (Size(640,480), CV_8UC1);
    Mat img_bgr    (Size(640,480), CV_8UC3);
    Mat img_hsv    (Size(640,480), CV_8UC3);  //Image in HSV color space
    Mat threshy    (Size(640,480), CV_8UC1);  //Threshed Image
    // Mat labelImg (Size(640,480), CV_8UC1); //Image Variable for blobs
    IplImage* histogram = cvCreateImage(cvSize(640,480),8,3);  //Image histograms
    Mat histograms (Size(640,480), CV_8UC1);  //greyscale image for histogram
    Mat empty      (Size(640,480), CV_8UC3);
    // CvBlobs blobs;

    while(1) {
        cam.RetrieveBuffer(&rawImage);
        memcpy(frame.data, rawImage.GetData(), rawImage.GetDataSize());

        // histogram = empty.clone();
        cvZero(histogram);
        // cvAddS(frame, cvScalar(70,70,70), frame);
        cvtColor(frame,img_bgr,CV_BayerBG2BGR);
        // cout<<"\n1";
        cvtColor(img_bgr,img_hsv,CV_BGR2HSV);
        // cout<<"\n2";

        //Thresholding the frame for yellow
        inRange(img_hsv, Scalar(20, 100, 20), Scalar(70, 255, 255), threshy);
        // cvInRangeS(img_hsv, cvScalar(0, 120, 100), cvScalar(255, 255, 255), threshy);

        //Filtering the frame - subsampling??
        // smooth(threshy,threshy,CV_MEDIAN,7,7);

        //Finding the blobs
        // unsigned int result=cvLabel(threshy,labelImg,blobs);
        //Filtering the blobs
        // cvFilterByArea(blobs,500,1000);
        //Rendering the blobs
        // cvRenderBlobs(labelImg,blobs,img_bgr,img_bgr);

        //Column histogram: for each column, count the non-zero thresholded pixels
        //and plot the resulting curve into the histogram image.
        CvPoint prev = cvPoint(0,0);
        for (int x=0; x<640; ++x) {
            Mat col = threshy.col(x);
            int y = 480 - countNonZero(col);
            for (int i=0; i<480; ++i)
                histograms.at<uchar>(i,x) = y;  // Mat::at takes (row, col)
            CvPoint next = cvPoint(x,y);
            cvLine(histogram,prev,next,cColor);
            // cvCircle(histogram,next,2,cColor,3);
            prev = next;
        }

        //Showing the images
        imshow("Live",img_bgr);
        imshow("Threshed",threshy);
        cvShowImage("Histogram",histogram);

        int c = cvWaitKey(10);
        if (c == 27) break;
    }

    //Cleanup
    // cvReleaseImage(&frame);
    // cvReleaseImage(&threshy);
    // cvReleaseImage(&img_hsv);
    // cvReleaseImage(&labelImg);
    cvReleaseImage(&histogram);
    cvDestroyAllWindows();
    cam.StopCapture();
    cam.Disconnect();
    return 0;
}
void CCflycap_get_camera_property_info(CCflycap *ccntxt,
                                       int property_number,
                                       CameraPropertyInfo *info) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;

  if (info==NULL) {
    CAM_IFACE_THROW_ERROR("no info argument specified (NULL argument)");
  }
  if ((property_number<0) || (property_number>=NUM_CAM_PROPS)) {
    BACKEND_GLOBAL(cam_iface_error) = CAM_IFACE_GENERIC_ERROR;
    CAM_IFACE_ERROR_FORMAT("property invalid");
  }

  FlyCapture2::Property prop;
  FlyCapture2::PropertyInfo propinfo;
  prop.type = propno2prop(property_number);
  propinfo.type = prop.type;

  switch(prop.type) {
  case FlyCapture2::BRIGHTNESS:    info->name = "brightness";    break;
  case FlyCapture2::AUTO_EXPOSURE: info->name = "auto exposure"; break;
  case FlyCapture2::SHARPNESS:     info->name = "sharpness";     break;
  case FlyCapture2::WHITE_BALANCE: info->name = "white balance"; break;
  case FlyCapture2::HUE:           info->name = "hue";           break;
  case FlyCapture2::SATURATION:    info->name = "saturation";    break;
  case FlyCapture2::GAMMA:         info->name = "gamma";         break;
  case FlyCapture2::IRIS:          info->name = "iris";          break;
  case FlyCapture2::FOCUS:         info->name = "focus";         break;
  case FlyCapture2::ZOOM:          info->name = "zoom";          break;
  case FlyCapture2::PAN:           info->name = "pan";           break;
  case FlyCapture2::TILT:          info->name = "tilt";          break;
  case FlyCapture2::SHUTTER:       info->name = "shutter";       break;
  case FlyCapture2::GAIN:          info->name = "gain";          break;
  case FlyCapture2::TRIGGER_MODE:  info->name = "trigger mode";  break;
  case FlyCapture2::TRIGGER_DELAY: info->name = "trigger delay"; break;
  case FlyCapture2::FRAME_RATE:    info->name = "frame rate";    break;
  case FlyCapture2::TEMPERATURE:   info->name = "temperature";   break;
  }

  FlyCapture2::Error error = cam->GetProperty( &prop );
  if (error.GetType()==FlyCapture2::PGRERROR_PROPERTY_FAILED) {
    info->is_present = 0;
    return;
  }
  CIPGRCHK(error);
  CIPGRCHK(cam->GetPropertyInfo( &propinfo ));

  info->is_present            = prop.present;
  info->min_value             = propinfo.min;
  info->max_value             = propinfo.max;
  info->has_auto_mode         = propinfo.autoSupported;
  info->has_manual_mode       = propinfo.manualSupported;
  info->is_scaled_quantity    = 0;
  info->original_value        = 0; // XXX FIXME
  info->available             = prop.present;
  info->readout_capable       = propinfo.readOutSupported;
  info->on_off_capable        = propinfo.onOffSupported;
  info->absolute_capable      = propinfo.absValSupported;
  info->absolute_control_mode = 0;
  info->absolute_min_value    = propinfo.absMin;
  info->absolute_max_value    = propinfo.absMax;
  return;
}
void CCflycap_stop_camera( CCflycap *ccntxt ) {
  CHECK_CC(ccntxt);
  FlyCapture2::Camera *cam = (FlyCapture2::Camera *)ccntxt->inherited.cam;
  CIPGRCHK(cam->StopCapture());
}
void CCflycap_CCflycap( CCflycap * ccntxt, int device_number, int NumImageBuffers,
                        int mode_number) {
  // call parent
  CamContext_CamContext((CamContext*)ccntxt,device_number,NumImageBuffers,mode_number);
  ccntxt->inherited.vmt = (CamContext_functable*)&CCflycap_vmt; // XXX cast error?

  CAM_IFACE_CHECK_DEVICE_NUMBER(device_number);

  std::vector<CamMode> result;
  int myerr = get_mode_list(device_number, result);

  ccntxt->inherited.device_number = device_number;
  ccntxt->inherited.backend_extras = new cam_iface_backend_extras;
  memset(ccntxt->inherited.backend_extras,0,sizeof(cam_iface_backend_extras));

  FlyCapture2::Camera *cam = new FlyCapture2::Camera;
  FlyCapture2::PGRGuid guid;
  CIPGRCHK( BACKEND_GLOBAL(busMgr_ptr)->GetCameraFromIndex(device_number, &guid));
  CIPGRCHK(cam->Connect(&guid));

  FlyCapture2::FC2Config cfg;
  CIPGRCHK(cam->GetConfiguration(&cfg));
  cfg.numBuffers = NumImageBuffers;
  CIPGRCHK(cam->SetConfiguration(&cfg));

  // Set the settings to the camera
  CamMode target_mode = result[mode_number];
  if (target_mode.videomode == FlyCapture2::VIDEOMODE_FORMAT7) {
    CIPGRCHK(cam->SetFormat7Configuration(&target_mode.fmt7ImageSettings,
                                          target_mode.fmt7PacketInfo.recommendedBytesPerPacket ));
  } else {
    CIPGRCHK(cam->SetVideoModeAndFrameRate(target_mode.videomode,
                                           target_mode.framerate));
  }

  ccntxt->inherited.cam = (void*)cam;

  // XXX move this to start camera and query camera for settings
  CIPGRCHK(cam->StartCapture());

  // Retrieve an image to get width, height. XXX change to query later.
  FlyCapture2::Image rawImage;
  CIPGRCHK( cam->RetrieveBuffer( &rawImage ));

  cam_iface_backend_extras *extras =
    (cam_iface_backend_extras *)ccntxt->inherited.backend_extras;
  ccntxt->inherited.depth = rawImage.GetBitsPerPixel();
  extras->buf_size       = rawImage.GetDataSize();
  extras->current_height = rawImage.GetRows();
  extras->current_width  = rawImage.GetCols();
  extras->max_height     = rawImage.GetRows();
  extras->max_width      = rawImage.GetCols();

  CIPGRCHK( cam->GetTriggerModeInfo( &extras->trigger_mode_info ));

  switch (rawImage.GetPixelFormat()) {
  case FlyCapture2::PIXEL_FORMAT_MONO8:
    ccntxt->inherited.coding = CAM_IFACE_MONO8;
    if (rawImage.GetBayerTileFormat()!=FlyCapture2::NONE) {
      NOT_IMPLEMENTED;
    }
    break;
  case FlyCapture2::PIXEL_FORMAT_RAW8:
    switch (rawImage.GetBayerTileFormat()) {
    case FlyCapture2::NONE: ccntxt->inherited.coding = CAM_IFACE_RAW8;             break;
    case FlyCapture2::RGGB: ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_RGGB; break;
    case FlyCapture2::GRBG: ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_GRBG; break;
    case FlyCapture2::GBRG: ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_GBRG; break;
    case FlyCapture2::BGGR: ccntxt->inherited.coding = CAM_IFACE_MONO8_BAYER_BGGR; break;
    default: NOT_IMPLEMENTED;
    }
  }
  CIPGRCHK(cam->StopCapture());
}
int get_mode_list(int device_number, std::vector<CamMode> &result ) {
  FlyCapture2::Camera *cam = new FlyCapture2::Camera;
  FlyCapture2::PGRGuid guid;
  FlyCapture2::Error err;
  CamMode mode;
  FlyCapture2::Format7Info fmt7Info;
  bool supported;
  //FlyCapture2::Mode k_fmt7Mode;
  //FlyCapture2::PixelFormat k_fmt7PixFmt;

  // Format 7 modes 0..31; MODE_0..MODE_31 are consecutive enum values.
  std::vector<FlyCapture2::Mode> fmt7modes;
  for (int m = FlyCapture2::MODE_0; m <= FlyCapture2::MODE_31; m++) {
    fmt7modes.push_back((FlyCapture2::Mode)m);
  }

  std::vector<FlyCapture2::PixelFormat> pixfmts;
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_MONO8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_411YUV8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_422YUV8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_444YUV8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RGB8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_MONO16);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RGB16);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_S_MONO16);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_S_RGB16);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RAW8);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RAW16);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_MONO12);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RAW12);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_BGR);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_BGRU);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RGB);
  pixfmts.push_back(FlyCapture2::PIXEL_FORMAT_RGBU);

  err = BACKEND_GLOBAL(busMgr_ptr)->GetCameraFromIndex(device_number, &guid);
  // if (err!=FlyCapture2::PGRERROR_OK) {
  //   goto errlabel2;
  // }
  err = cam->Connect(&guid);
  // if (err!=FlyCapture2::PGRERROR_OK) {
  //   goto errlabel2;
  // }

  // FORMAT 7 -- test all modes
  std::vector<FlyCapture2::Mode>::const_iterator fmt7mode;
  for (fmt7mode=fmt7modes.begin(); fmt7mode!=fmt7modes.end(); fmt7mode++) {
    fmt7Info.mode = *fmt7mode;
    err = cam->GetFormat7Info( &fmt7Info, &supported );
    // if (err != FlyCapture2::PGRERROR_OK) goto errlabel1;

    // Do work
    if (supported) {
      std::vector<FlyCapture2::PixelFormat>::const_iterator pixfmt;
      for (pixfmt=pixfmts.begin(); pixfmt!=pixfmts.end(); pixfmt++) {
        if (*pixfmt & fmt7Info.pixelFormatBitField) {
          std::ostringstream oss;
          oss << "format 7, mode " << *fmt7mode
              << " (" << pixfmt2string(*pixfmt) << ", "
              << fmt7Info.maxWidth << "x" << fmt7Info.maxHeight << ")";
          mode.descr = oss.str();
          mode.videomode = FlyCapture2::VIDEOMODE_FORMAT7;
          mode.fmt7ImageSettings.mode = *fmt7mode;
          mode.fmt7ImageSettings.offsetX = 0;
          mode.fmt7ImageSettings.offsetY = 0;
          mode.fmt7ImageSettings.width = fmt7Info.maxWidth;
          mode.fmt7ImageSettings.height = fmt7Info.maxHeight;
          mode.fmt7ImageSettings.pixelFormat = *pixfmt;

          bool valid;
          err = cam->ValidateFormat7Settings( &mode.fmt7ImageSettings,
                                              &valid,
                                              &mode.fmt7PacketInfo );
          if (err == FlyCapture2::PGRERROR_OK) {
            result.push_back(mode);
          }
        }
      }
    }
  }

  // not format 7 -- test each videomode and framerate combination
  std::vector<FlyCapture2::VideoMode> videomodes;
  videomodes.push_back(FlyCapture2::VIDEOMODE_160x120YUV444);
  videomodes.push_back(FlyCapture2::VIDEOMODE_320x240YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_640x480YUV411);
  videomodes.push_back(FlyCapture2::VIDEOMODE_640x480YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_640x480RGB);
  videomodes.push_back(FlyCapture2::VIDEOMODE_640x480Y8);
  videomodes.push_back(FlyCapture2::VIDEOMODE_640x480Y16);
  videomodes.push_back(FlyCapture2::VIDEOMODE_800x600YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_800x600RGB);
  videomodes.push_back(FlyCapture2::VIDEOMODE_800x600Y8);
  videomodes.push_back(FlyCapture2::VIDEOMODE_800x600Y16);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1024x768YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1024x768RGB);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1024x768Y8);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1024x768Y16);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1280x960YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1280x960RGB);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1280x960Y8);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1280x960Y16);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1600x1200YUV422);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1600x1200RGB);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1600x1200Y8);
  videomodes.push_back(FlyCapture2::VIDEOMODE_1600x1200Y16);

  std::vector<FlyCapture2::FrameRate> framerates;
  framerates.push_back(FlyCapture2::FRAMERATE_1_875);
  framerates.push_back(FlyCapture2::FRAMERATE_3_75);
  framerates.push_back(FlyCapture2::FRAMERATE_7_5);
  framerates.push_back(FlyCapture2::FRAMERATE_15);
  framerates.push_back(FlyCapture2::FRAMERATE_30);
  framerates.push_back(FlyCapture2::FRAMERATE_60);
  framerates.push_back(FlyCapture2::FRAMERATE_120);
  framerates.push_back(FlyCapture2::FRAMERATE_240);

  std::vector<FlyCapture2::VideoMode>::const_iterator videomode;
  for (videomode=videomodes.begin(); videomode!=videomodes.end(); videomode++) {
    std::vector<FlyCapture2::FrameRate>::const_iterator framerate;
    for (framerate=framerates.begin(); framerate!=framerates.end(); framerate++) {
      err = cam->GetVideoModeAndFrameRateInfo(*videomode, *framerate, &supported);
      if (err == FlyCapture2::PGRERROR_OK) {
        if (supported) {
          std::ostringstream oss;
          oss << "videomode " << *videomode << " framerate " << *framerate;
          mode.descr = oss.str();
          mode.videomode = *videomode;
          mode.framerate = *framerate;
          result.push_back(mode); // record the supported mode
        }
      }
    }
  }

  // err = cam->Disconnect();
  cam->Disconnect();
  delete cam;
  return 0;

  // errlabel1:
  //   err = cam->Disconnect();
  // errlabel2:
  //   return 1;
}
int main() {
    FlyCapture2::Error error;
    FlyCapture2::PGRGuid guid;
    FlyCapture2::BusManager busMgr;
    FlyCapture2::Camera cam;
    FlyCapture2::VideoMode vm;
    FlyCapture2::FrameRate fr;
    FlyCapture2::Image rawImage;

    //Initializing camera
    error = busMgr.GetCameraFromIndex(0, &guid);
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    vm = FlyCapture2::VIDEOMODE_640x480Y8;
    fr = FlyCapture2::FRAMERATE_60;

    error = cam.Connect(&guid);
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    cam.SetVideoModeAndFrameRate(vm, fr);

    //Starting the capture
    error = cam.StartCapture();
    if (error != FlyCapture2::PGRERROR_OK) {
        error.PrintErrorTrace();
        return CAM_FAILURE;
    }

    while(1) {
        Mat frame (Size(640,480),CV_8UC1);
        // Mat img_scene (Size(640,480),CV_8UC3);
        Mat img_object = imread( "bitslogo.png", CV_LOAD_IMAGE_GRAYSCALE );

        cam.RetrieveBuffer(&rawImage);
        memcpy(frame.data, rawImage.GetData(), rawImage.GetDataSize());
        cvtColor(frame,frame,CV_BayerBG2RGB);
        imwrite("temp.bmp",frame);
        // cvtColor(img_scene,img_scene,CV_RGB2GRAY);
        Mat img_scene = imread( "temp.bmp", CV_LOAD_IMAGE_GRAYSCALE );

        if( !img_object.data || !img_scene.data ) {
            std::cout << " --(!) Error reading images " << std::endl;
            return -1;
        }

        //-- Step 1: Detect the keypoints using SURF Detector
        int minHessian = 400;
        SurfFeatureDetector detector( minHessian );
        std::vector<KeyPoint> keypoints_object, keypoints_scene;
        detector.detect( img_object, keypoints_object );
        detector.detect( img_scene, keypoints_scene );

        //-- Step 2: Calculate descriptors (feature vectors)
        SurfDescriptorExtractor extractor;
        Mat descriptors_object, descriptors_scene;
        extractor.compute( img_object, keypoints_object, descriptors_object );
        extractor.compute( img_scene, keypoints_scene, descriptors_scene );

        //-- Step 3: Matching descriptor vectors using FLANN matcher
        FlannBasedMatcher matcher;
        std::vector< DMatch > matches;
        matcher.match( descriptors_object, descriptors_scene, matches );

        double max_dist = 0;
        double min_dist = 100;

        //-- Quick calculation of max and min distances between keypoints
        for( int i = 0; i < descriptors_object.rows; i++ ) {
            double dist = matches[i].distance;
            if( dist < min_dist ) min_dist = dist;
            if( dist > max_dist ) max_dist = dist;
        }
        printf("-- Max dist : %f \n", max_dist );
        printf("-- Min dist : %f \n", min_dist );

        //-- Draw only "good" matches (i.e. whose distance is less than 3*min_dist )
        std::vector< DMatch > good_matches;
        for( int i = 0; i < descriptors_object.rows; i++ ) {
            if( matches[i].distance < 3*min_dist ) {
                good_matches.push_back( matches[i] );
            }
        }

        Mat img_matches;
        drawMatches( img_object, keypoints_object, img_scene, keypoints_scene,
                     good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
                     vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

        //-- Localize the object
        std::vector<Point2f> obj;
        std::vector<Point2f> scene;
        for( size_t i = 0; i < good_matches.size(); i++ ) {
            //-- Get the keypoints from the good matches
            obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt );
            scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );
        }

        // findHomography needs at least 4 correspondences; skip this frame otherwise.
        if( good_matches.size() < 4 ) {
            if( cvWaitKey(100) == 27 ) break;
            continue;
        }

        Mat H = findHomography( obj, scene, CV_RANSAC );

        //-- Get the corners from the image_1 ( the object to be "detected" )
        std::vector<Point2f> obj_corners(4);
        obj_corners[0] = cvPoint(0,0);
        obj_corners[1] = cvPoint( img_object.cols, 0 );
        obj_corners[2] = cvPoint( img_object.cols, img_object.rows );
        obj_corners[3] = cvPoint( 0, img_object.rows );
        std::vector<Point2f> scene_corners(4);

        perspectiveTransform( obj_corners, scene_corners, H );

        //-- Draw lines between the corners (the mapped object in the scene - image_2 )
        line( img_matches, scene_corners[0] + Point2f( img_object.cols, 0),
              scene_corners[1] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
        line( img_matches, scene_corners[1] + Point2f( img_object.cols, 0),
              scene_corners[2] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
        line( img_matches, scene_corners[2] + Point2f( img_object.cols, 0),
              scene_corners[3] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
        line( img_matches, scene_corners[3] + Point2f( img_object.cols, 0),
              scene_corners[0] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );

        //-- Show detected matches
        imshow( "Good Matches & Object detection", img_matches );

        int c = cvWaitKey(100);
        if( c == 27 ) break;
    }

    cam.StopCapture();
    cam.Disconnect();
    return 0;
}