int _setVideoMode(int mode, long *sizex, long *sizey){ if(camera==NULL) { dc1394_log_error("Camera is not initialised"); return -1; } switch(mode) { case 0 : video_mode=DC1394_VIDEO_MODE_FORMAT7_0;break; case 1 : video_mode=DC1394_VIDEO_MODE_FORMAT7_1;break; case 2 : video_mode=DC1394_VIDEO_MODE_FORMAT7_2;break; case 3 : video_mode=DC1394_VIDEO_MODE_FORMAT7_3;break; default : ; } unsigned int h_size = 160; unsigned int v_size = 120; err = dc1394_format7_get_max_image_size(camera, video_mode, &h_size, &v_size); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not get max image size of Format7 mode"); *sizex = h_size; *sizey = v_size; err = dc1394_video_set_mode(camera, video_mode); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not set Format7 mode"); err = dc1394_format7_set_roi(camera, video_mode, DC1394_COLOR_CODING_MONO16, DC1394_USE_MAX_AVAIL, // use max packet size 0, 0, // left, top h_size, v_size); // width, height DC1394_ERR_RTN(err,"Unable to set Format7 mode 0.\nEdit the example file manually to fit your camera capabilities"); return 0; }
int _setupCam(long yExpo, long yGain, long *sizex, long *sizey, long speed){ dc1394camera_list_t * list; dc1394_t *d = dc1394_new (); if (!d) return -1; dc1394speed_t iso_speed; if(speed == 400) iso_speed = DC1394_ISO_SPEED_400; else if(speed == 800) iso_speed = DC1394_ISO_SPEED_800; else return -1; //iso_speed = DC1394_ISO_SPEED_400; dc1394_log_register_handler(DC1394_LOG_WARNING, NULL, NULL); err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return -1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %lld", list->ids[0].guid); return -1; } dc1394_camera_free_list (list); printf("Using camera with GUID %lld\n", camera->guid); release_iso_and_bw(); uint32_t expo = yExpo; uint32_t gain = yGain; /*----------------------------------------------------------------------- * setup capture *-----------------------------------------------------------------------*/ dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not set operation mode"); err=dc1394_video_set_iso_speed(camera, iso_speed); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not set iso speed"); _setVideoMode(0, sizex, sizey); err=dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, gain); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not define gain"); err=dc1394_feature_set_value(camera, DC1394_FEATURE_SHUTTER, expo); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not define shutter"); return 0; }
/*
 * Halt ISO transmission and tear down the capture buffers.
 * Returns 0 on success, -1 when no camera has been initialised.
 */
int _stopCam()
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }

    /*-----------------------------------------------------------------------
     *  stop data transmission
     *-----------------------------------------------------------------------*/
    err = dc1394_video_set_transmission(camera, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not stop transmission of the camera");

    err = dc1394_capture_stop(camera);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not stop capture of the camera");

    return 0;
}
/*
 * Set the camera's EXPOSURE feature to yExpo.
 * Returns 0 on success, -1 if the camera is not initialised.
 */
int _setExposure(long yExpo)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_feature_set_value(camera, DC1394_FEATURE_EXPOSURE, yExpo);
    /* BUG FIX: the message used to say "shutter" although the feature
     * written here is EXPOSURE. */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define exposure");
    return 0;
}
/*
 * Query the AVT high-SNR mode: whether it is enabled and how many frames
 * are averaged. Returns 0 on success, -1 if no camera is initialised.
 */
int _getHSNR(dc1394bool_t *yOnOff, uint32_t *grabCount)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_avt_get_hsnr(camera, yOnOff, grabCount);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get HSNR");
    return 0;
}
/*
 * Enable/disable the AVT high-SNR mode and set its frame-averaging count.
 * Returns 0 on success, -1 if no camera is initialised.
 */
int _setHSNR(long yOnOff, long grabCount)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_avt_set_hsnr(camera, yOnOff, grabCount);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define HSNR");
    return 0;
}
/*
 * Program the AVT extended shutter with the exposure time yExpo.
 * (The "extented" spelling matches the libdc1394 AVT vendor API.)
 * Returns 0 on success, -1 if no camera is initialised.
 */
int _setExtShut(long yExpo)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_avt_set_extented_shutter(camera, yExpo);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define extented shutter");
    return 0;
}
/*
 * Set the camera's black-level offset (the BRIGHTNESS feature) to yOffset.
 * Returns 0 on success, -1 if the camera is not initialised.
 */
int _setOffset(long yOffset)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, yOffset);
    /* BUG FIX: message used to say "gain" — this writes BRIGHTNESS (offset). */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define offset");
    return 0;
}
/*
 * Set the camera's GAIN feature to yGain.
 * Returns 0 on success, -1 if no camera is initialised.
 */
int _setGain(long yGain)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, yGain);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define gain");
    return 0;
}
/*
 * Read back the camera's EXPOSURE feature value.
 * Returns the value (>= 0) on success, -1 if the camera is not initialised.
 */
int _getExposure()
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    uint32_t expo = 0;
    err = dc1394_feature_get_value(camera, DC1394_FEATURE_EXPOSURE, &expo);
    /* BUG FIX: message used to say "shutter" although EXPOSURE is read. */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get exposure");
    return expo;
}
/*
 * Read back the camera's GAIN feature value.
 * Returns the value (>= 0) on success, -1 if no camera is initialised.
 */
int _getGain()
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    uint32_t value = 0;
    err = dc1394_feature_get_value(camera, DC1394_FEATURE_GAIN, &value);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get gain");
    return value;
}
/*
 * Read back the camera's black-level offset (the BRIGHTNESS feature).
 * Returns the value (>= 0) on success, -1 if the camera is not initialised.
 */
int _getOffset()
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    uint32_t yOffset = 0;
    err = dc1394_feature_get_value(camera, DC1394_FEATURE_BRIGHTNESS, &yOffset);
    /* BUG FIX: message used to say "gain" — this reads BRIGHTNESS (offset). */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get offset");
    return yOffset;
}
/*
 * Read back the AVT extended-shutter exposure time.
 * (The "extented" spelling matches the libdc1394 AVT vendor API.)
 * Returns the value (>= 0) on success, -1 if no camera is initialised.
 */
int _getExtShut()
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }
    uint32_t value = 0;
    err = dc1394_avt_get_extented_shutter(camera, &value);
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get extented shutter");
    return value;
}
int _acquire(short *yFrames, long sizex, long sizey, long yNbFrames ){ if(camera==NULL) { dc1394_log_error("Camera is not initialised"); return -1; } dc1394video_frame_t *frame; int nbframes = yNbFrames; long j=0; while( j<nbframes) { /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ //fprintf(stderr,"Trying... \n"); err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not capture a frame"); if( 1 ) { unsigned int i=0; for(i=0; i<sizex*sizey; i++) yFrames[j*sizex*sizey+i] = frame->image[2*i]*256 + frame->image[2*i+1]; } else { //impossible de passer par memcpy car Yorick ne gere pas les unsigned. //memcpy((void *)&yFrames[j*sizex*sizey], (void *)frame->image, sizex*sizey*sizeof(short)); } // release buffer err=dc1394_capture_enqueue(camera,frame); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not return frame"); j++; } return 0; }
int _startCam(){ if(camera==NULL) { dc1394_log_error("Camera is not initialised"); return -1; } err=dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT); // err=dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_AUTO_ISO); // err=dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_CHANNEL_ALLOC & DC1394_CAPTURE_FLAGS_BANDWIDTH_ALLOC); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera"); /*----------------------------------------------------------------------- * print allowed and used packet size *-----------------------------------------------------------------------*/ unsigned int min_bytes, max_bytes; unsigned int actual_bytes; uint64_t total_bytes = 0; err=dc1394_format7_get_packet_parameters(camera, video_mode, &min_bytes, &max_bytes); DC1394_ERR_RTN(err,"Packet para inq error"); //printf( "camera reports allowed packet size from %d - %d bytes\n", min_bytes, max_bytes); err=dc1394_format7_get_packet_size(camera, video_mode, &actual_bytes); DC1394_ERR_RTN(err,"dc1394_format7_get_packet_size error"); //printf( "camera reports actual packet size = %d bytes\n", actual_bytes); err=dc1394_format7_get_total_bytes(camera, video_mode, &total_bytes); DC1394_ERR_RTN(err,"dc1394_query_format7_total_bytes error"); //printf( "camera reports total bytes per frame = %lld bytes\n",total_bytes); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not start camera iso transmission"); return 0; }
/*
 * Program a Format7 region of interest in MONO16 with the maximum
 * available packet size.
 *
 * yX, yY         : left/top corner of the ROI.
 * ySizeX, ySizeY : ROI width and height.
 *
 * Returns 0 on success, -1 if no camera is initialised.
 */
int _setROI(long yX, long yY, long ySizeX, long ySizeY)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }

    err = dc1394_format7_set_roi(camera, video_mode,
                                 DC1394_COLOR_CODING_MONO16,
                                 DC1394_USE_MAX_AVAIL, /* use max packet size */
                                 yX, yY,               /* left, top */
                                 ySizeX, ySizeY);      /* width, height */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not define ROI");

    return 0;
}
/*
 * Query the current Format7 region of interest.
 *
 * yX, yY         : out, left/top corner of the ROI.
 * ySizeX, ySizeY : out, ROI width and height.
 *
 * Returns 0 on success, -1 if the camera is not initialised.
 */
int _getROI(int *yX, int *yY, int *ySizeX, int *ySizeY)
{
    if (camera == NULL) {
        dc1394_log_error("Camera is not initialised");
        return -1;
    }

    dc1394color_coding_t color_coding;
    /* BUG FIX: dc1394_format7_get_roi() writes uint32_t values; the old
     * code passed `int*` (including the caller's pointers) directly.
     * Use correctly typed temporaries and copy out. */
    uint32_t bytes_per_packet;
    uint32_t left, top, width, height;

    err = dc1394_format7_get_roi(camera, video_mode, &color_coding, &bytes_per_packet,
                                 &left, &top,       /* left, top */
                                 &width, &height);  /* width, height */
    DC1394_ERR_CLN_RTN(err, _unsetupCam(), "Could not get ROI");

    *yX = (int)left;
    *yY = (int)top;
    *ySizeX = (int)width;
    *ySizeY = (int)height;
    return 0;
}
bool ieee1394capture::init(RoboCompCamera::TCamParams ¶ms_, RoboCompJointMotor::JointMotorPrx head_ , RoboCompDifferentialRobot::DifferentialRobotPrx base_ ) { params = params_; head = head_; base = base_; int32_t i; fps = (dc1394framerate_t)params.FPS;//15; res = (dc1394video_mode_t)0; switch (fps) { case 1: fps = DC1394_FRAMERATE_1_875; break; case 3: fps = DC1394_FRAMERATE_3_75; break; case 15: fps = DC1394_FRAMERATE_15; break; case 30: fps = DC1394_FRAMERATE_30; break; case 60: fps = DC1394_FRAMERATE_60; break; default: fps = DC1394_FRAMERATE_7_5; break; } switch (res) { case 1: res = DC1394_VIDEO_MODE_640x480_YUV411; device_width = 640; device_height = 480; break; case 2: res = DC1394_VIDEO_MODE_640x480_RGB8; device_width = 640; device_height = 480; break; default: res = DC1394_VIDEO_MODE_320x240_YUV422; device_width = 320; device_height = 240; break; } /// Get handle qDebug() << "ieee1394capture::init() -> Initializating first Firewire Card in the system..."; if (!(d = dc1394_new())) { qDebug() << "ieee1394capture::init() -> Fatal error: Unable to aquire a handle to the Ieee1394 device"; qDebug() << "Please check if the kernel modules `ieee1394',`raw1394' and `ohci1394' are loaded or if you have read/write access to /dev/raw1394 and to /dev/video1394-0 " ; return false; } CREATED_BUS = true; /// Create camera interfaces numCameras = 0; err = dc1394_camera_enumerate(d, &list); DC1394_ERR_RTN(err, "Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } numCameras = 0; for (uint32_t di = 0; di < list->num; di++) { if (numCameras >= MAX_CAMERAS) break; cameras[numCameras] = dc1394_camera_new(d, list->ids[di].guid); if (!cameras[numCameras]) { dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[di].guid); continue; } printf("Camera #%d\n", numCameras); numCameras++; } dc1394_camera_free_list(list); if ( numCameras < 1 ) { qDebug() << "ieee1394capture::init() -> Fatal error: No cameras found in 
the bus! Called from "; cleanup(); return false; } /// Check if one camera has become the root node /* for ( int n=0; n < numCameras; n++ ) { if ( cameraNodeList[n] == numNodes ) { qDebug() << "ieee1394capture::init() -> Fatal error: Sorry, your camera is the highest numbered node of the bus, and has therefore become the root node." ; cleanup(); return false; } }*/ CREATED_CAMS = true; /// Setup cameras for capture qDebug() << "ieee1394capture::init() -> Searching cameras with requested parameters:"; printf("%s\n",params.mode.c_str()); if (params.mode == "MODE_320x240_YUV422") { res = DC1394_VIDEO_MODE_320x240_YUV422; params.width = 320; params.height = 240; } else if (params.mode == "MODE_640x480_YUV422" ) { res = DC1394_VIDEO_MODE_640x480_YUV422; params.width = 640; params.height = 480; } else if (params.mode == "MODE_640x480_RGB" ) { res = DC1394_VIDEO_MODE_640x480_RGB8; params.width = 640; params.height = 480; } else if (params.mode == "MODE_640x480_YUV411") { res = DC1394_VIDEO_MODE_640x480_YUV411; params.width = 640; params.height = 480; } else if (params.mode == "MODE_640x480_MONO") { res = DC1394_VIDEO_MODE_640x480_MONO8; params.width = 640; params.height = 480; } else if (params.mode == "MODE_640x480_MONO16") { res = DC1394_VIDEO_MODE_640x480_MONO16; params.width = 640; params.height = 480; } else if (params.mode == "MODE_516x338_YUV422") { res = DC1394_VIDEO_MODE_FORMAT7_1; params.width = 516; params.height = 338; } else qFatal("ieee1394capture::init() -> Image Mode %s not available. Aborting...", params.mode.c_str()); params.size = params.width*params.height; if (params.FPS!=15 and params.FPS!=30) { qWarning("ieee1394capture::init() -> Framerate %d not available. 
Aborting...", params.FPS ); cleanup(); return false; } dc1394format7modeset_t info; for (i = 0; i < numCameras; i++) { if (params.mode == "MODE_516x338_YUV422") { err = dc1394_format7_get_modeset(cameras[i], &info); for( int j=0;j<DC1394_VIDEO_MODE_FORMAT7_NUM;j++) { qDebug() << info.mode[j].present; qDebug() << info.mode[j].size_x; qDebug() << info.mode[j].size_y; qDebug() << info.mode[j].max_size_x; qDebug() << info.mode[j].max_size_y; qDebug() << info.mode[j].pos_x; qDebug() << info.mode[j].pos_y; qDebug() << info.mode[j].unit_size_x; qDebug() << info.mode[j].unit_size_y; qDebug() << info.mode[j].unit_pos_x; qDebug() << info.mode[j].unit_pos_y; qDebug() << info.mode[j].pixnum; qDebug() << info.mode[j].packet_size; /* in bytes */ qDebug() << info.mode[j].unit_packet_size; qDebug() << info.mode[j].max_packet_size; } } release_iso_and_bw(i); err = dc1394_video_set_mode(cameras[i], res); DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set video mode"); err = dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_400); DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set ISO speed"); //For format 7 modes only if (params.mode == "MODE_516x338_YUV422") { // uint32_t packet_size; // err=dc1394_format7_set_image_size(cameras[i], res, 514, 384); // DC1394_ERR_RTN(err,"Could not set image size"); // err=dc1394_format7_get_recommended_packet_size(cameras[i], res, &packet_size); // DC1394_ERR_RTN(err,"Could not get format 7 recommended packet size"); // err=dc1394_format7_set_roi(cameras[i], res, DC1394_COLOR_CODING_YUV422, packet_size, 0,0, 514, 384); // DC1394_ERR_RTN(err,"Could not set ROI"); qDebug() << "ya"; } err = dc1394_video_set_framerate(cameras[i], fps); DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set framerate"); err = dc1394_capture_setup(cameras[i], NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err, cleanup(), "Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera"); err = 
dc1394_video_set_transmission(cameras[i], DC1394_ON); DC1394_ERR_CLN_RTN(err, cleanup(), "Could not start camera iso transmission"); } fflush(stdout); qDebug() << " ieee1394capture::init() -> Iso transmission started."; ///Buffers de imagen qDebug() << "BUFFERS DE IMAGEN ------------------------------------------"; for ( int i=0; i < numCameras; i++ ) { AimgBuffer[i] = ( Ipp8u * ) ippsMalloc_8u ( params.size * 9 ); BimgBuffer[i] = ( Ipp8u * ) ippsMalloc_8u ( params.size * 9 ); img8u_lum[i] = AimgBuffer[i]; img8u_YUV[i] = AimgBuffer[i]+params.size; localYRGBImgBufferPtr[i] = BimgBuffer[i]; qDebug() << "Reservando" << params.size * 9 <<" para localYRGBImgBufferPtr["<<i<<"]"; printf("(de %p a %p)\n", localYRGBImgBufferPtr[i], localYRGBImgBufferPtr[i]+(params.size*9-1)); } planos[0]=BimgBuffer[0]+params.size*3; planos[1]=BimgBuffer[0]+ ( params.size*4 ); planos[2]=BimgBuffer[0]+ ( params.size*5 ); //img8u_aux = BimgBuffer[0]+(params.size*6); imgSize_ipp.width=params.width; imgSize_ipp.height=params.height; return true; }
//-------------------------------------------------------------------- int of1394VideoGrabber::initGrabber() { // moved from constructor int i; printf("Making new camera\n"); d = dc1394_new (); err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { if(err!=0 && bVerbose)printf("\n No cameras found\n "); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { if(err!=0 && bVerbose)printf("\n Failed to initialize camera with guid %PRIx64", list->ids[0].guid); return 1; } dc1394_camera_free_list (list); /*----------------------------------------------------------------------- * setup capture *-----------------------------------------------------------------------*/ dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_0); uint32_t f, f2; dc1394_format7_get_image_size(camera, DC1394_VIDEO_MODE_FORMAT7_0, &f, &f2); err=dc1394_capture_setup(camera,4, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera\n"); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not start camera iso transmission\n"); dc1394_get_image_size_from_video_mode(camera, video_mode, &width, &height); if(err != DC1394_SUCCESS){ cout<<"get image size error: "<<err<<endl; } printf("Size from video mode = %i wide %i high\n", width, height); pixels = new unsigned char[width*height*3]; // x3 for RGB pixels2 = new unsigned char[width*height*3]; // x3 for RGB tex.allocate(width,height,GL_LUMINANCE); err = dc1394_capture_dequeue( camera, DC1394_CAPTURE_POLICY_WAIT, &frame ) ; DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not capture a frame\n"); memcpy( 
pixels, frame->image, width * height*3) ; dc1394_capture_enqueue( camera, frame ) ; ofSleepMillis(10); startThread(true, false); failedToInit = false; dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_MANUAL); dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL); dc1394_feature_set_mode(camera, DC1394_FEATURE_GAMMA, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL); dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_IRIS, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_TEMPERATURE, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_WHITE_SHADING, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_AUTO); dc1394_feature_set_mode(camera, DC1394_FEATURE_HUE, DC1394_FEATURE_MODE_AUTO); //Print features dc1394_feature_get_all(camera, &features); dc1394_feature_print_all(&features, stdout); return 0; }
/*
 * Acquisition client main loop.
 *
 * Initialises the first dc1394 camera into the global gCamera, spawns the
 * acquisition (and optionally save) threads, then loops: fetch a frame via
 * get_image(), display it (SDL or OpenCV), and — unless STANDALONE is
 * defined — poll a control socket (abstract UNIX or INET, per the INET
 * macro) for start/stop/quit commands from the server.
 *
 * argv[1] (optional) is a suffix for the abstract socket name
 * ("fwsocket<ntry>"). Returns 0 on a clean shutdown request, 1 on any
 * setup failure.
 */
int main (int argc, char **argv) {
    char* ntry = (char*)"";
    if (argc > 1) {
        ntry = argv[1];
    }

    /* Timing bookkeeping; target duration derives from the FPS constant. */
    double fps = FPS;
    double target_dur = 1.0/fps;
    double tol = 1.0e-3;
    double total_dur = 0.0;

    /* Bus and camera setup. */
    dc1394_t * d = dc1394_new();
    if (!d) {
        return 1;
    }
    dc1394camera_list_t * list;
    dc1394error_t err = dc1394_camera_enumerate (d, &list);
    DC1394_ERR_RTN(err,"Failed to enumerate cameras");
    if (list->num == 0) { /* Verify that we have at least one camera */
        dc1394_log_error("No cameras found");
        return 1;
    }
    gCamera.init(d, list->ids[0].guid);
    if (!gCamera.cam()) {
        /* NOTE(review): guid is 64-bit; "%ld" is only correct on LP64 —
         * "%" PRIx64 would be portable. Left unchanged here. */
        dc1394_log_error("Failed to initialize camera with guid %ld", list->ids[0].guid);
        dc1394_camera_free_list (list);
        return 1;
    }
    dc1394_camera_free_list (list);

    /*-----------------------------------------------------------------------
     *  have the camera start sending us data
     *-----------------------------------------------------------------------*/
    err = gCamera.start_transmission();
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Could not start camera iso transmission");

    /*-----------------------------------------------------------------------
     *  capture one frame
     *-----------------------------------------------------------------------*/
    uint32_t width = 0;
    uint32_t height = 0;
    gCamera.get_image_size(&width, &height);
    /* Every incoming frame is rotated 180 degrees about the image centre. */
    cv::Mat mapping = cv::getRotationMatrix2D(cv::Point2f(width/2.0, height/2.0), 180.0, 1.0);

#ifdef USE_SDL
    static char *var = (char*)"SDL_VIDEO_WINDOW_POS=\"1280,480\"";
    int ret = putenv(var);
    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
        std::cerr << "DC1394: Unable to initialize SDL: " << SDL_GetError() << std::endl;
        return 1;
    }
    atexit(SDL_Quit);
    SDL_Surface *screen;
    screen = SDL_SetVideoMode(width, height, 24, SDL_HWSURFACE);
    if (screen == NULL) {
        std::cerr << "DC1394: Unable to set SDL video mode:" << SDL_GetError() << std::endl;
    }
    SDL_Event event;
#endif

#ifndef LICKOMETER
    pthread_t save_thread, acq_thread;
    pthread_create( &save_thread, NULL, &thread_save_image, NULL);
#endif
    /* NOTE(review): save_thread/acq_thread are declared again below; when
     * LICKOMETER is undefined this redeclares the pair above and should not
     * compile — confirm the intended #ifdef structure. */
    pthread_t save_thread, acq_thread;
    pthread_create( &acq_thread, NULL, &thread_acq_image, NULL);

    /* Short sleep interval used in the polling loops below. */
    timespec t_sleep, t_rem;
    t_sleep.tv_sec = 0;
    t_sleep.tv_nsec = 1000;

#ifndef STANDALONE
    int s;
    if ((s = socket(SOCKTYPE, SOCK_STREAM, 0)) < 0) {
        perror("DC1394: client: socket");
        cleanup_and_exit(gCamera);
        return 1;
    }

    /*
     * Create the address we will be connecting to.
     */
#ifndef INET
    sockaddr_un sa;
    sa.sun_family = AF_UNIX;
    std::ostringstream tmpfn;
    tmpfn << "fwsocket" << ntry;
    std::cout << "DC1394: socket name " << tmpfn.str() << std::endl;
    int nameLen = strlen(tmpfn.str().c_str());
    if (nameLen >= (int) sizeof(sa.sun_path) -1) { /* too long? */
        cleanup_and_exit(gCamera);
        return 1;
    }
    sa.sun_path[0] = '\0'; /* abstract namespace */
    strcpy(sa.sun_path+1, tmpfn.str().c_str());
    int len = 1 + nameLen + offsetof(struct sockaddr_un, sun_path);
#else
    sockaddr_in sa;
    bzero((char *) &sa, sizeof(sa));
    sa.sin_family = AF_INET;
    hostent *server = gethostbyname("128.40.156.129");
    bcopy((char *)server->h_addr, (char *)&sa.sin_addr.s_addr, server->h_length);
    sa.sin_port = htons(35000);
    int len = sizeof(sa);
#endif

    /*
     * Try to connect to the address. For this to
     * succeed, the server must already have bound
     * this address, and must have issued a listen()
     * request.
     *
     * The third argument indicates the "length" of
     * the structure, not just the length of the
     * socket name.
     */
    std::cout << "DC1394: Waiting for connection... " << std::flush;
    while (true) {
        // wait for connection:
        if (connect(s, (sockaddr*)&sa, len) < 0) {
            nanosleep(&t_sleep, &t_rem);
        } else {
            break;
        }
    }
    std::cout << "done" << std::endl;

    /* Handshake: expect a start message, then answer "ready". */
    bool connected = false;
    std::vector<char> data(BUFSIZE);
    int nrec = recv(s, &data[0], data.size(), 0);
    std::string datastr(data.begin(), data.end());
    if (nrec<=0) {
        std::cerr << "DC1394: Didn't receive start message; exiting now" << std::endl;
        cleanup_and_exit(gCamera);
        close(s);
        return 1;
    }
    connected = true;
    std::string ready = "ready";
    while (send(s, ready.c_str(), ready.size(), 0) < 0) {
        perror("DC1394: client: send");
    }
    /* Switch the socket to non-blocking for the polling loop below. */
    int flags = 0;
    if (-1 == (flags = fcntl(s, F_GETFL, 0)))
        flags = 0;
    if (fcntl(s, F_SETFL, flags | O_NONBLOCK)==-1) {
        perror("DC1394: client: unblock");
    }
#endif

    /* pthread_mutex_lock( &camera_mutex );
       gCamera.wait_for_trigger();
       pthread_mutex_unlock( &camera_mutex );

       Wait for acq_frame_buffer to fill instead */

    int ncount = 0;
    cv::Mat im(cv::Size(width, height), CV_8UC1);
    cv::Mat thresh = cv::Mat::ones(cv::Size(width, height), CV_8UC1);
    cv::Mat prevs(cv::Size(width, height), CV_8UC1);
    cv::Mat gray(cv::Size(width, height), CV_8UC1);

    // wait for image:
    int nframes = get_image(im, mapping, false, -1, "", ncount);
    std::cout << "DC1394: Waiting for first image to arrive... " << std::flush;
    int nwait = 0;
    while (!nframes) {
        nanosleep(&t_sleep, &t_rem);
        std::cout << "." << std::flush;
        nframes = get_image(im, mapping, false, -1, "", ncount);
        nwait++;
#ifdef STANDALONE
        if (nwait > 1000) {
#else
        if (nwait > 100000) {
#endif
            std::cout << "Time out, stopping now\n";
            cleanup_and_exit(gCamera);
        }
    }
    timespec time0;
    clock_gettime(CLOCK_REALTIME, &time0);
    std::cout << "DC1394: image arrived: "
              << IplImage(im).depth << " bits, "
              << IplImage(im).nChannels << " channels, "
              << IplImage(im).widthStep << " step width"
              << std::endl;

#ifdef USE_SDL
    SDL_Surface *surface = SDL_CreateRGBSurfaceFrom((void*)im.data,
                                                    im.cols,
                                                    im.rows,
                                                    IplImage(im).depth*IplImage(im).nChannels,
                                                    IplImage(im).widthStep,
                                                    0xffffff, 0xffffff, 0xffffff, 0);
    screen = SDL_GetVideoSurface();
    if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0)
        SDL_UpdateRect(screen, 0, 0, 0, 0);
#else
    cv::namedWindow("DC1394", CV_WINDOW_AUTOSIZE);
    cvMoveWindow("DC1394", 1280, 480);
    cv::imshow("DC1394", im);
#endif

    /* Per-loop timing state; t_disconnect / t_notrigger track time-outs. */
    timespec time1 = time0;
    timespec time2 = time0;
    timespec time3 = time0;
    timespec time4 = time0;
    timespec t_disconnect = time0;
    timespec t_notrigger = time0;

#ifdef STANDALONE
    int s = -1;
#endif

    std::string fn = "";
#ifdef LICKOMETER
    std::string fn_lick = "";
    FILE* fp_lick = NULL;
#endif
    int key = 0;
    int nloop = 0;

    /* Main loop: poll control socket, fetch frames, display, record. */
    while (true) {
        clock_gettime( CLOCK_REALTIME, &time1);
#ifndef STANDALONE
        std::vector<char> data(BUFSIZE);
        int nrec = recv(s, &data[0], data.size(), 0);
        std::string datastr(data.begin(), data.end());
#endif
        nframes += get_image(im, mapping, false, s, fn, ncount);

#ifndef STANDALONE
        // no update from blender in a long time, terminate process
        if (datastr.find("1")==std::string::npos) {
            if (connected) {
                t_disconnect = time1;
                connected = false;
            } else {
                if (tdiff(time1, t_disconnect) > TIMEOUT) {
                    std::cout << "DC1394: Received termination signal" << std::endl;
                    close(s);
                    pthread_cancel(acq_thread);
                    pthread_cancel(save_thread);
                    return 0;
                }
            }
        } else {
            connected = true;
        }

        /* Explicit termination */
        if (datastr.find("quit")!=std::string::npos) {
            std::cout << "DC1394: Game over signal." << std::endl;
            std::string sclose = "close";
            while (send(s, sclose.c_str(), sclose.size(), 0) < 0) {
                perror("DC1394: client: send");
            }
            close(s);
            pthread_cancel(acq_thread);
            pthread_cancel(save_thread);
            return 0;
        }

        // Stop recording
        if (datastr.find("stop") != std::string::npos && fn != "") {
            fn = "";
#ifdef LICKOMETER
            fn_lick = "";
            if (fp_lick) {
                fclose(fp_lick);
                fp_lick = NULL;
            }
#endif
            std::cout << "DC1394: Stopping video" << std::endl;
            connected = true;
            ncount = 0;
        }

        // Start recording
        if (datastr.find("avi") != std::string::npos && datastr.find("stop") == std::string::npos && fn == "") {
            /* The target file name is transmitted between "begin"/"end". */
            std::size_t startpos = datastr.find("begin")+5;
            std::size_t endpos = datastr.find("end") - datastr.find("begin") - 5;
            fn = datastr.substr(startpos, endpos);
            fn = std::string(trunk) + "data/" + fn;
#ifdef LICKOMETER
            fn_lick = fn + "_lick";
            fp_lick = fopen(fn_lick.c_str(), "wb");
            std::cout << "DC1394: Recording lick detection, writing to " << fn_lick << std::endl;
#else
            boost::filesystem::path path(fn);
            boost::filesystem::path writepath(path);
            // Test whether dir exists:
            if (!boost::filesystem::exists(writepath)) {
                std::cout << "DC1394: Creating directory " << writepath << std::endl;
                boost::filesystem::create_directories(writepath);
            }
            fn += "/";
            /* check save frame buffer */
            std::size_t nfb = save_frame_buffer.size();
            if (nfb)
                std::cerr << "DC1394: Frame buffer isn't empty!" << std::endl;
            std::cout << "DC1394: Starting video, writing to " << fn << std::endl;
            connected = true;
            ncount = 0;
#endif
        }
#endif // #nstandalone

#ifdef USE_SDL
        if (SDL_PollEvent(&event)) {
#ifdef STANDALONE
            /* Any of these event types will end the program */
            if (event.type == SDL_QUIT
                || event.type == SDL_KEYDOWN
                || event.type == SDL_KEYUP) {
                std::cout << std::endl;
                std::cout << std::endl << "DC1394: Total number of frames was " << nframes << std::endl;
                std::cout << std::endl << "DC1394: Frame buffer: " << acq_frame_buffer.size() << " frames left" << std::endl;
                close(s);
                pthread_cancel(acq_thread);
                pthread_cancel(save_thread);
                return 0;
            }
#endif // STANDALONE
        }
        surface->pixels = (void*)im.data;
        // SDL_CreateRGBSurfaceFrom((void*)IplImage(im).imageData,
        //                          IplImage(im).width,
        //                          IplImage(im).height,
        //                          IplImage(im).depth*IplImage(im).nChannels,
        //                          IplImage(im).widthStep,
        //                          1, 1, 1, 0);
        screen = SDL_GetVideoSurface();
        if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0)
            SDL_UpdateRect(screen, 0, 0, 0, 0);
#else // not SDL
        key = cv::waitKey(2);
        cv::imshow("DC1394", im);
        /* '+' raises camera gain by 10 (clamped below the maximum). */
        if (key == 1114155 || key == 65579 || key==43 /*+*/) {
            uint32_t gain = 0;
            err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain);
            DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain");
            if (gain < gCamera.get_maxgain()-10) {
                gain += 10;
                pthread_mutex_lock( &camera_mutex );
                err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain);
                pthread_mutex_unlock( &camera_mutex );
                std::cout << "DC1394: New gain value: " << gain << std::endl;
                DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain");
            }
        }
        /* '-' lowers camera gain by 10 (clamped above the minimum). */
        if (key == 1114207 || key == 45 /*-*/) {
            uint32_t gain = 0;
            err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain);
            DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain");
            if (gain > gCamera.get_mingain()+10) {
                gain -= 10;
                pthread_mutex_lock( &camera_mutex );
                err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain);
                pthread_mutex_unlock( &camera_mutex );
                DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain");
            }
        }
#endif // not SDL

#ifdef LICKOMETER
        /* IS THIS ALL YOU NEED THEN? Lick detection */
        /* Not required because the captured image is already gray
           cv::Mat gray = bgr2gray(im); */
        gray = thresholding(im, LICK_FRAME_THRESHOLD);
        if (nloop != 0) {
            cv::absdiff(prevs, gray, thresh);
            double pixel_sum_thresh = cv::sum(thresh)[0];
            double pixel_sum_gray = cv::sum(gray)[0];
            if (pixel_sum_thresh > LICK_SUM_THRESHOLD) {
                std::cout << "DC1394: Lick" << std::endl;
            }
            if (fp_lick != NULL) {
                fwrite(&pixel_sum_thresh, sizeof(pixel_sum_thresh), 1, fp_lick);
                fwrite(&pixel_sum_gray, sizeof(pixel_sum_gray), 1, fp_lick);
            }
        }
        prevs = gray.clone();
        nloop++;
#endif

#ifdef STANDALONE
        /* Keyboard control: q quits, s stops recording, r records to tmp/. */
        if (key == 1048689 || key == 113 /*q*/) {
            std::cout << "DC1394: Mean frame rate was " << nframes/total_dur << " fps" << std::endl;
            pthread_cancel(acq_thread);
            pthread_cancel(save_thread);
            return 0;
        }
        if (key == 1048691 /*s*/) {
            fn = "";
            std::cout << "DC1394: Stopping video" << std::endl;
            ncount = 0;
        }
        if (key == 1048690 /*r*/) {
            fn = trunk + std::string("tmp/");
            std::cout << "DC1394: Starting video, writing to " << fn << std::endl;
            ncount = 0;
        }
#endif // #standalone

        /* Per-loop and cumulative frame-rate statistics. */
        clock_gettime( CLOCK_REALTIME, &time2);
        double loop_dur = tdiff(time2, time3);
        clock_gettime( CLOCK_REALTIME, &time3);
        double meanfps = 0;
        total_dur = tdiff(time3, time0);
        if (total_dur > 0)
            meanfps = nframes / total_dur;
        /* NOTE(review): `ret` only exists under USE_SDL (it is putenv's
         * result); using it for the fps estimate looks wrong — confirm. */
        double currentfps = ret / loop_dur;
        std::cout << "DC1394: Current fps: " << std::setprecision(7) << currentfps
                  << " Average fps: " << std::setprecision(7) << meanfps << "\r" << std::flush;
#ifdef STANDALONE
        // std::cout << capture_dur << "\t" << target_dur << "\t" << rem << "\t" << loop_dur << std::endl;
#endif
    }

    if (d) {
        dc1394_free(d);
    }
#ifndef STANDALONE
    close(s);
#endif
    return 0;
}
int main(int argc, char *argv[]) { FILE* imagefile; dc1394camera_t *camera; unsigned int width, height; dc1394video_frame_t *frame=NULL; //dc1394featureset_t features; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; int counter = 0; d = dc1394_new (); if (!d) return 1; err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[0].guid); return 1; } dc1394_camera_free_list (list); printf("Using camera with GUID %" PRIx64 "\n", camera->guid); /*----------------------------------------------------------------------- * setup capture *-----------------------------------------------------------------------*/ err=dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set iso speed"); err=dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set video mode\n"); err=dc1394_video_set_framerate(camera, DC1394_FRAMERATE_7_5); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set framerate\n"); err=dc1394_capture_setup(camera,4, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera\n"); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not start camera iso transmission\n"); for(;;){ /*----------------------------------------------------------------------- * capture one frame 
*-----------------------------------------------------------------------*/ err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not capture a frame\n"); dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7, &width, &height); cv::Mat img = cv::Mat(height, width, CV_8U, frame->image); err = dc1394_capture_enqueue(camera, frame); frame=NULL; cv::imshow("Image", img); printf("Frame %d\n", counter); counter++; cv::waitKey(10); } /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera,DC1394_OFF); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera?\n"); /*----------------------------------------------------------------------- * close camera *-----------------------------------------------------------------------*/ dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_capture_stop(camera); dc1394_camera_free(camera); dc1394_free (d); return 0; }
int main(int argc, char *argv[]) { fitsfile *fptr; long fpixel=1, nelements, naxes[2]; dc1394camera_t *camera; int grab_n_frames; struct timeval start_time, end_time; int i, j, status; unsigned int max_height, max_width; uint64_t total_bytes = 0; unsigned int width, height; dc1394video_frame_t *frame=NULL; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; char *filename; unsigned char *buffer; float *average; grab_n_frames = atoi(argv[1]); filename = argv[2]; status = 0; width = 320; height = 240; naxes[0] = width; naxes[1] = height; nelements = naxes[0]*naxes[1]; stderr = freopen("grab_cube.log", "w", stderr); d = dc1394_new(); if (!d) return 1; err = dc1394_camera_enumerate(d, &list); DC1394_ERR_RTN(err, "Failed to enumerate cameras."); if (list->num == 0) { dc1394_log_error("No cameras found."); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid); return 1; } dc1394_camera_free_list(list); printf("Using camera with GUID %"PRIx64"\n", camera->guid); /*----------------------------------------------------------------------- * setup capture for format 7 *-----------------------------------------------------------------------*/ // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B); // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B"); // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); // configure camera for format7 err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_0"); err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1, &max_width, &max_height); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_0"); err = dc1394_format7_set_roi (camera, 
DC1394_VIDEO_MODE_FORMAT7_1, DC1394_COLOR_CODING_MONO8, // not sure why RAW8/16 don't work DC1394_USE_MAX_AVAIL, 0, 0, // left, top width, height); // width, height DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi"); // set the frame rate to absolute value in frames/sec err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 100.0); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate"); // set the shutter speed to absolute value in seconds err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 1.0e-2); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter"); // set gain manually. use relative value here in range 48 to 730. err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 200); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain"); // set brightness manually. use relative value in range 0 to 1023. 
err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 50); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness"); err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes"); // err = dc1394_feature_set_value (camera, DC1394_FEATURE_GAIN, 24); //DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error setting gain"); err = dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing"); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err = dc1394_video_set_transmission(camera, DC1394_ON); if (err != DC1394_SUCCESS) { dc1394_log_error("unable to start camera iso transmission"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } /* allocate the buffers */ if (!(buffer = malloc(nelements*sizeof(char)))) { printf("Couldn't Allocate Image Buffer\n"); exit(-1); } if (!(average = calloc(nelements, sizeof(float)))) { printf("Couldn't Allocate Average Image Buffer\n"); exit(-1); } // set up FITS image and capture fits_create_file(&fptr, filename, &status); dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height); fits_create_img(fptr, FLOAT_IMG, 2, naxes, &status); /*----------------------------------------------------------------------- * capture frames and measure the time for this operation *-----------------------------------------------------------------------*/ gettimeofday(&start_time, NULL); printf("Start capture:\n"); for (i=0; i<grab_n_frames; ++i) { 
/*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); if (err != DC1394_SUCCESS) { dc1394_log_error("unable to capture"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } memcpy(buffer, frame->image, nelements*sizeof(char)); // release buffer dc1394_capture_enqueue(camera,frame); for (j=0; j<nelements; j++) { average[j] += (1.0/grab_n_frames)*(buffer[j]); } } gettimeofday(&end_time, NULL); printf("End capture.\n"); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ err = dc1394_video_set_transmission(camera, DC1394_OFF); DC1394_ERR_RTN(err,"couldn't stop the camera?"); /*----------------------------------------------------------------------- * save FITS image to disk *-----------------------------------------------------------------------*/ fits_write_img(fptr, TFLOAT, fpixel, naxes[0]*naxes[1], average, &status); fits_close_file(fptr, &status); fits_report_error(stderr, status); free(buffer); free(average); printf("wrote: %s\n", filename); printf("Readout is %d bits/pixel.\n", frame->data_depth); /*----------------------------------------------------------------------- * close camera, cleanup *-----------------------------------------------------------------------*/ dc1394_capture_stop(camera); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_camera_free(camera); dc1394_free(d); return 0; }
int main(int argc, char *argv[]) { // assumes your camera can output 640x480 with 8-bit monochrome video_mode = DC1394_VIDEO_MODE_640x480_MONO8; d = dc1394_new(); if (!d) return 1; err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } // use two counters so tht cameras array does not contain gaps in the case of errors j = 0; for (i = 0; i < list->num; i++) { if (j >= MAX_CAMERAS) break; cameras[j] = dc1394_camera_new (d, list->ids[i].guid); if (!cameras[j]) { dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[i].guid); continue; } j++; } numCameras = j; dc1394_camera_free_list (list); if (numCameras == 0) { dc1394_log_error("No cameras found"); exit (1); } // setup cameras for capture for (i = 0; i < numCameras; i++) { err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800); DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set ISO speed"); err=dc1394_video_set_mode(cameras[i], video_mode); DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set video mode"); err=dc1394_capture_setup(cameras[i],NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err,cleanup(),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera"); err=dc1394_get_image_size_from_video_mode(cameras[i], video_mode, &video_mode_width, &video_mode_height); DC1394_ERR_CLN_RTN(err,cleanup(),"Could not query video mode width and height"); err=dc1394_video_set_one_shot(cameras[i], DC1394_ON); DC1394_ERR_CLN_RTN(err,cleanup(),"Could not use one shot mode"); } fflush(stdout); if (numCameras < 1) { perror("no cameras found :(\n"); cleanup(); exit(-1); } for (i = 0; i < numCameras; i++) { if (dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i])!=DC1394_SUCCESS) dc1394_log_error("Failed to capture from camera %d", i); // save image as '[GUID].pgm' char filename[256]; sprintf(filename, "%" PRIu64 
"%s",list->ids[i].guid,IMAGE_FILE_EXTENSION); imagefile=fopen(filename, "w"); if( imagefile == NULL) { dc1394_log_error("Can't create %s", filename); } // adding the pgm file header fprintf(imagefile,"P5\n%u %u 255\n", video_mode_width, video_mode_height); // writing to the file fwrite((const char *)frames[i]->image, 1, \ video_mode_width * video_mode_height, imagefile); fclose(imagefile); } // exit cleanly cleanup(); return(0); }
int main(int argc, char *argv[]) { // parse configuration file // get input arguments OpenCvStereoConfig stereo_config; string config_file = ""; ConciseArgs parser(argc, argv); parser.add(config_file, "c", "config", "Configuration file containing camera GUIDs, etc.", true); parser.add(left_camera_mode, "l", "left-camera", "Calibrate just the left camera."); parser.add(right_camera_mode, "r", "right-camera", "Calibrate just the right camera."); parser.add(force_brightness, "b", "brightness", "set brightness to this level"); parser.add(force_exposure, "e", "exposure", "set exposure to this level"); parser.parse(); // parse the config file if (ParseConfigFile(config_file, &stereo_config) != true) { fprintf(stderr, "Failed to parse configuration file, quitting.\n"); return -1; } if (left_camera_mode || right_camera_mode) { stereo_mode = false; } uint64 guid = stereo_config.guidLeft; uint64 guid2 = stereo_config.guidRight; dc1394_t *d; dc1394camera_t *camera; dc1394error_t err; Mat frame_array_left[MAX_FRAMES]; Mat frame_array_right[MAX_FRAMES]; int numFrames = 0; // ----- cam 2 ----- dc1394_t *d2; dc1394camera_t *camera2; dc1394error_t err2; d = dc1394_new (); if (!d) g_critical("Could not create dc1394 context"); d2 = dc1394_new (); if (!d2) g_critical("Could not create dc1394 context for camera 2"); camera = dc1394_camera_new (d, guid); if (!camera) g_critical("Could not create dc1394 camera"); camera2 = dc1394_camera_new (d2, guid2); if (!camera2) g_critical("Could not create dc1394 camera for camera 2"); // setup err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera"); err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2"); // enable auto-exposure // turn on the auto exposure feature err = dc1394_feature_set_power(camera, DC1394_FEATURE_EXPOSURE, DC1394_ON); DC1394_ERR_RTN(err,"Could not 
turn on the exposure feature"); err = dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO); DC1394_ERR_RTN(err,"Could not turn on Auto-exposure"); // enable auto-exposure // turn on the auto exposure feature err = dc1394_feature_set_power(camera2, DC1394_FEATURE_EXPOSURE, DC1394_ON); DC1394_ERR_RTN(err,"Could not turn on the exposure feature for cam2"); err = dc1394_feature_set_mode(camera2, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO); DC1394_ERR_RTN(err,"Could not turn on Auto-exposure for cam2"); // enable camera err = dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission"); err2 = dc1394_video_set_transmission(camera2, DC1394_ON); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2"); if (left_camera_mode || stereo_mode) { InitBrightnessSettings(camera, camera2); MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } else { // use the right camera as the master for brightness // since we're calibrating that one InitBrightnessSettings(camera2, camera); MatchBrightnessSettings(camera2, camera, true); } // make opencv windows if (left_camera_mode || stereo_mode) { namedWindow("Input Left", CV_WINDOW_AUTOSIZE); moveWindow("Input Left", 100, 100); } if (right_camera_mode || stereo_mode) { namedWindow("Input Right", CV_WINDOW_AUTOSIZE); moveWindow("Input Right", 478, 100); } CvSize size; Mat cornersL, cornersR; int i; while (numFrames < MAX_FRAMES) { Mat chessL, chessR; // each loop dump a bunch of frames to clear the buffer MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); for (i=0;i<10;i++) { if (left_camera_mode || stereo_mode) { chessL = GetFrameFormat7(camera); } if (right_camera_mode || stereo_mode) { chessR = GetFrameFormat7(camera2); } } // copy the images for drawing/display size = chessL.size(); Mat 
chessLc; chessLc.create(size, CV_32FC3); Mat chessRc; chessRc.create(size, CV_32FC3); // attempt checkerboard matching bool foundPattern = true; // set to true so we can do an OR // later if we're only using one // camera if (left_camera_mode || stereo_mode) { foundPattern = findChessboardCorners(chessL, Size(CHESS_X, CHESS_Y), cornersL); } if (right_camera_mode || stereo_mode) { foundPattern = foundPattern & findChessboardCorners(chessR, Size(CHESS_X, CHESS_Y), cornersR); } if (left_camera_mode || stereo_mode) { cvtColor( chessL, chessLc, CV_GRAY2BGR ); drawChessboardCorners(chessLc, Size(CHESS_X, CHESS_Y), cornersL, foundPattern); imshow("Input Left", chessLc); } if (right_camera_mode || stereo_mode) { cvtColor(chessR, chessRc, CV_GRAY2BGR); drawChessboardCorners(chessRc, Size(CHESS_X, CHESS_Y), cornersR, foundPattern); imshow("Input Right", chessRc); } // key codes: // page up: 654365 // page down: 65366 // b: 98 char key = waitKey(); //printf("%d\n", (int)key); if (key == 98) { break; } else if (key == 86){ if (foundPattern) { // this was a good one -- save it frame_array_left[numFrames] = chessL; frame_array_right[numFrames] = chessR; // give the user some guidence on the number // of frames they should be using if (stereo_mode) { printf("Saved frame %d / about 10\n", numFrames); } else { printf("Saved frame %d / about 20-30\n", numFrames); } numFrames ++; } else { printf("Not saving frame since did not find a checkboard.\n"); } } else if (key == 'W') { force_brightness +=20; cout << "Brightness: " << force_brightness << "\n"; } else if (key == 'w') { force_brightness -=20; cout << "Brightness: " << force_brightness << "\n"; } else if (key == 'E') { force_exposure +=20; cout << "Exposure: " << force_exposure << "\n"; } else if (key == 'e') { force_exposure -=20; cout << "Exposure: " << force_exposure << "\n"; } } printf("\n\n"); // clear out the calibration directory printf("Deleting old images...\nrm calibrationImages/*.ppm\n"); int retval = system("rm 
calibrationImages/*.ppm"); if (retval != 0) { printf("Warning: Deleting images may have failed.\n"); } printf("done.\n"); char filename[1000]; for (i=0;i<numFrames;i++) { if (left_camera_mode || stereo_mode) { sprintf(filename, "calibrationImages/cam1-%05d.ppm", i+1); imwrite(filename, frame_array_left[i]); } if (right_camera_mode || stereo_mode) { sprintf(filename, "calibrationImages/cam2-%05d.ppm", i+1); imwrite(filename, frame_array_right[i]); } printf("Writing frame %d\n", i); } printf("\n\n"); destroyWindow("Input Left"); destroyWindow("Input Right"); // stop data transmission err = dc1394_video_set_transmission(camera, DC1394_OFF); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera"); err2 = dc1394_video_set_transmission(camera2, DC1394_OFF); DC1394_ERR_CLN_RTN(err2,cleanup_and_exit(camera2),"Could not stop the camera 2"); // close camera cleanup_and_exit(camera); cleanup_and_exit(camera2); dc1394_free (d); dc1394_free (d2); return 0; }
int main(int argc, char *argv[]) { // get input arguments string configFile = ""; string video_file_left = "", video_file_right = "", video_directory = ""; int starting_frame_number = 0; bool enable_gamma = false; float random_results = -1.0; int last_frame_number = -1; int last_playback_frame_number = -2; ConciseArgs parser(argc, argv); parser.add(configFile, "c", "config", "Configuration file containing camera GUIDs, etc.", true); parser.add(show_display, "d", "show-dispaly", "Enable for visual debugging display. Will reduce framerate significantly."); parser.add(show_display_wait, "w", "show-display-wait", "Optional argument to decrease framerate for lower network traffic when forwarding the display."); parser.add(show_unrectified, "u", "show-unrectified", "When displaying images, do not apply rectification."); parser.add(disable_stereo, "s", "disable-stereo", "Disable online stereo processing."); parser.add(force_brightness, "b", "force-brightness", "Force a brightness setting."); parser.add(force_exposure, "e", "force-exposure", "Force an exposure setting."); parser.add(quiet_mode, "q", "quiet", "Reduce text output."); parser.add(video_file_left, "l", "video-file-left", "Do not use cameras, instead use this video file (also requires a right video file)."); parser.add(video_file_right, "t", "video-file-right", "Right video file, only for use with the -l option."); parser.add(video_directory, "i", "video-directory", "Directory to search for videos in (for playback)."); parser.add(starting_frame_number, "f", "starting-frame", "Frame to start at when playing back videos."); parser.add(display_hud, "v", "hud", "Overlay HUD on display images."); parser.add(record_hud, "x", "record-hud", "Record the HUD display."); parser.add(file_frame_skip, "p", "skip", "Number of frames skipped in recording (for playback)."); parser.add(enable_gamma, "g", "enable-gamma", "Turn gamma on for both cameras."); parser.add(random_results, "R", "random-results", "Number of random points 
to produce per frame. Can be a float in which case we'll take a random sample to decide if to produce the last one. Disables real stereo processing. Only for debugging / analysis!"); parser.add(publish_all_images, "P", "publish-all-images", "Publish all images to LCM"); parser.parse(); // parse the config file if (ParseConfigFile(configFile, &stereoConfig) != true) { fprintf(stderr, "Failed to parse configuration file, quitting.\n"); return -1; } if (video_file_left.length() > 0 && video_file_right.length() <= 0) { fprintf(stderr, "Error: for playback you must specify both " "a right and left video file. (Only got a left one.)\n"); return -1; } if (video_file_left.length() <= 0 && video_file_right.length() > 0) { fprintf(stderr, "Error: for playback you must specify both " "a right and left video file. (Only got a right one.)\n"); return -1; } recording_manager.Init(stereoConfig); // attempt to load video files / directories if (video_file_left.length() > 0) { if (recording_manager.LoadVideoFiles(video_file_left, video_file_right) != true) { // don't have videos, bail out. 
return -1; } } if (video_directory.length() > 0) { if (recording_manager.SetPlaybackVideoDirectory(video_directory) != true) { // bail return -1; } } recording_manager.SetQuietMode(quiet_mode); recording_manager.SetPlaybackFrameNumber(starting_frame_number); uint64 guid = stereoConfig.guidLeft; uint64 guid2 = stereoConfig.guidRight; // start up LCM lcm_t * lcm; lcm = lcm_create (stereoConfig.lcmUrl.c_str()); unsigned long elapsed; Hud hud; // --- setup control-c handling --- struct sigaction sigIntHandler; sigIntHandler.sa_handler = control_c_handler; sigemptyset(&sigIntHandler.sa_mask); sigIntHandler.sa_flags = 0; sigaction(SIGINT, &sigIntHandler, NULL); // --- end ctrl-c handling code --- dc1394error_t err; dc1394error_t err2; // tell opencv to use only one core so that we can manage our // own threading without a fight setNumThreads(1); if (recording_manager.UsingLiveCameras()) { d = dc1394_new (); if (!d) cerr << "Could not create dc1394 context" << endl; d2 = dc1394_new (); if (!d2) cerr << "Could not create dc1394 context for camera 2" << endl; camera = dc1394_camera_new (d, guid); if (!camera) { cerr << "Could not create dc1394 camera... quitting." 
<< endl; exit(1); } camera2 = dc1394_camera_new (d2, guid2); if (!camera2) cerr << "Could not create dc1394 camera for camera 2" << endl; // reset the bus dc1394_reset_bus(camera); dc1394_reset_bus(camera2); // setup err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera"); err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2"); // enable camera err = dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission"); err2 = dc1394_video_set_transmission(camera2, DC1394_ON); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2"); InitBrightnessSettings(camera, camera2, enable_gamma); } if (show_display) { namedWindow("Input", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Input2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Stereo", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Left Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Right Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Debug 1", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); namedWindow("Debug 2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO); setMouseCallback("Input", onMouse); // for drawing disparity lines setMouseCallback("Stereo", onMouseStereo, &hud); // for drawing disparity lines moveWindow("Input", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 100); moveWindow("Stereo", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 370); moveWindow("Input2", stereoConfig.displayOffsetX + 478, stereoConfig.displayOffsetY + 100); moveWindow("Left Block", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 100); moveWindow("Right Block", stereoConfig.displayOffsetX + 1400, 
stereoConfig.displayOffsetY + 100); moveWindow("Debug 1", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 670); moveWindow("Debug 2", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 670); } // show display if (show_display || publish_all_images) { // if a channel exists, subscribe to it if (stereoConfig.stereo_replay_channel.length() > 0) { stereo_replay_sub = lcmt_stereo_subscribe(lcm, stereoConfig.stereo_replay_channel.c_str(), &stereo_replay_handler, &hud); } if (stereoConfig.pose_channel.length() > 0) { mav_pose_t_sub = mav_pose_t_subscribe(lcm, stereoConfig.pose_channel.c_str(), &mav_pose_t_handler, &hud); } if (stereoConfig.gps_channel.length() > 0) { mav_gps_data_t_sub = mav_gps_data_t_subscribe(lcm, stereoConfig.gps_channel.c_str(), &mav_gps_data_t_handler, &hud); } if (stereoConfig.baro_airspeed_channel.length() > 0) { baro_airspeed_sub = lcmt_baro_airspeed_subscribe(lcm, stereoConfig.baro_airspeed_channel.c_str(), &baro_airspeed_handler, &hud); } if (stereoConfig.servo_out_channel.length() > 0) { servo_out_sub = lcmt_deltawing_u_subscribe(lcm, stereoConfig.servo_out_channel.c_str(), &servo_out_handler, &hud); } if (stereoConfig.battery_status_channel.length() > 0) { battery_status_sub = lcmt_battery_status_subscribe(lcm, stereoConfig.battery_status_channel.c_str(), &battery_status_handler, &hud); } if (stereoConfig.cpu_info_channel1.length() > 0) { cpu_info_sub1 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel1.c_str(), &cpu_info_handler, &recording_manager); cpu_info_sub2 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel2.c_str(), &cpu_info_handler, &recording_manager); cpu_info_sub3 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel3.c_str(), &cpu_info_handler, &recording_manager); } if (stereoConfig.log_size_channel1.length() > 0) { log_size_sub1 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel1.c_str(), &log_size_handler, &hud); log_size_sub2 = lcmt_log_size_subscribe(lcm, 
stereoConfig.log_size_channel2.c_str(), &log_size_handler, &hud); log_size_sub3 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel3.c_str(), &log_size_handler, &hud); } } // end show_display || publish_all_images // load calibration OpenCvStereoCalibration stereoCalibration; if (LoadCalibration(stereoConfig.calibrationDir, &stereoCalibration) != true) { cerr << "Error: failed to read calibration files. Quitting." << endl; return -1; } int inf_disparity_tester, disparity_tester; disparity_tester = GetDisparityForDistance(10, stereoCalibration, &inf_disparity_tester); std::cout << "computed disparity is = " << disparity_tester << ", inf disparity = " << inf_disparity_tester << std::endl; // subscribe to the stereo control channel stereo_control_sub = lcmt_stereo_control_subscribe(lcm, stereoConfig.stereoControlChannel.c_str(), &lcm_stereo_control_handler, NULL); Mat imgDisp; Mat imgDisp2; // initilize default parameters //PushbroomStereoState state; // HACK state.disparity = stereoConfig.disparity; state.zero_dist_disparity = stereoConfig.infiniteDisparity; state.sobelLimit = stereoConfig.interestOperatorLimit; state.horizontalInvarianceMultiplier = stereoConfig.horizontalInvarianceMultiplier; state.blockSize = stereoConfig.blockSize; state.random_results = random_results; state.check_horizontal_invariance = true; if (state.blockSize > 10 || state.blockSize < 1) { fprintf(stderr, "Warning: block size is very large " "or small (%d). 
Expect trouble.\n", state.blockSize); } state.sadThreshold = stereoConfig.sadThreshold; state.mapxL = stereoCalibration.mx1fp; state.mapxR = stereoCalibration.mx2fp; state.Q = stereoCalibration.qMat; state.show_display = show_display; state.lastValidPixelRow = stereoConfig.lastValidPixelRow; Mat matL, matR; bool quit = false; if (recording_manager.UsingLiveCameras()) { matL = GetFrameFormat7(camera); matR = GetFrameFormat7(camera2); if (recording_manager.InitRecording(matL, matR) != true) { // failed to init recording, things are going bad. bail. return -1; } // before we start, turn the cameras on and set the brightness and exposure MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); // grab a few frames and send them over LCM for the user // to verify that everything is working if (!show_display && !publish_all_images) { printf("Sending init images over LCM... "); fflush(stdout); for (int i = 0; i < 5; i++) { matL = GetFrameFormat7(camera); SendImageOverLcm(lcm, "stereo_image_left", matL, 50); matR = GetFrameFormat7(camera2); SendImageOverLcm(lcm, "stereo_image_right", matR, 50); // don't send these too fast, otherwise we'll flood the ethernet link // and not actually be helpful // wait one second printf("."); fflush(stdout); sleep(1); } printf(" done.\n"); } } // recording_manager.UsingLiveCameras() // spool up worker threads PushbroomStereo pushbroom_stereo; // start the framerate clock struct timeval start, now; gettimeofday( &start, NULL ); while (quit == false) { // get the frames from the camera if (recording_manager.UsingLiveCameras()) { // we would like to match brightness every frame // but that would really hurt our framerate // match brightness every 10 frames instead if (numFrames % MATCH_BRIGHTNESS_EVERY_N_FRAMES == 0) { MatchBrightnessSettings(camera, camera2); } // capture images from the cameras matL = GetFrameFormat7(camera); matR = GetFrameFormat7(camera2); // record video recording_manager.AddFrames(matL, matR); } 
else { // using a video file -- get the next frame recording_manager.GetFrames(matL, matR); } cv::vector<Point3f> pointVector3d; cv::vector<uchar> pointColors; cv::vector<Point3i> pointVector2d; // for display cv::vector<Point3i> pointVector2d_inf; // for display // do the main stereo processing if (disable_stereo != true) { gettimeofday( &now, NULL ); double before = now.tv_usec + now.tv_sec * 1000 * 1000; pushbroom_stereo.ProcessImages(matL, matR, &pointVector3d, &pointColors, &pointVector2d, state); gettimeofday( &now, NULL ); double after = now.tv_usec + now.tv_sec * 1000 * 1000; timer_sum += after-before; timer_count ++; } // build an LCM message for the stereo data lcmt_stereo msg; if (recording_manager.UsingLiveCameras() || stereo_lcm_msg == NULL) { msg.timestamp = getTimestampNow(); } else { // if we are replaying videos, preserve the timestamp of the original video msg.timestamp = stereo_lcm_msg->timestamp; } msg.number_of_points = (int)pointVector3d.size(); float x[msg.number_of_points]; float y[msg.number_of_points]; float z[msg.number_of_points]; uchar grey[msg.number_of_points]; for (unsigned int i=0;i<pointVector3d.size();i++) { x[i] = pointVector3d[i].x / stereoConfig.calibrationUnitConversion; y[i] = pointVector3d[i].y / stereoConfig.calibrationUnitConversion; z[i] = pointVector3d[i].z / stereoConfig.calibrationUnitConversion; grey[i] = pointColors[i]; } msg.x = x; msg.y = y; msg.z = z; msg.grey = grey; msg.frame_number = recording_manager.GetFrameNumber(); if (recording_manager.UsingLiveCameras()) { msg.frame_number = msg.frame_number - 1; // minus one since recording manager has // already recorded this frame (above in // AddFrames) but we haven't made a message // for it yet } msg.video_number = recording_manager.GetRecVideoNumber(); // publish the LCM message if (last_frame_number != msg.frame_number) { lcmt_stereo_publish(lcm, "stereo", &msg); last_frame_number = msg.frame_number; } if (publish_all_images) { if 
(recording_manager.GetFrameNumber() != last_playback_frame_number) { SendImageOverLcm(lcm, "stereo_image_left", matL, 80); SendImageOverLcm(lcm, "stereo_image_right", matR, 80); last_playback_frame_number = recording_manager.GetFrameNumber(); } //process LCM until there are no more messages // this allows us to drop frames if we are behind while (NonBlockingLcm(lcm)) {} } Mat matDisp, remapL, remapR; if (show_display) { // we remap again here because we're just in display Mat remapLtemp(matL.rows, matL.cols, matL.depth()); Mat remapRtemp(matR.rows, matR.cols, matR.depth()); remapL = remapLtemp; remapR = remapRtemp; remap(matL, remapL, stereoCalibration.mx1fp, Mat(), INTER_NEAREST); remap(matR, remapR, stereoCalibration.mx2fp, Mat(), INTER_NEAREST); remapL.copyTo(matDisp); //process LCM until there are no more messages // this allows us to drop frames if we are behind while (NonBlockingLcm(lcm)) {} } // end show_display if (show_display) { for (unsigned int i=0;i<pointVector2d.size();i++) { int x2 = pointVector2d[i].x; int y2 = pointVector2d[i].y; //int sad = pointVector2d[i].z; rectangle(matDisp, Point(x2,y2), Point(x2+state.blockSize, y2+state.blockSize), 0, CV_FILLED); rectangle(matDisp, Point(x2+1,y2+1), Point(x2+state.blockSize-1, y2-1+state.blockSize), 255); } // draw pixel blocks if (lineLeftImgPosition >= 0 && lineLeftImgPositionY > 1) { DisplayPixelBlocks(remapL, remapR, lineLeftImgPosition - state.blockSize/2, lineLeftImgPositionY - state.blockSize/2, state, &pushbroom_stereo); } // draw a line for the user to show disparity DrawLines(remapL, remapR, matDisp, lineLeftImgPosition, lineLeftImgPositionY, state.disparity, state.zero_dist_disparity); if (visualize_stereo_hits == true && stereo_lcm_msg != NULL) { // transform the points from 3D space back onto the image's 2D space vector<Point3f> lcm_points; Get3DPointsFromStereoMsg(stereo_lcm_msg, &lcm_points); // draw the points on the unrectified image (to see these // you must pass the -u flag) 
Draw3DPointsOnImage(matL, &lcm_points, stereoCalibration.M1, stereoCalibration.D1, stereoCalibration.R1, 128); } if (show_unrectified == false) { imshow("Input", remapL); imshow("Input2", remapR); } else { imshow("Input", matL); imshow("Input2", matR); } if (display_hud) { Mat with_hud; recording_manager.SetHudNumbers(&hud); hud.DrawHud(matDisp, with_hud); if (record_hud) { // put this frame into the HUD recording recording_manager.RecFrameHud(with_hud); } imshow("Stereo", with_hud); } else { imshow("Stereo", matDisp); } char key = waitKey(show_display_wait); if (key != 255 && key != -1) { cout << endl << key << endl; } switch (key) { case 'T': state.disparity --; break; case 'R': state.disparity ++; break; case 'w': state.sobelLimit += 10; break; case 's': state.sobelLimit -= 10; break; case 'd': state.horizontalInvarianceMultiplier -= 0.1; break; case 'D': state.horizontalInvarianceMultiplier += 0.1; break; case 'g': state.blockSize ++; break; case 'b': state.blockSize --; if (state.blockSize < 1) { state.blockSize = 1; } break; case 'Y': state.sadThreshold += 50; break; case 'y': state.sadThreshold ++; break; case 'h': state.sadThreshold --; break; case 'H': state.sadThreshold -= 50; break; case 'm': if (recording_manager.UsingLiveCameras()) { MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } break; case '1': force_brightness --; if (recording_manager.UsingLiveCameras()) { MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } break; case '2': force_brightness ++; if (recording_manager.UsingLiveCameras()) { MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } break; case '3': force_exposure --; if (recording_manager.UsingLiveCameras()) { MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } break; case '4': force_exposure ++; if (recording_manager.UsingLiveCameras()) { MatchBrightnessSettings(camera, camera2, true, force_brightness, 
force_exposure); } break; case '5': // to show SAD boxes state.sobelLimit = 0; state.sadThreshold = 255; break; case 'I': state.check_horizontal_invariance = !state.check_horizontal_invariance; break; case '.': recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 1); break; case ',': recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 1); break; case '>': recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 50); break; case '<': recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 50); break; //case 'k': // state.zero_dist_disparity ++; // break; case 'l': state.zero_dist_disparity --; break; case 'o': inf_sad_add --; break; case 'p': inf_sad_add ++; break; case '[': y_offset --; if (y_offset < 0) { y_offset = 0; } break; case ']': y_offset ++; break; case 'v': display_hud = !display_hud; break; case 'c': hud.SetClutterLevel(hud.GetClutterLevel() + 1); break; case 'C': hud.SetClutterLevel(hud.GetClutterLevel() - 1); break; case '}': hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() + 1); break; case '{': hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() - 1); break; case 'S': // take a screen cap of the left and right images // useful for putting into a stereo tuner printf("\nWriting left.ppm..."); imwrite("left.ppm", remapL); printf("\nWriting right.ppm..."); imwrite("right.ppm", remapR); printf("\ndone."); break; case 'V': // record the HUD record_hud = true; recording_manager.RestartRecHud(); break; /* case 'j': state.debugJ --; break; case 'J': state.debugJ ++; break; case 'i': state.debugI --; break; case 'I': state.debugI ++; break; case 'k': state.debugDisparity --; break; case 'K': state.debugDisparity ++; break; */ case 'q': quit = true; break; } if (key != 255 && key != -1) { cout << "sadThreshold = " << state.sadThreshold << endl; cout << "sobelLimit = " << state.sobelLimit << endl; cout << "horizontalInvarianceMultiplier = " << 
state.horizontalInvarianceMultiplier << endl; cout << "brightness: " << force_brightness << endl; cout << "exposure: " << force_exposure << endl; cout << "disparity = " << state.disparity << endl; cout << "inf_disparity = " << state.zero_dist_disparity << endl; cout << "inf_sad_add = " << inf_sad_add << endl; cout << "blockSize = " << state.blockSize << endl; cout << "frame_number = " << recording_manager.GetFrameNumber() << endl; cout << "y offset = " << y_offset << endl; cout << "PitchRangeOfLens = " << hud.GetPitchRangeOfLens() << endl; } } // end show_display numFrames ++; // check for new LCM messages NonBlockingLcm(lcm); if (quiet_mode == false || numFrames % 100 == 0) { // compute framerate gettimeofday( &now, NULL ); elapsed = (now.tv_usec / 1000 + now.tv_sec * 1000) - (start.tv_usec / 1000 + start.tv_sec * 1000); printf("\r%d frames (%lu ms) - %4.1f fps | %4.1f ms/frame, stereo: %f", numFrames, elapsed, (float)numFrames/elapsed * 1000, elapsed/(float)numFrames, timer_sum/(double)timer_count); fflush(stdout); } } // end main while loop printf("\n\n"); destroyWindow("Input"); destroyWindow("Input2"); destroyWindow("Stereo"); // close camera if (recording_manager.UsingLiveCameras()) { StopCapture(d, camera); StopCapture(d2, camera2); } return 0; }
/*
 * dc1394 multi-camera viewer: finds every IIDC camera on the bus,
 * configures them from the command-line options, and shows the live
 * streams stacked vertically in a single Xv-backed X11 window.
 *
 * Relies on file-scope state set elsewhere in this file: fps, res,
 * device_width/device_height, format, cameras[], frames[], numCameras,
 * display, window, gc, adaptor, width, height, freeze, connection.
 *
 * Keys: q/Q quit, , shrink window, . grow window, 0 toggles freeze,
 *       1/2/3/4/5 select 1.875/3.75/7.5/15/30 fps.
 */
int main(int argc,char *argv[]) {
    XEvent xev;
    XGCValues xgcv;
    long background=0x010203;   /* window background colour */
    int i, j;
    dc1394_t * d;
    dc1394camera_list_t * list;

    get_options(argc,argv);

    /* process options: map the numeric framerate option onto the dc1394 enum */
    switch(fps) {
    case 1: fps = DC1394_FRAMERATE_1_875; break;
    case 3: fps = DC1394_FRAMERATE_3_75; break;
    case 15: fps = DC1394_FRAMERATE_15; break;
    case 30: fps = DC1394_FRAMERATE_30; break;
    case 60: fps = DC1394_FRAMERATE_60; break;
    default: fps = DC1394_FRAMERATE_7_5; break;
    }

    /* map the numeric resolution option onto video mode, per-device window
       geometry, and the Xv pixel format used for display */
    switch(res) {
    case 1:
        res = DC1394_VIDEO_MODE_640x480_YUV411;
        device_width=640;
        device_height=480;
        format=XV_YUY2;
        break;
    case 2:
        res = DC1394_VIDEO_MODE_640x480_RGB8;
        device_width=640;
        device_height=480;
        format=XV_YUY2;
        break;
    case 3:
        res = DC1394_VIDEO_MODE_800x600_YUV422;
        device_width=800;
        device_height=600;
        format=XV_UYVY;
        break;
    default:
        res = DC1394_VIDEO_MODE_320x240_YUV422;
        device_width=320;
        device_height=240;
        format=XV_UYVY;
        break;
    }

    dc1394error_t err;

    d = dc1394_new ();
    if (!d)
        return 1;
    err=dc1394_camera_enumerate (d, &list);
    DC1394_ERR_RTN(err,"Failed to enumerate cameras");
    if (list->num == 0) {
        dc1394_log_error("No cameras found");
        return 1;
    }

    /* open every camera on the bus, up to MAX_CAMERAS; cameras that fail
       to initialise are skipped with a warning */
    j = 0;
    for (i = 0; i < list->num; i++) {
        if (j >= MAX_CAMERAS)
            break;
        cameras[j] = dc1394_camera_new (d, list->ids[i].guid);
        if (!cameras[j]) {
            dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[i].guid);
            continue;
        }
        j++;
    }
    numCameras = j;
    dc1394_camera_free_list (list);

    if (numCameras == 0) {
        dc1394_log_error("No cameras found");
        exit (1);
    }

    /* setup cameras for capture */
    for (i = 0; i < numCameras; i++) {
        //err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_400);
        //dc1394_video_set_operation_mode(my_camera_ptr[i], DC1394_OPERATION_MODE_1394B);

        /* NOTE(review): the result of set_operation_mode is overwritten by
           the next call before it is checked */
        err= dc1394_video_set_operation_mode(cameras[i], DC1394_OPERATION_MODE_1394B);
        err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800);
        DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set ISO speed");

        err=dc1394_video_set_mode(cameras[i], res);
        DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set video mode");

        err=dc1394_video_set_framerate(cameras[i], fps);
        DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set framerate");

        err=dc1394_capture_setup(cameras[i],NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
        DC1394_ERR_CLN_RTN(err,cleanup(),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

        err=dc1394_video_set_transmission(cameras[i], DC1394_ON);
        DC1394_ERR_CLN_RTN(err,cleanup(),"Could not start camera iso transmission");

        // Camera settings (relative register values; return codes not checked)
        err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_SHUTTER,1400);
        err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_BRIGHTNESS,800);
        err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_EXPOSURE,150);
        err = dc1394_feature_whitebalance_set_value(cameras[i],500,400);
    }
    fflush(stdout);

    if (numCameras < 1) {
        perror("no cameras found :(\n");
        cleanup();
        exit(-1);
    }

    /* bytes per displayed frame depend on the Xv pixel format */
    switch(format){
    case XV_YV12:
        set_frame_length(device_width*device_height*3/2, numCameras);
        break;
    case XV_YUY2:
    case XV_UYVY:
        set_frame_length(device_width*device_height*2, numCameras);
        break;
    default:
        dc1394_log_error("Unknown format set (internal error)");
        exit(255);
    }

    /* make the window */
    display=XOpenDisplay(getenv("DISPLAY"));
    if(display==NULL) {
        dc1394_log_error("Could not open display \"%s\"",getenv("DISPLAY"));
        cleanup();
        exit(-1);
    }

    QueryXv();

    if ( adaptor < 0 ) {
        cleanup();
        exit(-1);
    }

    /* one camera per row: window height is the per-device height times
       the number of cameras */
    width=device_width;
    height=device_height * numCameras;
    //width=device_width * numCameras;
    //height=device_height;

    window=XCreateSimpleWindow(display,DefaultRootWindow(display),0,0,width,height,0,
                               WhitePixel(display,DefaultScreen(display)),
                               background);

    XSelectInput(display,window,StructureNotifyMask|KeyPressMask);
    XMapWindow(display,window);
    connection=ConnectionNumber(display);

    gc=XCreateGC(display,window,0,&xgcv);

    /* main event loop: dequeue one frame per camera, draw, then drain
       pending X events (resize / key presses) before re-queueing buffers */
    while(1){
        for (i = 0; i < numCameras; i++) {
            if (dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i])!=DC1394_SUCCESS)
                dc1394_log_error("Failed to capture from camera %d", i);
        }

        display_frames();
        XFlush(display);

        while(XPending(display)>0){
            XNextEvent(display,&xev);
            switch(xev.type){
            case ConfigureNotify:
                width=xev.xconfigure.width;
                height=xev.xconfigure.height;
                display_frames();
                break;
            case KeyPress:
                switch(XKeycodeToKeysym(display,xev.xkey.keycode,0)){
                case XK_q:
                case XK_Q:
                    cleanup();
                    exit(0);
                    break;
                case XK_comma:
                case XK_less:
                    //width=width/2;
                    //height=height/2;
                    width--;
                    XResizeWindow(display,window,width,height);
                    display_frames();
                    break;
                case XK_period:
                case XK_greater:
                    //width=2*width;
                    //height=2*height;
                    width++;
                    XResizeWindow(display,window,width,height);
                    display_frames();
                    break;
                case XK_0:
                    freeze = !freeze;
                    break;
                case XK_1:
                    fps = DC1394_FRAMERATE_1_875;
                    for (i = 0; i < numCameras; i++)
                        dc1394_video_set_framerate(cameras[i], fps);
                    break;
                case XK_2:
                    fps = DC1394_FRAMERATE_3_75;
                    for (i = 0; i < numCameras; i++)
                        dc1394_video_set_framerate(cameras[i], fps);
                    break;
                case XK_4:
                    fps = DC1394_FRAMERATE_15;
                    for (i = 0; i < numCameras; i++)
                        dc1394_video_set_framerate(cameras[i], fps);
                    break;
                case XK_5:
                    fps = DC1394_FRAMERATE_30;
                    for (i = 0; i < numCameras; i++)
                        dc1394_video_set_framerate(cameras[i], fps);
                    break;
                case XK_3:
                    fps = DC1394_FRAMERATE_7_5;
                    for (i = 0; i < numCameras; i++)
                        dc1394_video_set_framerate(cameras[i], fps);
                    break;
                }
                break;
            }
        } /* XPending */

        /* return dequeued buffers to each camera's DMA ring */
        for (i = 0; i < numCameras; i++) {
            if (frames[i])
                dc1394_capture_enqueue (cameras[i], frames[i]);
        }
    } /* while not interrupted */

    exit(0);
}
int main(int argc, char *argv[]) { fitsfile *fptr; long fpixel=1, nelements, naxes[3]; dc1394camera_t *camera; int grab_n_frames; struct timeval start_time, end_time; time_t start_sec, end_sec; suseconds_t start_usec, end_usec; float elapsed_time, fps; int i, status; unsigned int min_bytes, max_bytes, max_height, max_width; unsigned int actual_bytes; uint64_t total_bytes = 0; unsigned int width, height; dc1394video_frame_t *frame=NULL; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; char *filename; grab_n_frames = atoi(argv[1]); filename = argv[2]; width = 320; height = 240; naxes[0] = width; naxes[1] = height; naxes[2] = grab_n_frames; nelements = naxes[0]*naxes[1]*naxes[2]; stderr = freopen("grab_cube.log", "w", stderr); d = dc1394_new (); if (!d) return 1; err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid); return 1; } dc1394_camera_free_list (list); printf("Using camera with GUID %"PRIx64"\n", camera->guid); /*----------------------------------------------------------------------- * setup capture for format 7 *-----------------------------------------------------------------------*/ // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B); // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B"); // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); // configure camera for format7 err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_0"); printf ("I: video mode is format7_0\n"); err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1, &max_width, &max_height); 
DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_0"); printf ("I: max image size is: height = %d, width = %d\n", max_height, max_width); printf ("I: current image size is: height = %d, width = %d\n", height, width); err = dc1394_format7_set_roi (camera, DC1394_VIDEO_MODE_FORMAT7_1, DC1394_COLOR_CODING_MONO16, // not sure why RAW8/16 don't work DC1394_USE_MAX_AVAIL, 0, 0, // left, top width, height); // width, height DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi"); printf ("I: roi is (0, 0) - (%d, %d)\n", width, height); // set the frame rate to absolute value in frames/sec err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 330.0); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate"); printf("I: framerate is %f fps\n", 330.0); // set the shutter speed to absolute value in seconds err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 3.0e-3); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter"); printf("I: exptime is %f s\n", 3.0e-3); // set gain manually. use relative value here in range 48 to 730. 
err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 400); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain"); printf ("I: gain is %d\n", 400); // set brightness manually. use relative value in range 0 to 1023. err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 100); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness"); printf ("I: brightness is %d\n", 100); err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes"); printf ("I: total bytes is %"PRIu64" before SFF enabled\n", total_bytes); err=dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing"); /*----------------------------------------------------------------------- * print allowed and used packet size *-----------------------------------------------------------------------*/ err=dc1394_format7_get_packet_parameters(camera, DC1394_VIDEO_MODE_FORMAT7_1, &min_bytes, &max_bytes); DC1394_ERR_RTN(err,"Packet para inq error"); printf( "camera reports allowed packet size from %d - %d bytes\n", min_bytes, max_bytes); err=dc1394_format7_get_packet_size(camera, DC1394_VIDEO_MODE_FORMAT7_1, &actual_bytes); DC1394_ERR_RTN(err,"dc1394_format7_get_packet_size error"); printf( "camera reports actual packet size = %d bytes\n", actual_bytes); err=dc1394_format7_get_total_bytes(camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_RTN(err,"dc1394_query_format7_total_bytes error"); printf( "camera reports total bytes per frame = 
%"PRId64" bytes\n", total_bytes); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera,DC1394_ON); if (err!=DC1394_SUCCESS) { dc1394_log_error("unable to start camera iso transmission"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } // set up FITS image and capture fits_create_file(&fptr, filename, &status); dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height); fits_create_img(fptr, USHORT_IMG, 3, naxes, &status); /*----------------------------------------------------------------------- * capture frames and measure the time for this operation *-----------------------------------------------------------------------*/ gettimeofday(&start_time, NULL); printf("Start capture:\n"); for( i = 0; i < grab_n_frames; ++i) { /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); if (err!=DC1394_SUCCESS) { dc1394_log_error("unable to capture"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } // attempt to preallocate image array and write to memory before dumping to disk. // turns out to be slow due to large size of images. cfitsio buffering is far // more efficient. 
//memcpy(im_buffer+2*i*naxes[0]*naxes[1], //frame->image-1, //naxes[0]*naxes[1]*sizeof(short)); // just writing each frame to the FITS file goes pretty fast fits_write_img(fptr, TUSHORT, fpixel+i*naxes[0]*naxes[1], naxes[0]*naxes[1], frame->image-1, &status); // release buffer dc1394_capture_enqueue(camera,frame); } gettimeofday(&end_time, NULL); printf("End capture.\n"); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ start_sec = start_time.tv_sec; start_usec = start_time.tv_usec; end_sec = end_time.tv_sec; end_usec = end_time.tv_usec; elapsed_time = (float)((end_sec + 1.0e-6*end_usec) - (start_sec + 1.0e-6*start_usec)); fps = grab_n_frames/elapsed_time; printf("Elapsed time = %g seconds.\n", elapsed_time); printf("Framerate = %g fps.\n", fps); err=dc1394_video_set_transmission(camera,DC1394_OFF); DC1394_ERR_RTN(err,"couldn't stop the camera?"); /*----------------------------------------------------------------------- * save FITS image to disk *-----------------------------------------------------------------------*/ //fits_write_img(fptr, TUSHORT, fpixel, naxes[0]*naxes[1]*naxes[2], im_buffer, &status); fits_close_file(fptr, &status); fits_report_error(stderr, status); //free(im_buffer); printf("wrote: %s\n", filename); printf("Image is %d bits/pixel.\n", frame->data_depth); /*----------------------------------------------------------------------- * close camera, cleanup *-----------------------------------------------------------------------*/ dc1394_capture_stop(camera); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_camera_free(camera); dc1394_free (d); return 0; }
int main(int argc, char *argv[]) { FILE* imagefile; dc1394camera_t *camera; unsigned int width, height; dc1394video_frame_t *frame=NULL; //dc1394featureset_t features; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; d = dc1394_new (); err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[0].guid); return 1; } dc1394_camera_free_list (list); printf("Using camera with GUID %"PRIx64"\n", camera->guid); /*----------------------------------------------------------------------- * setup capture *-----------------------------------------------------------------------*/ err=dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set iso speed"); err=dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_1280x960_RGB8); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set video mode\n"); err=dc1394_video_set_framerate(camera, DC1394_FRAMERATE_7_5); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set framerate\n"); err=dc1394_capture_setup(camera,4, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera\n"); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not start camera iso transmission\n"); /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ 
err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not capture a frame\n"); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera,DC1394_OFF); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera?\n"); /*----------------------------------------------------------------------- * save image as 'Image.pgm' *-----------------------------------------------------------------------*/ imagefile=fopen(IMAGE_FILE_NAME, "wb"); if( imagefile == NULL) { perror( "Can't create output file"); cleanup_and_exit(camera); } dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_1280x960_RGB8, &width, &height); fprintf(imagefile,"P6\n%u %u\n255\n", width, height); fwrite(frame->image, 1, height*width*3, imagefile); fclose(imagefile); printf("wrote: " IMAGE_FILE_NAME " (%d image bytes)\n",height*width*3); /*----------------------------------------------------------------------- * close camera *-----------------------------------------------------------------------*/ dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_capture_stop(camera); dc1394_camera_free(camera); dc1394_free (d); return 0; }