static dc1394camera_t *open_camera(void)
{
    dc1394_t *d;
    dc1394camera_list_t *list;
    dc1394camera_t *camera;
    dc1394error_t err;

    d = dc1394_new();
    if (!d) {
        return NULL;
    }
    err = dc1394_camera_enumerate(d, &list);
    if (err != DC1394_SUCCESS) {
        dc1394_free(d);
        return NULL;
    }
    if (list->num == 0) {
        dc1394_camera_free_list(list);   /* free the list on this error path too */
        dc1394_free(d);
        return NULL;
    }
    camera = dc1394_camera_new(d, list->ids[0].guid);
    dc1394_camera_free_list(list);
    if (!camera) {
        dc1394_free(d);
        return NULL;
    }
    /* Note: the dc1394_t context is not handed back to the caller, so it can
     * never be released with dc1394_free() after dc1394_camera_free(). */
    printf("Using camera with GUID %"PRIx64"\n", camera->guid);
    return camera;
}
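A possible variant (a sketch, not taken from the snippet above): hand the dc1394_t context back through an out-parameter so the caller can release it with dc1394_free() once the camera has been freed. The function and parameter names are illustrative.

/* Sketch: same logic as open_camera(), but the caller receives the context
 * and is expected to call dc1394_camera_free() and then dc1394_free(). */
static dc1394camera_t *open_camera_ctx(dc1394_t **ctx_out)
{
    dc1394_t *d = dc1394_new();
    dc1394camera_list_t *list;
    dc1394camera_t *camera = NULL;

    *ctx_out = NULL;
    if (!d)
        return NULL;
    if (dc1394_camera_enumerate(d, &list) != DC1394_SUCCESS) {
        dc1394_free(d);
        return NULL;
    }
    if (list->num > 0)
        camera = dc1394_camera_new(d, list->ids[0].guid);
    dc1394_camera_free_list(list);
    if (!camera) {
        dc1394_free(d);
        return NULL;
    }
    *ctx_out = d;   /* caller frees the camera first, then the context */
    return camera;
}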
CameraInfo* FWCamera::getCameraInfos(int deviceNumber)
{
#ifdef AVG_ENABLE_1394_2
    dc1394_t* pDC1394 = dc1394_new();
    if (pDC1394 == 0) {
        AVG_ASSERT(false);
        return NULL;
    }
    dc1394camera_list_t* pCameraList;
    int err = dc1394_camera_enumerate(pDC1394, &pCameraList);
    if (err != DC1394_SUCCESS) {
        AVG_ASSERT(false);
        dc1394_free(pDC1394);
        return NULL;
    }
    if (deviceNumber < int(pCameraList->num)) {  // avoid reading past the end of ids[]
        dc1394camera_id_t id = pCameraList->ids[deviceNumber];
        dc1394camera_t* pCamera = dc1394_camera_new_unit(pDC1394, id.guid, id.unit);
        if (pCamera) {
            stringstream deviceID;
            deviceID << hex << id.guid;
            CameraInfo* camInfo = new CameraInfo("Firewire", deviceID.str());
            getCameraControls(pCamera, camInfo);
            getCameraImageFormats(pCamera, camInfo);
            dc1394_camera_free(pCamera);
            dc1394_camera_free_list(pCameraList);
            dc1394_free(pDC1394);
            return camInfo;
        }
    }
    // Free the list and the context on every path that falls through without a camera.
    dc1394_camera_free_list(pCameraList);
    dc1394_free(pDC1394);
#endif
    return NULL;
}
void ofxLibdc::stopLibdcContext() {
    libdcCameras--;
    if(libdcCameras == 0) {
        ofLog(OF_LOG_VERBOSE, "No more cameras, destroying libdc1394 context.");
        dc1394_free(libdcContext);
    }
}
int capture_final(capture_t *cap)
{
    int i;

    for (i = 0; i < cap->num_active; ++i) {
        dc1394_video_set_transmission(cap->cameras[i], DC1394_OFF);
        dc1394_capture_stop(cap->cameras[i]);
        dc1394_camera_free(cap->cameras[i]);
    }
    cap->num_active = 0;
    free(cap->cameras);
    cap->cameras = NULL;

    if (cap->camera_list != NULL) {
        dc1394_camera_free_list(cap->camera_list);
        cap->camera_list = NULL;
    }
    cap->num_cameras = 0;

    if (cap->dc1394_cxt != NULL) {
        dc1394_free(cap->dc1394_cxt);
        cap->dc1394_cxt = NULL;
    }
    return CAPTURE_SUCCESS;
}
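The fields touched by capture_final() imply a capture_t roughly like the following; this is a reconstruction for illustration, not the actual definition.

typedef struct capture_t {
    dc1394_t            *dc1394_cxt;   /* context from dc1394_new()            */
    dc1394camera_list_t *camera_list;  /* result of dc1394_camera_enumerate()  */
    int                  num_cameras;  /* cameras found on the bus             */
    dc1394camera_t     **cameras;      /* cameras that were actually opened    */
    int                  num_active;   /* number of valid entries in cameras[] */
} capture_t;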
vector<CameraInfo> CameraIIDC::getCameraList(){
    dc1394_t *context = dc1394_new();
    dc1394camera_list_t *camera_list;
    dc1394error_t err;

    vector<CameraInfo> ret;

    err = dc1394_camera_enumerate(context, &camera_list);
    DC1394_WRN(err, "libdc1394: Failed to enumerate cameras!");
    if (err != DC1394_SUCCESS) {
        // Without a valid list there is nothing to report.
        dc1394_free(context);
        return ret;
    }

    for (unsigned int i = 0; i < camera_list->num; i++) {
        dc1394camera_t *cam = dc1394_camera_new(context, camera_list->ids[i].guid);
        if (!cam)
            continue;  // camera may have disappeared or be in use
        CameraInfo info;
        //info.vendor = std::string(cam->vendor ? cam->vendor : "");
        info.vendor = "IIDC";
        info.model = string(cam->model ? cam->model : "");
        info.busID = (unsigned int)cam->guid;  // note: truncates the 64-bit GUID
        dc1394_camera_free(cam);
        ret.push_back(info);
    }

    dc1394_camera_free_list(camera_list);
    dc1394_free(context);
    return ret;
}
static void pdp_dc1394_free(t_pdp_dc1394 *x)
{
    dc1394_video_set_transmission(x->camera, DC1394_OFF);
    dc1394_capture_stop(x->camera);
    dc1394_camera_free(x->camera);
    dc1394_free(x->d);
}
int main (int argc, char **argv)
{
    opts_t opts = OPTS_INIT;
    dc1394_t *dc1394_cxt = NULL;
    dc1394camera_list_t *cam_list = NULL;
    dc1394error_t err;

    if (argc < 2) {
        show_help ();
        return EXIT_SUCCESS;
    }
    parse_opts (&opts, argc, argv);

    dc1394_cxt = dc1394_new ();
    if (dc1394_cxt == NULL) {
        printf ("error: dc1394_new() failed.\n");
        return EXIT_FAILURE;
    }

    err = dc1394_camera_enumerate (dc1394_cxt, &cam_list);
    if (err != DC1394_SUCCESS || cam_list == NULL) {
        printf ("error: dc1394_camera_enumerate() failed.\n");
        dc1394_free (dc1394_cxt);
        return EXIT_FAILURE;
    }

    /* reset specified buses */
    if (opts.show_list == 0) {
        reset_bus (&opts, dc1394_cxt, cam_list);
    }
    /* show the list of detected cameras */
    else {
        list_cameras (dc1394_cxt, cam_list);
    }

    dc1394_camera_free_list (cam_list);
    dc1394_free (dc1394_cxt);
    return EXIT_SUCCESS;
}
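The list_cameras() helper called above is not part of the snippet; a minimal sketch using only the documented dc1394camera_t fields might look like this (needs <stdio.h>, <inttypes.h> and <dc1394/dc1394.h>).

static void list_cameras(dc1394_t *dc1394_cxt, dc1394camera_list_t *cam_list)
{
    uint32_t i;
    for (i = 0; i < cam_list->num; i++) {
        dc1394camera_t *cam = dc1394_camera_new(dc1394_cxt, cam_list->ids[i].guid);
        if (cam == NULL)
            continue;   /* camera may be gone or already in use */
        printf("%u: %s %s (GUID %"PRIx64")\n", i,
               cam->vendor ? cam->vendor : "?",
               cam->model  ? cam->model  : "?",
               cam->guid);
        dc1394_camera_free(cam);
    }
}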
FWCamera::~FWCamera()
{
#ifdef AVG_ENABLE_1394_2
    dc1394_video_set_transmission(m_pCamera, DC1394_OFF);
    dc1394_capture_stop(m_pCamera);
    dc1394_camera_free(m_pCamera);
    dc1394_free(m_pDC1394);
#endif
    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "Firewire camera closed.");
}
FirewireVideo::~FirewireVideo()
{
    Stop();

    // Close camera
    dc1394_video_set_transmission(camera, DC1394_OFF);
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    dc1394_free(d);
}
//-------------------------------------------------------------------- ofxVideoGrabberPtgrey::~ofxVideoGrabberPtgrey(){ close(); //free device enumeration dc1394_camera_free_list(deviceList); //free context dc1394_free(driver); }
static int dc1394_v2_close(AVFormatContext * context)
{
    struct dc1394_data *dc1394 = context->priv_data;

    dc1394_video_set_transmission(dc1394->camera, DC1394_OFF);
    dc1394_capture_stop(dc1394->camera);
    dc1394_camera_free(dc1394->camera);
    dc1394_free(dc1394->d);

    return 0;
}
int main(int argc, char *argv[])
{
    unsigned int i;
    dc1394_t * d;
    dc1394camera_list_t * list;
    dc1394error_t err;
    dc1394featureset_t features;

    d = dc1394_new();
    if (!d)
        return 1;
    err = dc1394_camera_enumerate(d, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");
    if (list->num == 0) {
        dc1394_log_error("No cameras found");
        return 1;
    }

    for (i = 0; i < list->num; i++) {
        dc1394camera_t *camera = dc1394_camera_new(d, list->ids[i].guid);
        if (camera) {
            unsigned int j;
            dc1394video_modes_t modes;

            // Print hardware information.
            dc1394_camera_print_info(camera, stdout);

            // Print supported camera features.
            err = dc1394_feature_get_all(camera, &features);
            if (err != DC1394_SUCCESS) {
                dc1394_log_warning("Could not get feature set");
            } else {
                dc1394_feature_print_all(&features, stdout);
            }

            // Print a list of supported modes.
            printf("------ Supported Video Modes ------\n");
            err = dc1394_video_get_supported_modes(camera, &modes);
            DC1394_ERR_RTN(err, "Could not get list of modes");
            for (j = 0; j < modes.num; j++) {
                print_video_mode_info(camera, modes.modes[j]);
            }

            dc1394_camera_free(camera);
        }
    }

    dc1394_camera_free_list(list);
    dc1394_free(d);
    return 0;
}
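print_video_mode_info() is not included in the snippet; a sketch of what such a helper could do with the standard libdc1394 calls follows (output format is illustrative).

static void print_video_mode_info(dc1394camera_t *camera, dc1394video_mode_t mode)
{
    uint32_t width = 0, height = 0;

    dc1394_get_image_size_from_video_mode(camera, mode, &width, &height);
    printf("Mode %d: %ux%u%s\n", (int)mode, width, height,
           dc1394_is_video_mode_scalable(mode) ? " (Format7/scalable)" : "");

    /* Fixed modes advertise a discrete set of framerates; Format7 does not. */
    if (!dc1394_is_video_mode_scalable(mode)) {
        dc1394framerates_t framerates;
        if (dc1394_video_get_supported_framerates(camera, mode, &framerates) == DC1394_SUCCESS) {
            unsigned int k;
            for (k = 0; k < framerates.num; k++)
                printf("  framerate code %d\n", (int)framerates.framerates[k]);
        }
    }
}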
CameraIIDC::~CameraIIDC(){
    // Stop camera transmission
    if (capturing)
        stopCapture();
    else
        dc1394_capture_stop(cam);

    // Gracefully destroy the camera
    dc1394_camera_free(cam);
    dc1394_free(context);
}
void Camera::stop() { eventLoopTimer->stop(); stopClockPulse(); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_capture_stop(camera); dc1394_camera_free(camera); dc1394_free (camDict); emit stopped(); }
//-------------------------------------------------------------------- of1394VideoGrabber::~of1394VideoGrabber() { if(!failedToInit) // if the init failed, then trying to do this stuff is a very bad idea { stopThread(); ofSleepMillis(200); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_capture_stop(camera); dc1394_camera_free(camera); dc1394_free (d); } }
void DC1394Camera::shutdownLibDC1394()
{
    assert(s_camCount > 0 && "Invalid reference count");

    // Decrement reference count
    s_camCount--;

    if (s_camCount == 0)
    {
        // Free library
        LOGGER.infoStream() << "Freeing dc1394 library handle";
        dc1394_free(s_libHandle);
        s_libHandle = 0;
    }
}
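The corresponding init side of this reference-counted teardown is not shown; it presumably looks something like the sketch below (the method name is assumed, the member names follow the snippet above).

void DC1394Camera::initLibDC1394()
{
    if (s_camCount == 0)
    {
        // First camera: create the shared libdc1394 handle
        s_libHandle = dc1394_new();
        assert(s_libHandle != 0 && "dc1394_new() failed");
    }
    s_camCount++;
}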
void DisplayStatsAndExit(int sig) { int i; fprintf(stderr,"\nFramerate statistics:\n"); fprintf(stderr,"bin centers:\t["); for (i=0;i<bin_n;i++) { if (i==0) fprintf(stderr,"<"); fprintf(stderr,"%3.1f",(float)bin_min+(float)(bin_max-bin_min)/(float)bin_n*((float)i+.5)); if (i==bin_n-1) fprintf(stderr,">"); if (i<bin_n) fprintf(stderr,"\t"); } fprintf(stderr,"]\n"); fprintf(stderr,"histogram:\t["); for (i=0;i<bin_n;i++) { if (bins[i]!=0) fprintf(stderr,"%d",bins[i]); else fprintf(stderr," "); if (i<bin_n) fprintf(stderr,"\t"); } fprintf(stderr,"]\n"); fprintf(stderr,"percentage:\t["); for (i=0;i<bin_n;i++) { if (bins[i]!=0) fprintf(stderr,"%3.1f",(float)bins[i]/nframes*100); else fprintf(stderr," "); if (i<bin_n) fprintf(stderr,"\t"); } fprintf(stderr,"]\n"); free(bins); SDL_Quit(); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_capture_stop(camera); dc1394_camera_free (camera); dc1394_free(d); exit(0); }
void ieee1394capture::cleanup() { int32_t i; if (CREATED_CAMS == true) { for (i = 0; i < numCameras; i++) { dc1394_video_set_transmission(cameras[i], DC1394_OFF); dc1394_capture_stop(cameras[i]); } CREATED_CAMS=false; } if ( CREATED_BUS == true ) { dc1394_free(d); CREATED_BUS=false; } }
int main(int argc, char *argv[])
{
    dc1394camera_t * camera;
    dc1394error_t err;
    dc1394video_frame_t * frame;
    dc1394_t * d;
    dc1394camera_list_t * list;

    d = dc1394_new ();                                                        /* Initialize libdc1394 */
    if (!d)
        return 1;

    err = dc1394_camera_enumerate (d, &list);                                 /* Find cameras */
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");

    if (list->num == 0) {                                                     /* Verify that we have at least one camera */
        dc1394_log_error("No cameras found");
        return 1;
    }

    camera = dc1394_camera_new (d, list->ids[0].guid);                        /* Work with first camera */
    if (!camera) {
        dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid);
        return 1;
    }
    dc1394_camera_free_list (list);

    err = dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT);      /* Setup capture */
    err = dc1394_video_set_transmission(camera, DC1394_ON);                   /* Start transmission */
    err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); /* Capture */
    DC1394_ERR_RTN(err, "Problem getting an image");
    err = dc1394_capture_enqueue(camera, frame);                              /* Release the buffer */
    err = dc1394_video_set_transmission(camera, DC1394_OFF);                  /* Stop transmission */
    err = dc1394_capture_stop(camera);                                        /* Stop capture */

    printf("Hello World\n");                                                  /* Hey, this is a HELLO WORLD program!! */

    dc1394_camera_free (camera);                                              /* cleanup and exit */
    dc1394_free (d);
    return 0;
}
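Between the dequeue and enqueue calls above, the captured frame could be written out, for instance as a PGM image; a sketch assuming an 8-bit mono video mode:

/* frame->size[] holds width and height, frame->image points at the pixels. */
FILE *f = fopen("frame.pgm", "wb");
if (f) {
    fprintf(f, "P5\n%u %u\n255\n", frame->size[0], frame->size[1]);
    fwrite(frame->image, 1, (size_t)frame->size[0] * frame->size[1], f);
    fclose(f);
}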
void Libdc1394Grabber::cleanupCamera() { closeCamera = true; ofxThread::stopThread(); ofLog(OF_LOG_VERBOSE,"Stopped capture thread."); //this sleep seems necessary, at least on OSX, to avoid an occasional hang on exit ofSleepMillis(150); dc1394switch_t is_iso_on = DC1394_OFF; if(camera) { if (dc1394_video_get_transmission(camera, &is_iso_on)!=DC1394_SUCCESS) { is_iso_on = DC1394_ON; // try to shut ISO anyway } if (is_iso_on > DC1394_OFF) { if (dc1394_video_set_transmission(camera, DC1394_OFF)!=DC1394_SUCCESS) { ofLog(OF_LOG_ERROR, "Could not stop ISO transmission!"); } } } ofLog(OF_LOG_VERBOSE,"Stopped ISO transmission."); /* cleanup and exit */ if(cameraList) dc1394_camera_free_list (cameraList); if(camera) { dc1394_capture_stop(camera); dc1394_camera_free (camera); camera = NULL; } ofLog(OF_LOG_VERBOSE,"Stopped camera."); if(dc1394) { dc1394_free (dc1394); dc1394 = NULL; } if(pixels) { delete [] pixels; pixels = NULL; } printf("Closed!\n"); }
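The ISO-shutdown pattern used above (query the transmission state, then switch it off only if it is on, or if the query itself failed) can be extracted into a small helper; a sketch with an illustrative name:

static void stop_iso_if_running(dc1394camera_t *camera)
{
    dc1394switch_t is_iso_on = DC1394_OFF;

    if (dc1394_video_get_transmission(camera, &is_iso_on) != DC1394_SUCCESS)
        is_iso_on = DC1394_ON;   /* state unknown: try to shut ISO off anyway */
    if (is_iso_on == DC1394_ON)
        dc1394_video_set_transmission(camera, DC1394_OFF);
}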
void FWCamera::resetBus() { #ifdef AVG_ENABLE_1394_2 dc1394_t* pDC1394 = dc1394_new(); if (pDC1394 == 0) { return; } dc1394camera_list_t * pCameraList; int err=dc1394_camera_enumerate(pDC1394, &pCameraList); if (err == DC1394_SUCCESS) { if (pCameraList->num != 0) { dc1394camera_t * pCam = dc1394_camera_new(pDC1394, pCameraList->ids[0].guid); if (pCam) { dc1394_reset_bus(pCam); dc1394_camera_free(pCam); } } dc1394_camera_free_list(pCameraList); } dc1394_free(pDC1394); #endif }
/***************************************************************************** * Close: *****************************************************************************/ static void Close( vlc_object_t *p_this ) { demux_t *p_demux = (demux_t*)p_this; demux_sys_t *p_sys = p_demux->p_sys; /* Stop data transmission */ if( dc1394_video_set_transmission( p_sys->camera, DC1394_OFF ) != DC1394_SUCCESS ) msg_Err( p_demux, "Unable to stop camera iso transmission" ); /* Close camera */ dc1394_capture_stop( p_sys->camera ); CloseAudioDev( p_demux ); dc1394_camera_free(p_sys->camera); dc1394_free(p_sys->p_dccontext); free( p_sys->video_device ); free( p_sys->audio_device ); free( p_sys ); }
Camera::~Camera() { // Boost Thread grab_thread->join(); // VideoCapture Interface cvReleaseImage(&cv_image_); // Stop transmission err = dc1394_video_set_transmission(camera, DC1394_OFF); // Stop capture err = dc1394_capture_stop(camera); // Power OFF dc1394_camera_set_power(camera, DC1394_OFF); // Cleanup camera dc1394_camera_free(camera); // Close dc1394 dc1394_free(d); }
CvDC1394::~CvDC1394() { if (dc) dc1394_free(dc); dc = 0; }
int main (int argc, char **argv) { char* ntry = (char*)""; if (argc > 1) { ntry = argv[1]; } double fps = FPS; double target_dur = 1.0/fps; double tol = 1.0e-3; double total_dur = 0.0; dc1394_t * d = dc1394_new(); if (!d) { return 1; } dc1394camera_list_t * list; dc1394error_t err = dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { /* Verify that we have at least one camera */ dc1394_log_error("No cameras found"); return 1; } gCamera.init(d, list->ids[0].guid); if (!gCamera.cam()) { dc1394_log_error("Failed to initialize camera with guid %ld", list->ids[0].guid); dc1394_camera_free_list (list); return 1; } dc1394_camera_free_list (list); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err = gCamera.start_transmission(); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Could not start camera iso transmission"); /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ uint32_t width = 0; uint32_t height = 0; gCamera.get_image_size(&width, &height); cv::Mat mapping = cv::getRotationMatrix2D(cv::Point2f(width/2.0, height/2.0), 180.0, 1.0); #ifdef USE_SDL static char *var = (char*)"SDL_VIDEO_WINDOW_POS=\"1280,480\""; int ret = putenv(var); if (SDL_Init(SDL_INIT_VIDEO) != 0) { std::cerr << "DC1394: Unable to initialize SDL: " << SDL_GetError() << std::endl; return 1; } atexit(SDL_Quit); SDL_Surface *screen; screen = SDL_SetVideoMode(width, height, 24, SDL_HWSURFACE); if (screen == NULL) { std::cerr << "DC1394: Unable to set SDL video mode:" << SDL_GetError() << std::endl; } SDL_Event event; #endif #ifndef LICKOMETER pthread_t save_thread, acq_thread; pthread_create( &save_thread, NULL, &thread_save_image, NULL); #endif pthread_t save_thread, acq_thread; pthread_create( &acq_thread, NULL, &thread_acq_image, NULL); timespec t_sleep, t_rem; t_sleep.tv_sec = 0; t_sleep.tv_nsec = 1000; #ifndef STANDALONE int s; if ((s = socket(SOCKTYPE, SOCK_STREAM, 0)) < 0) { perror("DC1394: client: socket"); cleanup_and_exit(gCamera); return 1; } /* * Create the address we will be connecting to. */ #ifndef INET sockaddr_un sa; sa.sun_family = AF_UNIX; std::ostringstream tmpfn; tmpfn << "fwsocket" << ntry; std::cout << "DC1394: socket name " << tmpfn.str() << std::endl; int nameLen = strlen(tmpfn.str().c_str()); if (nameLen >= (int) sizeof(sa.sun_path) -1) { /* too long? */ cleanup_and_exit(gCamera); return 1; } sa.sun_path[0] = '\0'; /* abstract namespace */ strcpy(sa.sun_path+1, tmpfn.str().c_str()); int len = 1 + nameLen + offsetof(struct sockaddr_un, sun_path); #else sockaddr_in sa; bzero((char *) &sa, sizeof(sa)); sa.sin_family = AF_INET; hostent *server = gethostbyname("128.40.156.129"); bcopy((char *)server->h_addr, (char *)&sa.sin_addr.s_addr, server->h_length); sa.sin_port = htons(35000); int len = sizeof(sa); #endif /* * Try to connect to the address. For this to * succeed, the server must already have bound * this address, and must have issued a listen() * request. * * The third argument indicates the "length" of * the structure, not just the length of the * socket name. */ std::cout << "DC1394: Waiting for connection... 
" << std::flush; while (true) { // wait for connection: if (connect(s, (sockaddr*)&sa, len) < 0) { nanosleep(&t_sleep, &t_rem); } else { break; } } std::cout << "done" << std::endl; bool connected = false; std::vector<char> data(BUFSIZE); int nrec = recv(s, &data[0], data.size(), 0); std::string datastr(data.begin(), data.end()); if (nrec<=0) { std::cerr << "DC1394: Didn't receive start message; exiting now" << std::endl; cleanup_and_exit(gCamera); close(s); return 1; } connected = true; std::string ready = "ready"; while (send(s, ready.c_str(), ready.size(), 0) < 0) { perror("DC1394: client: send"); } int flags = 0; if (-1 == (flags = fcntl(s, F_GETFL, 0))) flags = 0; if (fcntl(s, F_SETFL, flags | O_NONBLOCK)==-1) { perror("DC1394: client: unblock"); } #endif /* pthread_mutex_lock( &camera_mutex ); gCamera.wait_for_trigger(); pthread_mutex_unlock( &camera_mutex ); Wait for acq_frame_buffer to fill instead */ int ncount = 0; cv::Mat im(cv::Size(width, height), CV_8UC1); cv::Mat thresh = cv::Mat::ones(cv::Size(width, height), CV_8UC1); cv::Mat prevs(cv::Size(width, height), CV_8UC1); cv::Mat gray(cv::Size(width, height), CV_8UC1); // wait for image: int nframes = get_image(im, mapping, false, -1, "", ncount); std::cout << "DC1394: Waiting for first image to arrive... " << std::flush; int nwait = 0; while (!nframes) { nanosleep(&t_sleep, &t_rem); std::cout << "." << std::flush; nframes = get_image(im, mapping, false, -1, "", ncount); nwait++; #ifdef STANDALONE if (nwait > 1000) { #else if (nwait > 100000) { #endif std::cout << "Time out, stopping now\n"; cleanup_and_exit(gCamera); } } timespec time0; clock_gettime(CLOCK_REALTIME, &time0); std::cout << "DC1394: image arrived: " << IplImage(im).depth << " bits, " << IplImage(im).nChannels << " channels, " << IplImage(im).widthStep << " step width" << std::endl; #ifdef USE_SDL SDL_Surface *surface = SDL_CreateRGBSurfaceFrom((void*)im.data, im.cols, im.rows, IplImage(im).depth*IplImage(im).nChannels, IplImage(im).widthStep, 0xffffff, 0xffffff, 0xffffff, 0); screen = SDL_GetVideoSurface(); if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0) SDL_UpdateRect(screen, 0, 0, 0, 0); #else cv::namedWindow("DC1394", CV_WINDOW_AUTOSIZE); cvMoveWindow("DC1394", 1280, 480); cv::imshow("DC1394", im); #endif timespec time1 = time0; timespec time2 = time0; timespec time3 = time0; timespec time4 = time0; timespec t_disconnect = time0; timespec t_notrigger = time0; #ifdef STANDALONE int s = -1; #endif std::string fn = ""; #ifdef LICKOMETER std::string fn_lick = ""; FILE* fp_lick = NULL; #endif int key = 0; int nloop = 0; while (true) { clock_gettime( CLOCK_REALTIME, &time1); #ifndef STANDALONE std::vector<char> data(BUFSIZE); int nrec = recv(s, &data[0], data.size(), 0); std::string datastr(data.begin(), data.end()); #endif nframes += get_image(im, mapping, false, s, fn, ncount); #ifndef STANDALONE // no update from blender in a long time, terminate process if (datastr.find("1")==std::string::npos) { if (connected) { t_disconnect = time1; connected = false; } else { if (tdiff(time1, t_disconnect) > TIMEOUT) { std::cout << "DC1394: Received termination signal" << std::endl; close(s); pthread_cancel(acq_thread); pthread_cancel(save_thread); return 0; } } } else { connected = true; } /* Explicit termination */ if (datastr.find("quit")!=std::string::npos) { std::cout << "DC1394: Game over signal." 
<< std::endl; std::string sclose = "close"; while (send(s, sclose.c_str(), sclose.size(), 0) < 0) { perror("DC1394: client: send"); } close(s); pthread_cancel(acq_thread); pthread_cancel(save_thread); return 0; } // Stop recording if (datastr.find("stop") != std::string::npos && fn != "") { fn = ""; #ifdef LICKOMETER fn_lick = ""; if (fp_lick) { fclose(fp_lick); fp_lick = NULL; } #endif std::cout << "DC1394: Stopping video" << std::endl; connected = true; ncount = 0; } // Start recording if (datastr.find("avi") != std::string::npos && datastr.find("stop") == std::string::npos && fn == "") { std::size_t startpos = datastr.find("begin")+5; std::size_t endpos = datastr.find("end") - datastr.find("begin") - 5; fn = datastr.substr(startpos, endpos); fn = std::string(trunk) + "data/" + fn; #ifdef LICKOMETER fn_lick = fn + "_lick"; fp_lick = fopen(fn_lick.c_str(), "wb"); std::cout << "DC1394: Recording lick detection, writing to " << fn_lick << std::endl; #else boost::filesystem::path path(fn); boost::filesystem::path writepath(path); // Test whether dir exists: if (!boost::filesystem::exists(writepath)) { std::cout << "DC1394: Creating directory " << writepath << std::endl; boost::filesystem::create_directories(writepath); } fn += "/"; /* check save frame buffer */ std::size_t nfb = save_frame_buffer.size(); if (nfb) std::cerr << "DC1394: Frame buffer isn't empty!" << std::endl; std::cout << "DC1394: Starting video, writing to " << fn << std::endl; connected = true; ncount = 0; #endif } #endif // #nstandalone #ifdef USE_SDL if (SDL_PollEvent(&event)) { #ifdef STANDALONE /* Any of these event types will end the program */ if (event.type == SDL_QUIT || event.type == SDL_KEYDOWN || event.type == SDL_KEYUP) { std::cout << std::endl; std::cout << std::endl << "DC1394: Total number of frames was " << nframes << std::endl; std::cout << std::endl << "DC1394: Frame buffer: " << acq_frame_buffer.size() << " frames left" << std::endl; close(s); pthread_cancel(acq_thread); pthread_cancel(save_thread); return 0; } #endif // STANDALONE } surface->pixels = (void*)im.data; // SDL_CreateRGBSurfaceFrom((void*)IplImage(im).imageData, // IplImage(im).width, // IplImage(im).height, // IplImage(im).depth*IplImage(im).nChannels, // IplImage(im).widthStep, // 1, 1, 1, 0); screen = SDL_GetVideoSurface(); if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0) SDL_UpdateRect(screen, 0, 0, 0, 0); #else // not SDL key = cv::waitKey(2); cv::imshow("DC1394", im); if (key == 1114155 || key == 65579 || key==43 /*+*/) { uint32_t gain = 0; err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain"); if (gain < gCamera.get_maxgain()-10) { gain += 10; pthread_mutex_lock( &camera_mutex ); err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain); pthread_mutex_unlock( &camera_mutex ); std::cout << "DC1394: New gain value: " << gain << std::endl; DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain"); } } if (key == 1114207 || key == 45 /*-*/) { uint32_t gain = 0; err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain"); if (gain > gCamera.get_mingain()+10) { gain -= 10; pthread_mutex_lock( &camera_mutex ); err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain); pthread_mutex_unlock( &camera_mutex ); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain"); } } #endif // not SDL #ifdef LICKOMETER /* IS THIS ALL YOU 
NEED THEN? Lick detection */ /* Not required because the captured image is already gray cv::Mat gray = bgr2gray(im); */ gray = thresholding(im, LICK_FRAME_THRESHOLD); if (nloop != 0) { cv::absdiff(prevs, gray, thresh); double pixel_sum_thresh = cv::sum(thresh)[0]; double pixel_sum_gray = cv::sum(gray)[0]; if (pixel_sum_thresh > LICK_SUM_THRESHOLD) { std::cout << "DC1394: Lick" << std::endl; } if (fp_lick != NULL) { fwrite(&pixel_sum_thresh, sizeof(pixel_sum_thresh), 1, fp_lick); fwrite(&pixel_sum_gray, sizeof(pixel_sum_gray), 1, fp_lick); } } prevs = gray.clone(); nloop++; #endif #ifdef STANDALONE if (key == 1048689 || key == 113 /*q*/) { std::cout << "DC1394: Mean frame rate was " << nframes/total_dur << " fps" << std::endl; pthread_cancel(acq_thread); pthread_cancel(save_thread); return 0; } if (key == 1048691 /*s*/) { fn = ""; std::cout << "DC1394: Stopping video" << std::endl; ncount = 0; } if (key == 1048690 /*r*/) { fn = trunk + std::string("tmp/"); std::cout << "DC1394: Starting video, writing to " << fn << std::endl; ncount = 0; } #endif // #standalone clock_gettime( CLOCK_REALTIME, &time2); double loop_dur = tdiff(time2, time3); clock_gettime( CLOCK_REALTIME, &time3); double meanfps = 0; total_dur = tdiff(time3, time0); if (total_dur > 0) meanfps = nframes / total_dur; double currentfps = ret / loop_dur; std::cout << "DC1394: Current fps: " << std::setprecision(7) << currentfps << " Average fps: " << std::setprecision(7) << meanfps << "\r" << std::flush; #ifdef STANDALONE // std::cout << capture_dur << "\t" << target_dur << "\t" << rem << "\t" << loop_dur << std::endl; #endif } if (d) { dc1394_free(d); } #ifndef STANDALONE close(s); #endif return 0; }
int main(int argc, char *argv[])
{
    dc1394camera_t *camera;
    unsigned int width, height;
    dc1394video_frame_t *frame = NULL;
    //dc1394featureset_t features;
    dc1394_t * d;
    dc1394camera_list_t * list;
    dc1394error_t err;
    int counter = 0;

    d = dc1394_new ();
    if (!d)
        return 1;
    err = dc1394_camera_enumerate (d, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");
    if (list->num == 0) {
        dc1394_log_error("No cameras found");
        return 1;
    }

    camera = dc1394_camera_new (d, list->ids[0].guid);
    if (!camera) {
        dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid);
        return 1;
    }
    dc1394_camera_free_list (list);

    printf("Using camera with GUID %"PRIx64"\n", camera->guid);

    /*-----------------------------------------------------------------------
     *  setup capture
     *-----------------------------------------------------------------------*/
    err = dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not set iso speed");

    err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not set video mode");

    err = dc1394_video_set_framerate(camera, DC1394_FRAMERATE_7_5);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not set framerate");

    err = dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

    /*-----------------------------------------------------------------------
     *  have the camera start sending us data
     *-----------------------------------------------------------------------*/
    err = dc1394_video_set_transmission(camera, DC1394_ON);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");

    for (;;) {
        /*-----------------------------------------------------------------------
         *  capture one frame
         *-----------------------------------------------------------------------*/
        err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not capture a frame");

        dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7, &width, &height);
        cv::Mat img = cv::Mat(height, width, CV_8U, frame->image);

        cv::imshow("Image", img);
        printf("Frame %d\n", counter);
        counter++;

        // Release the buffer only after the data has been consumed by imshow.
        err = dc1394_capture_enqueue(camera, frame);
        frame = NULL;

        // Exit on any keypress so the shutdown code below is actually reached.
        if (cv::waitKey(10) >= 0)
            break;
    }

    /*-----------------------------------------------------------------------
     *  stop data transmission
     *-----------------------------------------------------------------------*/
    err = dc1394_video_set_transmission(camera, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not stop the camera?");

    /*-----------------------------------------------------------------------
     *  close camera
     *-----------------------------------------------------------------------*/
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    dc1394_free (d);
    return 0;
}
int main(int argc, char *argv[]) { // parse configuration file // get input arguments OpenCvStereoConfig stereo_config; string config_file = ""; ConciseArgs parser(argc, argv); parser.add(config_file, "c", "config", "Configuration file containing camera GUIDs, etc.", true); parser.add(left_camera_mode, "l", "left-camera", "Calibrate just the left camera."); parser.add(right_camera_mode, "r", "right-camera", "Calibrate just the right camera."); parser.add(force_brightness, "b", "brightness", "set brightness to this level"); parser.add(force_exposure, "e", "exposure", "set exposure to this level"); parser.parse(); // parse the config file if (ParseConfigFile(config_file, &stereo_config) != true) { fprintf(stderr, "Failed to parse configuration file, quitting.\n"); return -1; } if (left_camera_mode || right_camera_mode) { stereo_mode = false; } uint64 guid = stereo_config.guidLeft; uint64 guid2 = stereo_config.guidRight; dc1394_t *d; dc1394camera_t *camera; dc1394error_t err; Mat frame_array_left[MAX_FRAMES]; Mat frame_array_right[MAX_FRAMES]; int numFrames = 0; // ----- cam 2 ----- dc1394_t *d2; dc1394camera_t *camera2; dc1394error_t err2; d = dc1394_new (); if (!d) g_critical("Could not create dc1394 context"); d2 = dc1394_new (); if (!d2) g_critical("Could not create dc1394 context for camera 2"); camera = dc1394_camera_new (d, guid); if (!camera) g_critical("Could not create dc1394 camera"); camera2 = dc1394_camera_new (d2, guid2); if (!camera2) g_critical("Could not create dc1394 camera for camera 2"); // setup err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera"); err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2"); // enable auto-exposure // turn on the auto exposure feature err = dc1394_feature_set_power(camera, DC1394_FEATURE_EXPOSURE, DC1394_ON); DC1394_ERR_RTN(err,"Could not turn on the exposure feature"); err = dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO); DC1394_ERR_RTN(err,"Could not turn on Auto-exposure"); // enable auto-exposure // turn on the auto exposure feature err = dc1394_feature_set_power(camera2, DC1394_FEATURE_EXPOSURE, DC1394_ON); DC1394_ERR_RTN(err,"Could not turn on the exposure feature for cam2"); err = dc1394_feature_set_mode(camera2, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO); DC1394_ERR_RTN(err,"Could not turn on Auto-exposure for cam2"); // enable camera err = dc1394_video_set_transmission(camera, DC1394_ON); DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission"); err2 = dc1394_video_set_transmission(camera2, DC1394_ON); DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2"); if (left_camera_mode || stereo_mode) { InitBrightnessSettings(camera, camera2); MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); } else { // use the right camera as the master for brightness // since we're calibrating that one InitBrightnessSettings(camera2, camera); MatchBrightnessSettings(camera2, camera, true); } // make opencv windows if (left_camera_mode || stereo_mode) { namedWindow("Input Left", CV_WINDOW_AUTOSIZE); moveWindow("Input Left", 100, 100); } if (right_camera_mode || stereo_mode) { namedWindow("Input Right", CV_WINDOW_AUTOSIZE); moveWindow("Input Right", 478, 100); } CvSize size; Mat cornersL, 
cornersR; int i; while (numFrames < MAX_FRAMES) { Mat chessL, chessR; // each loop dump a bunch of frames to clear the buffer MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure); for (i=0;i<10;i++) { if (left_camera_mode || stereo_mode) { chessL = GetFrameFormat7(camera); } if (right_camera_mode || stereo_mode) { chessR = GetFrameFormat7(camera2); } } // copy the images for drawing/display size = chessL.size(); Mat chessLc; chessLc.create(size, CV_32FC3); Mat chessRc; chessRc.create(size, CV_32FC3); // attempt checkerboard matching bool foundPattern = true; // set to true so we can do an OR // later if we're only using one // camera if (left_camera_mode || stereo_mode) { foundPattern = findChessboardCorners(chessL, Size(CHESS_X, CHESS_Y), cornersL); } if (right_camera_mode || stereo_mode) { foundPattern = foundPattern & findChessboardCorners(chessR, Size(CHESS_X, CHESS_Y), cornersR); } if (left_camera_mode || stereo_mode) { cvtColor( chessL, chessLc, CV_GRAY2BGR ); drawChessboardCorners(chessLc, Size(CHESS_X, CHESS_Y), cornersL, foundPattern); imshow("Input Left", chessLc); } if (right_camera_mode || stereo_mode) { cvtColor(chessR, chessRc, CV_GRAY2BGR); drawChessboardCorners(chessRc, Size(CHESS_X, CHESS_Y), cornersR, foundPattern); imshow("Input Right", chessRc); } // key codes: // page up: 654365 // page down: 65366 // b: 98 char key = waitKey(); //printf("%d\n", (int)key); if (key == 98) { break; } else if (key == 86){ if (foundPattern) { // this was a good one -- save it frame_array_left[numFrames] = chessL; frame_array_right[numFrames] = chessR; // give the user some guidence on the number // of frames they should be using if (stereo_mode) { printf("Saved frame %d / about 10\n", numFrames); } else { printf("Saved frame %d / about 20-30\n", numFrames); } numFrames ++; } else { printf("Not saving frame since did not find a checkboard.\n"); } } else if (key == 'W') { force_brightness +=20; cout << "Brightness: " << force_brightness << "\n"; } else if (key == 'w') { force_brightness -=20; cout << "Brightness: " << force_brightness << "\n"; } else if (key == 'E') { force_exposure +=20; cout << "Exposure: " << force_exposure << "\n"; } else if (key == 'e') { force_exposure -=20; cout << "Exposure: " << force_exposure << "\n"; } } printf("\n\n"); // clear out the calibration directory printf("Deleting old images...\nrm calibrationImages/*.ppm\n"); int retval = system("rm calibrationImages/*.ppm"); if (retval != 0) { printf("Warning: Deleting images may have failed.\n"); } printf("done.\n"); char filename[1000]; for (i=0;i<numFrames;i++) { if (left_camera_mode || stereo_mode) { sprintf(filename, "calibrationImages/cam1-%05d.ppm", i+1); imwrite(filename, frame_array_left[i]); } if (right_camera_mode || stereo_mode) { sprintf(filename, "calibrationImages/cam2-%05d.ppm", i+1); imwrite(filename, frame_array_right[i]); } printf("Writing frame %d\n", i); } printf("\n\n"); destroyWindow("Input Left"); destroyWindow("Input Right"); // stop data transmission err = dc1394_video_set_transmission(camera, DC1394_OFF); DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera"); err2 = dc1394_video_set_transmission(camera2, DC1394_OFF); DC1394_ERR_CLN_RTN(err2,cleanup_and_exit(camera2),"Could not stop the camera 2"); // close camera cleanup_and_exit(camera); cleanup_and_exit(camera2); dc1394_free (d); dc1394_free (d2); return 0; }
void ofxLibdc::stopLibdcContext() {
    libdcCameras--;
    if(libdcCameras == 0)
        dc1394_free(libdcContext);
}
int main(int argc, char *argv[]) { fitsfile *fptr; long fpixel=1, nelements, naxes[3]; dc1394camera_t *camera; int grab_n_frames; struct timeval start_time, end_time; time_t start_sec, end_sec; suseconds_t start_usec, end_usec; float elapsed_time, fps; int i, status; unsigned int min_bytes, max_bytes, max_height, max_width; unsigned int actual_bytes; uint64_t total_bytes = 0; unsigned int width, height; dc1394video_frame_t *frame=NULL; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; char *filename; grab_n_frames = atoi(argv[1]); filename = argv[2]; width = 320; height = 240; naxes[0] = width; naxes[1] = height; naxes[2] = grab_n_frames; nelements = naxes[0]*naxes[1]*naxes[2]; stderr = freopen("grab_cube.log", "w", stderr); d = dc1394_new (); if (!d) return 1; err=dc1394_camera_enumerate (d, &list); DC1394_ERR_RTN(err,"Failed to enumerate cameras"); if (list->num == 0) { dc1394_log_error("No cameras found"); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid); return 1; } dc1394_camera_free_list (list); printf("Using camera with GUID %"PRIx64"\n", camera->guid); /*----------------------------------------------------------------------- * setup capture for format 7 *-----------------------------------------------------------------------*/ // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B); // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B"); // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); // configure camera for format7 err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_0"); printf ("I: video mode is format7_0\n"); err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1, &max_width, &max_height); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_0"); printf ("I: max image size is: height = %d, width = %d\n", max_height, max_width); printf ("I: current image size is: height = %d, width = %d\n", height, width); err = dc1394_format7_set_roi (camera, DC1394_VIDEO_MODE_FORMAT7_1, DC1394_COLOR_CODING_MONO16, // not sure why RAW8/16 don't work DC1394_USE_MAX_AVAIL, 0, 0, // left, top width, height); // width, height DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi"); printf ("I: roi is (0, 0) - (%d, %d)\n", width, height); // set the frame rate to absolute value in frames/sec err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 330.0); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate"); printf("I: framerate is %f fps\n", 330.0); // set the shutter speed to absolute value in seconds err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE); 
DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 3.0e-3); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter"); printf("I: exptime is %f s\n", 3.0e-3); // set gain manually. use relative value here in range 48 to 730. err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 400); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain"); printf ("I: gain is %d\n", 400); // set brightness manually. use relative value in range 0 to 1023. err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 100); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness"); printf ("I: brightness is %d\n", 100); err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes"); printf ("I: total bytes is %"PRIu64" before SFF enabled\n", total_bytes); err=dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing"); /*----------------------------------------------------------------------- * print allowed and used packet size *-----------------------------------------------------------------------*/ err=dc1394_format7_get_packet_parameters(camera, DC1394_VIDEO_MODE_FORMAT7_1, &min_bytes, &max_bytes); DC1394_ERR_RTN(err,"Packet para inq error"); printf( "camera reports allowed packet size from %d - %d bytes\n", min_bytes, max_bytes); err=dc1394_format7_get_packet_size(camera, DC1394_VIDEO_MODE_FORMAT7_1, &actual_bytes); DC1394_ERR_RTN(err,"dc1394_format7_get_packet_size error"); printf( "camera reports actual packet size = %d bytes\n", actual_bytes); err=dc1394_format7_get_total_bytes(camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_RTN(err,"dc1394_query_format7_total_bytes error"); printf( "camera reports total bytes per frame = %"PRId64" bytes\n", total_bytes); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err=dc1394_video_set_transmission(camera,DC1394_ON); if (err!=DC1394_SUCCESS) { dc1394_log_error("unable to start camera iso transmission"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } // set up FITS image and capture fits_create_file(&fptr, filename, &status); dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height); fits_create_img(fptr, USHORT_IMG, 3, naxes, &status); /*----------------------------------------------------------------------- * capture frames and measure the time for this operation *-----------------------------------------------------------------------*/ gettimeofday(&start_time, NULL); printf("Start capture:\n"); for( i = 0; i < grab_n_frames; ++i) { /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); 
if (err!=DC1394_SUCCESS) { dc1394_log_error("unable to capture"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } // attempt to preallocate image array and write to memory before dumping to disk. // turns out to be slow due to large size of images. cfitsio buffering is far // more efficient. //memcpy(im_buffer+2*i*naxes[0]*naxes[1], //frame->image-1, //naxes[0]*naxes[1]*sizeof(short)); // just writing each frame to the FITS file goes pretty fast fits_write_img(fptr, TUSHORT, fpixel+i*naxes[0]*naxes[1], naxes[0]*naxes[1], frame->image-1, &status); // release buffer dc1394_capture_enqueue(camera,frame); } gettimeofday(&end_time, NULL); printf("End capture.\n"); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ start_sec = start_time.tv_sec; start_usec = start_time.tv_usec; end_sec = end_time.tv_sec; end_usec = end_time.tv_usec; elapsed_time = (float)((end_sec + 1.0e-6*end_usec) - (start_sec + 1.0e-6*start_usec)); fps = grab_n_frames/elapsed_time; printf("Elapsed time = %g seconds.\n", elapsed_time); printf("Framerate = %g fps.\n", fps); err=dc1394_video_set_transmission(camera,DC1394_OFF); DC1394_ERR_RTN(err,"couldn't stop the camera?"); /*----------------------------------------------------------------------- * save FITS image to disk *-----------------------------------------------------------------------*/ //fits_write_img(fptr, TUSHORT, fpixel, naxes[0]*naxes[1]*naxes[2], im_buffer, &status); fits_close_file(fptr, &status); fits_report_error(stderr, status); //free(im_buffer); printf("wrote: %s\n", filename); printf("Image is %d bits/pixel.\n", frame->data_depth); /*----------------------------------------------------------------------- * close camera, cleanup *-----------------------------------------------------------------------*/ dc1394_capture_stop(camera); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_camera_free(camera); dc1394_free (d); return 0; }
int main(int argc, char *argv[]) { fitsfile *fptr; long fpixel=1, nelements, naxes[2]; dc1394camera_t *camera; int grab_n_frames; struct timeval start_time, end_time; int i, j, status; unsigned int max_height, max_width; uint64_t total_bytes = 0; unsigned int width, height; dc1394video_frame_t *frame=NULL; dc1394_t * d; dc1394camera_list_t * list; dc1394error_t err; char *filename; unsigned char *buffer; float *average; grab_n_frames = atoi(argv[1]); filename = argv[2]; status = 0; width = 320; height = 240; naxes[0] = width; naxes[1] = height; nelements = naxes[0]*naxes[1]; stderr = freopen("grab_cube.log", "w", stderr); d = dc1394_new(); if (!d) return 1; err = dc1394_camera_enumerate(d, &list); DC1394_ERR_RTN(err, "Failed to enumerate cameras."); if (list->num == 0) { dc1394_log_error("No cameras found."); return 1; } camera = dc1394_camera_new (d, list->ids[0].guid); if (!camera) { dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid); return 1; } dc1394_camera_free_list(list); printf("Using camera with GUID %"PRIx64"\n", camera->guid); /*----------------------------------------------------------------------- * setup capture for format 7 *-----------------------------------------------------------------------*/ // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B); // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B"); // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); // configure camera for format7 err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_0"); err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1, &max_width, &max_height); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_0"); err = dc1394_format7_set_roi (camera, DC1394_VIDEO_MODE_FORMAT7_1, DC1394_COLOR_CODING_MONO8, // not sure why RAW8/16 don't work DC1394_USE_MAX_AVAIL, 0, 0, // left, top width, height); // width, height DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi"); // set the frame rate to absolute value in frames/sec err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 100.0); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate"); // set the shutter speed to absolute value in seconds err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual"); err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode"); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 1.0e-2); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter"); // set gain manually. use relative value here in range 48 to 730. 
err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 200); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain"); // set brightness manually. use relative value in range 0 to 1023. err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual"); err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 50); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness"); err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes); DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes"); // err = dc1394_feature_set_value (camera, DC1394_FEATURE_GAIN, 24); //DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error setting gain"); err = dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT); DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing"); /*----------------------------------------------------------------------- * have the camera start sending us data *-----------------------------------------------------------------------*/ err = dc1394_video_set_transmission(camera, DC1394_ON); if (err != DC1394_SUCCESS) { dc1394_log_error("unable to start camera iso transmission"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } /* allocate the buffers */ if (!(buffer = malloc(nelements*sizeof(char)))) { printf("Couldn't Allocate Image Buffer\n"); exit(-1); } if (!(average = calloc(nelements, sizeof(float)))) { printf("Couldn't Allocate Average Image Buffer\n"); exit(-1); } // set up FITS image and capture fits_create_file(&fptr, filename, &status); dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height); fits_create_img(fptr, FLOAT_IMG, 2, naxes, &status); /*----------------------------------------------------------------------- * capture frames and measure the time for this operation *-----------------------------------------------------------------------*/ gettimeofday(&start_time, NULL); printf("Start capture:\n"); for (i=0; i<grab_n_frames; ++i) { /*----------------------------------------------------------------------- * capture one frame *-----------------------------------------------------------------------*/ err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame); if (err != DC1394_SUCCESS) { dc1394_log_error("unable to capture"); dc1394_capture_stop(camera); dc1394_camera_free(camera); exit(1); } memcpy(buffer, frame->image, nelements*sizeof(char)); // release buffer dc1394_capture_enqueue(camera,frame); for (j=0; j<nelements; j++) { average[j] += (1.0/grab_n_frames)*(buffer[j]); } } gettimeofday(&end_time, NULL); printf("End capture.\n"); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ err = dc1394_video_set_transmission(camera, DC1394_OFF); DC1394_ERR_RTN(err,"couldn't stop the camera?"); /*----------------------------------------------------------------------- * save FITS image to disk *-----------------------------------------------------------------------*/ fits_write_img(fptr, TFLOAT, fpixel, naxes[0]*naxes[1], average, &status); fits_close_file(fptr, &status); fits_report_error(stderr, 
status); free(buffer); free(average); printf("wrote: %s\n", filename); printf("Readout is %d bits/pixel.\n", frame->data_depth); /*----------------------------------------------------------------------- * close camera, cleanup *-----------------------------------------------------------------------*/ dc1394_capture_stop(camera); dc1394_video_set_transmission(camera, DC1394_OFF); dc1394_camera_free(camera); dc1394_free(d); return 0; }