void Camera::printStatus() {

    float val;
    std::cout << "CAMERA STATUS: " << std::endl;

    dc1394_feature_get_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, &val);
    std::cout << "  framerate : " << val << " fps" << std::endl;

    uint32_t uval32;
    dc1394_feature_get_value(camera, DC1394_FEATURE_SHUTTER, &uval32);
    std::cout << "  shutter  : " << uval32 << "\n" << std::endl;

    uint64_t pio_dir_addr = PIO_DIRECTION;
    pio_dir_reg32 pio_dir_reg = readRegisterContent(pio_dir_addr);
    std::cout << "  gpio 0 as output configured : " << pio_dir_reg.io0_mode << std::endl;
    std::cout << "  gpio 1 as output configured : " << pio_dir_reg.io1_mode << std::endl;
    std::cout << "  gpio 2 as output configured : " << pio_dir_reg.io2_mode << std::endl;
    std::cout << "  gpio 3 as output configured : " << pio_dir_reg.io3_mode << "\n" << std::endl;

    uint64_t strobe_ctrl_inq_addr = STROBE_CTRL_INQ;
    strobe_ctrl_inq_reg32 strobe_ctrl_ing_reg = readRegisterContent(strobe_ctrl_inq_addr);
    std::cout << "  strobe 0 present : " << strobe_ctrl_ing_reg.strobe_0_inq << std::endl;
    std::cout << "  strobe 1 present : " << strobe_ctrl_ing_reg.strobe_1_inq << std::endl;
    std::cout << "  strobe 2 present : " << strobe_ctrl_ing_reg.strobe_2_inq << std::endl;
    std::cout << "  strobe 3 present : " << strobe_ctrl_ing_reg.strobe_3_inq << "\n" << std::endl;

    uint64_t strobe_0_inq_addr = STROBE_0_INQ;
    strobe_inq_reg32 strobe_0_inq_reg = readRegisterContent(strobe_0_inq_addr);
    std::cout << "  strobe_0_inq presence_inq    : " << strobe_0_inq_reg.presence_inq << std::endl;
    std::cout << "  strobe_0_inq readout_inq     : " << strobe_0_inq_reg.readout_inq << std::endl;
    std::cout << "  strobe_0_inq on_off_inq      : " << strobe_0_inq_reg.on_off_inq << std::endl;
    std::cout << "  strobe_0_inq polarity_inq    : " << strobe_0_inq_reg.polarity_inq << std::endl;
    std::cout << "  strobe_0_inq min_value       : " << strobe_0_inq_reg.min_value << std::endl;
    std::cout << "  strobe_0_inq max_value       : " << strobe_0_inq_reg.max_value << "\n" << std::endl;

    uint64_t strobe_1_inq_addr = STROBE_1_INQ;
    strobe_inq_reg32 strobe_1_inq_reg = readRegisterContent(strobe_1_inq_addr);
    std::cout << "  strobe_1_inq presence_inq    : " << strobe_1_inq_reg.presence_inq << std::endl;
    std::cout << "  strobe_1_inq readout_inq     : " << strobe_1_inq_reg.readout_inq << std::endl;
    std::cout << "  strobe_1_inq on_off_inq      : " << strobe_1_inq_reg.on_off_inq << std::endl;
    std::cout << "  strobe_1_inq polarity_inq    : " << strobe_1_inq_reg.polarity_inq << std::endl;
    std::cout << "  strobe_1_inq min_value       : " << strobe_1_inq_reg.min_value << std::endl;
    std::cout << "  strobe_1_inq max_value       : " << strobe_1_inq_reg.max_value << "\n" << std::endl;

    uint64_t strobe_0_cnt_addr = STROBE_0_CNT;
    strobe_cnt_reg32 strobe_0_cnt_reg = readRegisterContent(strobe_0_cnt_addr);
    std::cout << "  strobe_0_cnt presence_inq    : " << strobe_0_cnt_reg.presence_inq << std::endl;
    std::cout << "  strobe_0_cnt on_off          : " << strobe_0_cnt_reg.on_off << std::endl;
    std::cout << "  strobe_0_cnt signal_polarity : " << strobe_0_cnt_reg.signal_polarity << std::endl;
    std::cout << "  strobe_0_cnt delay_value     : " << strobe_0_cnt_reg.delay_value << std::endl;
    std::cout << "  strobe_0_cnt duration_value  : " << strobe_0_cnt_reg.duration_value << "\n" << std::endl;

    uint64_t strobe_1_cnt_addr = STROBE_1_CNT;
    strobe_cnt_reg32 strobe_1_cnt_reg = readRegisterContent(strobe_1_cnt_addr);
    std::cout << "  strobe_1_cnt presence_inq    : " << strobe_1_cnt_reg.presence_inq << std::endl;
    std::cout << "  strobe_1_cnt on_off          : " << strobe_1_cnt_reg.on_off << std::endl;
    std::cout << "  strobe_1_cnt signal_polarity : " << strobe_1_cnt_reg.signal_polarity << std::endl;
    std::cout << "  strobe_1_cnt delay_value     : " << strobe_1_cnt_reg.delay_value << std::endl;
    std::cout << "  strobe_1_cnt duration_value  : " << strobe_1_cnt_reg.duration_value << std::endl;
}
Example #2
0
/// Read the current integer value of a dc1394 feature.
///
/// @param cam      camera handle to query
/// @param setting  feature identifier (e.g. DC1394_FEATURE_SHUTTER)
/// @return the feature's current value, or 0 if the query failed
///         (previously an uninitialized value was returned on failure).
unsigned int getSetting(dc1394camera_t *cam, dc1394feature_t setting){
    dc1394error_t err;
    unsigned int value = 0;  // fix: defined fallback instead of returning garbage on error
    err = dc1394_feature_get_value(cam, setting, &value);
    if(err != DC1394_SUCCESS)
        cerr << "Could not get setting!" << endl;

    return value;
}
Example #3
0
File: libFW.c  Project: Linnara/yFirewire
/* Read the brightness ("offset") feature of the global camera.
 * Returns the current value, or -1 if the camera is not initialised.
 * On driver error the DC1394_ERR_CLN_RTN macro tears down the camera
 * and returns from this function. */
int _getOffset(){
  if(camera==NULL) {
    dc1394_log_error("Camera is not initialised");
    return -1;
  }

  uint32_t yOffset = 0;
  err=dc1394_feature_get_value(camera, DC1394_FEATURE_BRIGHTNESS, &yOffset);
  /* fix: error message used to say "gain" although BRIGHTNESS is queried */
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not get brightness");

  return yOffset;
}
Example #4
0
File: libFW.c  Project: Linnara/yFirewire
/* Read the current gain of the global camera.
 * Returns -1 if the camera is not initialised; on driver error the
 * DC1394_ERR_CLN_RTN macro cleans up and returns. */
int _getGain(){
  if(camera==NULL) {
    dc1394_log_error("Camera is not initialised");
    return -1;
  }

  uint32_t value = 0;
  err=dc1394_feature_get_value(camera, DC1394_FEATURE_GAIN, &value);
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not get gain");

  return value;
}
Example #5
0
File: libFW.c  Project: Linnara/yFirewire
/* Read the exposure feature of the global camera.
 * Returns the current value, or -1 if the camera is not initialised.
 * On driver error the DC1394_ERR_CLN_RTN macro tears down the camera
 * and returns from this function. */
int _getExposure(){
  if(camera==NULL) {
    dc1394_log_error("Camera is not initialised");
    return -1;
  }

  uint32_t expo = 0;
  err=dc1394_feature_get_value(camera, DC1394_FEATURE_EXPOSURE, &expo);
  /* fix: error message used to say "shutter" although EXPOSURE is queried */
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not get exposure");

  return expo;
}
Example #6
0
// Return the current value of an OpenCV capture property.
// Dimensions fall back to a 4:3 estimate when only one of width/height is
// known; white-balance reads go through the two-component dc1394 call; all
// other known features are fetched through the generic feature lookup table.
// Returns -1 when the property is unknown or the driver query fails
// (note: -1 is also a legal feature value in principle — see comment at end).
double CvCaptureCAM_DC1394_v2_CPP::getProperty(int propId)
{
    switch (propId)
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        // If width was never set, assume a 4:3 aspect ratio from height.
        return frameWidth ? frameWidth : frameHeight*4 / 3;
    case CV_CAP_PROP_FRAME_HEIGHT:
        return frameHeight ? frameHeight : frameWidth*3 / 4;
    case CV_CAP_PROP_FPS:
        return fps;
    case CV_CAP_PROP_RECTIFICATION:
        return rectify ? 1 : 0;
    case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
        // White balance has two components; the dc1394 API fills both at once.
        if (dc1394_feature_whitebalance_get_value(dcCam,
                &feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].BU_value,
                &feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].RV_value) == DC1394_SUCCESS)
            return feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].BU_value;
        break;  // query failed -> fall through to the -1 return below
    case CV_CAP_PROP_WHITE_BALANCE_RED_V:
        if (dc1394_feature_whitebalance_get_value(dcCam,
                &feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].BU_value,
                &feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].RV_value) == DC1394_SUCCESS)
            return feature_set.feature[DC1394_FEATURE_WHITE_BALANCE-DC1394_FEATURE_MIN].RV_value;
        break;
    case CV_CAP_PROP_GUID:
        //the least 32 bits are enough to identify the camera
        return (double) (guid & 0x00000000FFFFFFFF);
        break;
    case CV_CAP_PROP_MODE:
        return (double) userMode;
        break;
    case CV_CAP_PROP_ISO_SPEED:
        return (double) isoSpeed;
    default:
        // Generic path: map the OpenCV property id to a dc1394 feature via
        // the dc1394properties table (-1 marks unsupported properties).
        if (propId<CV_CAP_PROP_MAX_DC1394 && dc1394properties[propId]!=-1
                && dcCam)
            //&& feature_set.feature[dc1394properties[propId]-DC1394_FEATURE_MIN].on_off_capable)
            if (dc1394_feature_get_value(dcCam,(dc1394feature_t)dc1394properties[propId],
                                         &feature_set.feature[dc1394properties[propId]-DC1394_FEATURE_MIN].value) == DC1394_SUCCESS)
                return feature_set.feature[dc1394properties[propId]-DC1394_FEATURE_MIN].value;
    }
    return -1; // the value of the feature can be 0, so returning 0 as an error is wrong
}
Example #7
0
/// Return a feature's current value normalized into [0, 1000].
///
/// @param feature  dc1394 feature identifier
/// @return normalized value, or -1.0 when the camera is missing, the
///         feature is absent, it is in auto mode, or its range is
///         degenerate (max <= min).
float Libdc1394SequenceGrabber::getFeature(dc1394feature_t feature)
{
	if (!_camera) return -1;
	
	// fix: initialize so a failed driver query cannot leave garbage values
	uint32_t min = 0, max = 0, value = 0;
	dc1394feature_mode_t current_mode;
	dc1394bool_t is_present;
	
	dc1394_feature_is_present(_camera, feature, &is_present);
	if (is_present == DC1394_FALSE) 
		return -1.0; 
	
	dc1394_feature_get_mode(_camera, feature, &current_mode);
	
	// In auto mode the raw value is not meaningful to the caller.
	if (current_mode == DC1394_FEATURE_MODE_AUTO) 
		return -1.0;
		
	dc1394_feature_get_boundaries(_camera, feature, &min, &max);
	dc1394_feature_get_value(_camera, feature, &value);
	
	// fix: guard against division by zero (max == min) and against
	// unsigned underflow when value < min.
	if (max <= min || value < min)
		return -1.0;
	
	return 1000.0 * static_cast<float>(value - min) / static_cast<float>(max - min); 
}
Example #8
0
int main (int argc, char **argv)
{

    
    char* ntry = (char*)"";
    if (argc > 1) {
        ntry = argv[1];
    }

    double fps = FPS;
    double target_dur = 1.0/fps;
    double tol = 1.0e-3;
    double total_dur = 0.0;

    dc1394_t * d = dc1394_new(); 
    if (!d) {
        return 1;
    }
    dc1394camera_list_t * list;
    dc1394error_t err = dc1394_camera_enumerate (d, &list);
    DC1394_ERR_RTN(err,"Failed to enumerate cameras");
    if (list->num == 0) {                                                  /* Verify that we have at least one camera */
        dc1394_log_error("No cameras found");
        return 1;
    }

    gCamera.init(d, list->ids[0].guid);
    if (!gCamera.cam()) {
        dc1394_log_error("Failed to initialize camera with guid %ld", list->ids[0].guid);
        dc1394_camera_free_list (list);

        return 1;
    }
    dc1394_camera_free_list (list);

    /*-----------------------------------------------------------------------
     *  have the camera start sending us data
     *-----------------------------------------------------------------------*/
    err = gCamera.start_transmission();
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Could not start camera iso transmission");

    
    /*-----------------------------------------------------------------------
     *  capture one frame
     *-----------------------------------------------------------------------*/
    uint32_t width = 0;
    uint32_t height = 0;
    gCamera.get_image_size(&width, &height);
    cv::Mat mapping = cv::getRotationMatrix2D(cv::Point2f(width/2.0, height/2.0), 180.0, 1.0);

#ifdef USE_SDL
    static char *var = (char*)"SDL_VIDEO_WINDOW_POS=\"1280,480\"";
    int ret = putenv(var);
    
    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
        std::cerr << "DC1394: Unable to initialize SDL: " <<  SDL_GetError() << std::endl;
        return 1;
    }
    atexit(SDL_Quit);
    SDL_Surface *screen;
    screen = SDL_SetVideoMode(width, height, 24, SDL_HWSURFACE);
    if (screen == NULL) {
        std::cerr << "DC1394: Unable to set SDL video mode:" << SDL_GetError() << std::endl;
    }
    SDL_Event event;
#endif

#ifndef LICKOMETER    
    pthread_t save_thread, acq_thread;
    pthread_create( &save_thread, NULL, &thread_save_image, NULL);
#endif

    pthread_t save_thread, acq_thread;
    pthread_create( &acq_thread, NULL, &thread_acq_image, NULL);

    timespec t_sleep, t_rem;
    t_sleep.tv_sec = 0;
    t_sleep.tv_nsec = 1000;
    
#ifndef STANDALONE
    int s;
    if ((s = socket(SOCKTYPE, SOCK_STREAM, 0)) < 0) {
        perror("DC1394: client: socket");
        cleanup_and_exit(gCamera);
        return 1;
    }

    /*
     * Create the address we will be connecting to.
     */
#ifndef INET
    sockaddr_un sa;
    sa.sun_family = AF_UNIX;

    std::ostringstream tmpfn;
    tmpfn << "fwsocket" << ntry;
    std::cout << "DC1394: socket name " << tmpfn.str() << std::endl;
    
    int nameLen = strlen(tmpfn.str().c_str());
    if (nameLen >= (int) sizeof(sa.sun_path) -1) { /* too long? */
        cleanup_and_exit(gCamera);
        return 1;
    }
    
    sa.sun_path[0] = '\0';  /* abstract namespace */
    strcpy(sa.sun_path+1, tmpfn.str().c_str());
    int len = 1 + nameLen + offsetof(struct sockaddr_un, sun_path);
#else
    sockaddr_in sa;
    bzero((char *) &sa, sizeof(sa));
    sa.sin_family = AF_INET;
    hostent *server = gethostbyname("128.40.156.129");
    bcopy((char *)server->h_addr, 
          (char *)&sa.sin_addr.s_addr,
          server->h_length);
    sa.sin_port = htons(35000);
    int len = sizeof(sa);
#endif    
    /*
     * Try to connect to the address.  For this to
     * succeed, the server must already have bound
     * this address, and must have issued a listen()
     * request.
     *
     * The third argument indicates the "length" of
     * the structure, not just the length of the
     * socket name.
     */
    std::cout << "DC1394: Waiting for connection... " << std::flush;
    while (true) {
        // wait for connection:
        if (connect(s, (sockaddr*)&sa, len) < 0) {
            nanosleep(&t_sleep, &t_rem);
        } else {
            break;
        }
    }
    std::cout << "done" << std::endl;
    bool connected = false;
    std::vector<char> data(BUFSIZE);
    int nrec = recv(s, &data[0], data.size(), 0);
    std::string datastr(data.begin(), data.end());
    if (nrec<=0) {
        std::cerr << "DC1394: Didn't receive start message; exiting now" << std::endl;
        cleanup_and_exit(gCamera);
	close(s);
        return 1;
    }
    connected = true;
    
    std::string ready = "ready";
    while (send(s, ready.c_str(), ready.size(), 0) < 0) {
        perror("DC1394: client: send");
    }

    int flags = 0;
    if (-1 == (flags = fcntl(s, F_GETFL, 0)))
        flags = 0;

    if (fcntl(s, F_SETFL, flags | O_NONBLOCK)==-1) {
        perror("DC1394: client: unblock");
    }
#endif
    
    /* pthread_mutex_lock( &camera_mutex );
       gCamera.wait_for_trigger();
       pthread_mutex_unlock( &camera_mutex );

       Wait for acq_frame_buffer to fill instead
    */
    

    int ncount = 0;
    cv::Mat im(cv::Size(width, height), CV_8UC1);
    cv::Mat thresh = cv::Mat::ones(cv::Size(width, height), CV_8UC1);
    cv::Mat prevs(cv::Size(width, height), CV_8UC1);
    cv::Mat gray(cv::Size(width, height), CV_8UC1);
    
    // wait for image:
    int nframes = get_image(im, mapping, false, -1, "", ncount);
    std::cout << "DC1394: Waiting for first image to arrive... " << std::flush;
    int nwait = 0;
    while (!nframes) {
        nanosleep(&t_sleep, &t_rem);
        std::cout << "." << std::flush;
        nframes = get_image(im, mapping, false, -1, "", ncount);
        nwait++;
#ifdef STANDALONE
	if (nwait > 1000) {
#else
	if (nwait > 100000) {
#endif
            std::cout << "Time out, stopping now\n";
            cleanup_and_exit(gCamera);
	}
    }
    timespec time0;
    clock_gettime(CLOCK_REALTIME, &time0);
    std::cout << "DC1394: image arrived: "
              << IplImage(im).depth << " bits, "
              << IplImage(im).nChannels << " channels, "
              << IplImage(im).widthStep << " step width"  << std::endl;

#ifdef USE_SDL
    SDL_Surface *surface =
        SDL_CreateRGBSurfaceFrom((void*)im.data,
                                 im.cols,
                                 im.rows,
                                 IplImage(im).depth*IplImage(im).nChannels,
                                 IplImage(im).widthStep,
                                 0xffffff, 0xffffff, 0xffffff, 0);
    screen = SDL_GetVideoSurface();
    if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0)
        SDL_UpdateRect(screen, 0, 0, 0, 0);
#else
    cv::namedWindow("DC1394", CV_WINDOW_AUTOSIZE);
    cvMoveWindow("DC1394", 1280, 480);

    cv::imshow("DC1394", im);
#endif

    timespec time1 = time0;
    timespec time2 = time0;
    timespec time3 = time0;
    timespec time4 = time0;
    timespec t_disconnect = time0;
    timespec t_notrigger = time0;

#ifdef STANDALONE
    int s = -1;
#endif

    std::string fn = "";
#ifdef LICKOMETER
    std::string fn_lick = "";
    FILE* fp_lick = NULL;
#endif
    int key = 0;
    int nloop = 0;
    while (true) {
        clock_gettime( CLOCK_REALTIME, &time1);
#ifndef STANDALONE
        std::vector<char> data(BUFSIZE);
        int nrec = recv(s, &data[0], data.size(), 0);
        std::string datastr(data.begin(), data.end());
#endif

        nframes += get_image(im, mapping, false, s, fn, ncount);

#ifndef STANDALONE

        // no update from blender in a long time, terminate process
        if (datastr.find("1")==std::string::npos) {
            if (connected) {
                t_disconnect = time1;
                connected = false;
            } else {
                if (tdiff(time1, t_disconnect) > TIMEOUT) {
                    std::cout << "DC1394: Received termination signal" << std::endl;
                    close(s);
                    pthread_cancel(acq_thread);
                    pthread_cancel(save_thread);
                    return 0;
                }
            }
        } else {
            connected = true;
        }

	/* Explicit termination */
        if (datastr.find("quit")!=std::string::npos) {
            std::cout << "DC1394: Game over signal." << std::endl;
            std::string sclose = "close";
            while (send(s, sclose.c_str(), sclose.size(), 0) < 0) {
                perror("DC1394: client: send");
            }
            close(s);
            pthread_cancel(acq_thread);
            pthread_cancel(save_thread);
            return 0;
        }

        // Stop recording
        if (datastr.find("stop") != std::string::npos && fn != "") {
            fn = "";
#ifdef LICKOMETER
	    fn_lick = "";
	    if (fp_lick) {
                fclose(fp_lick);
		fp_lick = NULL;
            }
#endif
            std::cout << "DC1394: Stopping video" << std::endl;
            connected = true;
            ncount = 0;
        }

        // Start recording
        if (datastr.find("avi") != std::string::npos && datastr.find("stop") == std::string::npos && fn == "") {
            std::size_t startpos = datastr.find("begin")+5; 
            std::size_t endpos = datastr.find("end") - datastr.find("begin") - 5; 
            fn = datastr.substr(startpos, endpos);
            fn = std::string(trunk) + "data/" + fn;
#ifdef LICKOMETER
	    fn_lick = fn + "_lick";
	    fp_lick = fopen(fn_lick.c_str(), "wb");
            std::cout << "DC1394: Recording lick detection, writing to " << fn_lick << std::endl;
#else
            boost::filesystem::path path(fn);
            boost::filesystem::path writepath(path);

            // Test whether dir exists:
            if (!boost::filesystem::exists(writepath)) {
                std::cout << "DC1394: Creating directory " << writepath << std::endl;
                boost::filesystem::create_directories(writepath);
            }
            fn += "/";

            /* check save frame buffer */
            std::size_t nfb = save_frame_buffer.size();
            if (nfb)
                std::cerr << "DC1394: Frame buffer isn't empty!" << std::endl;

            std::cout << "DC1394: Starting video, writing to " << fn << std::endl;
            connected = true;
            ncount = 0;
#endif
        }
#endif // #nstandalone

#ifdef USE_SDL
        if (SDL_PollEvent(&event)) {
#ifdef STANDALONE
            /* Any of these event types will end the program */
            if (event.type == SDL_QUIT
                || event.type == SDL_KEYDOWN
                || event.type == SDL_KEYUP) {
                std::cout << std::endl;
                std::cout << std::endl << "DC1394: Total number of frames was " << nframes << std::endl;
                std::cout << std::endl << "DC1394: Frame buffer: " << acq_frame_buffer.size() << " frames left" << std::endl;
                close(s);
                pthread_cancel(acq_thread);
                pthread_cancel(save_thread);
                return 0;
            }
#endif // STANDALONE
        }
        surface->pixels = (void*)im.data;
        // SDL_CreateRGBSurfaceFrom((void*)IplImage(im).imageData,
        //                          IplImage(im).width,
        //                          IplImage(im).height,
        //                          IplImage(im).depth*IplImage(im).nChannels,
        //                          IplImage(im).widthStep,
        //                          1, 1, 1, 0);
        screen = SDL_GetVideoSurface();
        if(SDL_BlitSurface(surface, NULL, screen, NULL) == 0)
            SDL_UpdateRect(screen, 0, 0, 0, 0);
#else // not SDL
        key = cv::waitKey(2);
        cv::imshow("DC1394", im);
        if (key == 1114155 || key == 65579 || key==43 /*+*/) {
            uint32_t gain = 0;
            err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain);
            DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain");
            if (gain < gCamera.get_maxgain()-10) {
                gain += 10;
                pthread_mutex_lock( &camera_mutex );
                err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain);
                pthread_mutex_unlock( &camera_mutex );
                std::cout << "DC1394: New gain value: " << gain << std::endl;
                DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain");
            }
        }
        if (key == 1114207 || key == 45 /*-*/) {
            uint32_t gain = 0;
            err = dc1394_feature_get_value(gCamera.cam(), DC1394_FEATURE_GAIN, &gain);
            DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't get gain");
            if (gain > gCamera.get_mingain()+10) {
                gain -= 10;
                pthread_mutex_lock( &camera_mutex );
                err = dc1394_feature_set_value(gCamera.cam(), DC1394_FEATURE_GAIN, gain);
                pthread_mutex_unlock( &camera_mutex );
                DC1394_ERR_CLN_RTN(err,cleanup_and_exit(gCamera),"Can't set gain");
            }
        }
#endif // not SDL

#ifdef LICKOMETER        
	/* IS THIS ALL YOU NEED THEN?
	   Lick detection */
	/* Not required because the captured image is already gray
	   cv::Mat gray = bgr2gray(im); */
	gray = thresholding(im, LICK_FRAME_THRESHOLD);

        if (nloop != 0) {
	    cv::absdiff(prevs, gray, thresh);
	    double pixel_sum_thresh = cv::sum(thresh)[0];
	    double pixel_sum_gray = cv::sum(gray)[0];
	    if (pixel_sum_thresh > LICK_SUM_THRESHOLD) {
	      std::cout << "DC1394: Lick" << std::endl;
	    }
	    if (fp_lick != NULL) {
                fwrite(&pixel_sum_thresh, sizeof(pixel_sum_thresh), 1, fp_lick);
	        fwrite(&pixel_sum_gray, sizeof(pixel_sum_gray), 1, fp_lick);
	    }
	}

	prevs = gray.clone();
	nloop++;
#endif
#ifdef STANDALONE
        if (key == 1048689 || key == 113 /*q*/) {
            std::cout << "DC1394: Mean frame rate was " << nframes/total_dur << " fps" << std::endl;
            pthread_cancel(acq_thread);
            pthread_cancel(save_thread);
            return 0;
        }
        if (key == 1048691 /*s*/) {
            fn = "";
            std::cout << "DC1394: Stopping video" << std::endl;
            ncount = 0;
        }
        if (key == 1048690 /*r*/) {
            fn = trunk + std::string("tmp/");
            std::cout << "DC1394: Starting video, writing to " << fn << std::endl;
            ncount = 0;
        }
#endif // #standalone
        clock_gettime( CLOCK_REALTIME, &time2);
        double loop_dur = tdiff(time2, time3);
        clock_gettime( CLOCK_REALTIME, &time3);
        double meanfps = 0;

        total_dur = tdiff(time3, time0);
        if (total_dur > 0)
            meanfps = nframes / total_dur;
        double currentfps = ret / loop_dur;
        std::cout << "DC1394: Current fps: " << std::setprecision(7) << currentfps
                  << " Average fps: " << std::setprecision(7) << meanfps << "\r" << std::flush;
#ifdef STANDALONE
        // std::cout << capture_dur << "\t" << target_dur << "\t" << rem << "\t" << loop_dur << std::endl;
#endif
    }

    if (d) {
        dc1394_free(d);
    }

#ifndef STANDALONE
    close(s);
#endif
    return 0;
}
Example #9
0
/// Read the current integer value of a dc1394 feature from the camera.
///
/// @param feature  feature identifier
/// @return the feature's value, or 0 if the driver call failed
///         (previously an uninitialized value could be returned).
unsigned int ofxLibdc::getFeature(dc1394feature_t feature) {
	unsigned int value = 0;  // fix: defined fallback instead of garbage on error
	dc1394_feature_get_value(camera, feature, &value);
	return value;
}
Example #10
0
/** Get feature values.
 *
 *  @pre feature_set_ initialized for this camera
 *
 *  @param finfo pointer to information for this feature
 *  @param value [out] pointer where parameter value stored
 *  @param value2 [out] optional pointer for second parameter value
 *               for white balance.  Otherwise NULL.
 */
void Features::getValues(dc1394feature_info_t *finfo,
                           double *value, double *value2)
{
  const dc1394feature_t feature = finfo->id;

  // A feature whose value cannot be read back is reported and skipped.
  if (!finfo->readout_capable)
    {
      ROS_INFO_STREAM("feature " << featureName(feature)
                      << " value not available from device");
      return;
    }

  dc1394error_t rc;

  if (feature == DC1394_FEATURE_WHITE_BALANCE)
    {
      // White Balance is special: it carries two components (Blue/U, Red/V).
      if (finfo->absolute_capable && finfo->abs_control)
        {
          // supports reading and setting float value
          // @todo get absolute White Balance values
          rc = DC1394_FUNCTION_NOT_SUPPORTED;
        }
      else
        {
          // get integer White Balance values and widen them to double
          uint32_t bu_val;
          uint32_t rv_val;
          rc = dc1394_feature_whitebalance_get_value(camera_, &bu_val, &rv_val);
          if (DC1394_SUCCESS == rc)
            {
              *value = bu_val;
              *value2 = rv_val;
            }
        }
      if (DC1394_SUCCESS != rc)
        {
          ROS_WARN_STREAM("failed to get values for feature "
                          << featureName(feature));
          return;
        }
      ROS_DEBUG_STREAM("feature " << featureName(feature)
                       << " Blue/U: " << *value
                       << " Red/V: " << *value2);
      return;
    }

  // Every other feature has a single component; pick the float or the
  // integer read path depending on the feature's capabilities.
  if (finfo->absolute_capable && finfo->abs_control)
    {
      float fval;
      rc = dc1394_feature_get_absolute_value(camera_, feature, &fval);
      if (DC1394_SUCCESS == rc)
        {
          *value = fval;                // convert to double
        }
    }
  else
    {
      uint32_t ival;
      rc = dc1394_feature_get_value(camera_, feature, &ival);
      if (DC1394_SUCCESS == rc)
        {
          *value = ival;                // convert to double
        }
    }

  if (DC1394_SUCCESS == rc)
    {
      ROS_DEBUG_STREAM("feature " << featureName(feature)
                       << " has value " << *value);
    }
  else
    {
      ROS_WARN_STREAM("failed to get value of feature "
                      << featureName(feature));
    }
}
/*
 * Read one camera parameter into *value.
 * Returns 0 on success; -1 for unknown parameter names or driver failures.
 * White-balance reads use the two-component dc1394 call, discarding the
 * component the caller did not ask for.
 */
int ar2VideoGetValue1394(AR2VideoParam1394T *vid, int paramName, int *value)
{
    unsigned int ub, vr;

    switch (paramName)
    {
    case AR_VIDEO_1394_BRIGHTNESS:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_BRIGHTNESS, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get brightness.\n");
        return -1;

    case AR_VIDEO_1394_EXPOSURE:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_EXPOSURE, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get exposure.\n");
        return -1;

    case AR_VIDEO_1394_WHITE_BALANCE_UB:
        if (dc1394_feature_whitebalance_get_value(vid->camera, (uint32_t*)value, &vr) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get white balance ub.\n");
        return -1;

    case AR_VIDEO_1394_WHITE_BALANCE_VR:
        if (dc1394_feature_whitebalance_get_value(vid->camera, &ub, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get white balance vr.\n");
        return -1;

    case AR_VIDEO_1394_SHUTTER_SPEED:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_SHUTTER, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get shutter speed.\n");
        return -1;

    case AR_VIDEO_1394_GAIN:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_GAIN, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get gain.\n");
        return -1;

    case AR_VIDEO_1394_FOCUS:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_FOCUS, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get focus.\n");
        return -1;

    case AR_VIDEO_1394_GAMMA:
        if (dc1394_feature_get_value(vid->camera, DC1394_FEATURE_GAMMA, (uint32_t*)value) == DC1394_SUCCESS) return 0;
        ARLOGe("unable to get gamma.\n");
        return -1;
    }

    return -1;
}
Example #12
0
/*
 * Enumerate every dc1394 feature the camera reports and register the
 * usable ones on camInfo as CameraControl entries (name, min, max,
 * current value). Features that are absent, lack boundaries, fail to
 * read, or have no mapping to a CameraFeature enum are skipped.
 */
void FWCamera::getCameraControls(dc1394camera_t* pCamera, CameraInfo* camInfo)
{
    dc1394featureset_t featureSet;
    if (dc1394_feature_get_all(pCamera, &featureSet) != DC1394_SUCCESS) {
        AVG_ASSERT(false);
        return;
    }

    for (int featID = DC1394_FEATURE_MIN; featID <= DC1394_FEATURE_MAX; ++featID) {
        const dc1394feature_info_t& info =
                featureSet.feature[featID - DC1394_FEATURE_MIN];

        // Skip features the camera does not expose at all.
        dc1394bool_t present;
        dc1394_feature_is_present(pCamera, info.id, &present);
        if (present != DC1394_TRUE) {
            continue;
        }

        //TODO: 428 (TRIGGER) doesnt have min max
        uint32_t minVal = -1;
        uint32_t maxVal = -1;
        if (dc1394_feature_get_boundaries(pCamera, info.id, &minVal, &maxVal)
                != DC1394_SUCCESS) {
            continue;
        }

        uint32_t curVal = -1;
        switch (info.id) {
            case DC1394_FEATURE_TEMPERATURE: {
                uint32_t targetTemp = -1;
                uint32_t currentTemp = -1;
                if (dc1394_feature_temperature_get_value(pCamera, &targetTemp,
                        &currentTemp) != DC1394_SUCCESS) {
                    continue;
                }
                curVal = currentTemp;
                break;
            }
            //TODO: Think about a way to get this information into CameraInfo
            case DC1394_FEATURE_WHITE_BALANCE: {
                uint32_t ubValue = -1;
                uint32_t vrValue = -1;
                if (dc1394_feature_whitebalance_get_value(pCamera, &ubValue,
                        &vrValue) != DC1394_SUCCESS) {
                    continue;
                }
                //actValue = ubValue; //vrValue;
                break;
            }
            default: {
                if (dc1394_feature_get_value(pCamera, info.id, &curVal)
                        != DC1394_SUCCESS) {
                    continue;
                }
                break;
            }
        }

        CameraFeature mappedFeature = featureIDToEnum(info.id);
        if (mappedFeature == CAM_FEATURE_UNSUPPORTED) {
            continue;
        }
        std::string controlName = cameraFeatureToString(mappedFeature);

        CameraControl control = CameraControl(controlName,
                (int) minVal,
                (int) maxVal,
                (int) curVal); //TODO: isnt really a default value!?
        camInfo->addControl(control);
    }
}