Example #1
    void Daemon::run() {
        while (!stop) {
            if (!requestQueue.size()) {
                sensor->generateRequest();
            }
            
            if (!requestQueue.size()) {
                timespec sleepDuration;
                sleepDuration.tv_sec = 0;
                sleepDuration.tv_nsec = 100000000; // 100 ms
                dprintf(5, "Dummy::Sensor::Daemon: Empty queue, sleeping for a bit\n");
                nanosleep(&sleepDuration, NULL);
                continue;
            }
            dprintf(4, "Dummy::Sensor::Daemon: Processing new request\n");
            _Frame *f = requestQueue.pull();

            f->exposureStartTime = Time::now();
            f->exposureEndTime = f->exposureStartTime + f->shot().exposure;
            f->exposure = f->shot().exposure;
            f->gain = f->shot().gain;
            f->whiteBalance = f->shot().whiteBalance;
            f->testPattern = f->shot().testPattern;
            f->srcFile = f->shot().srcFile;

            timespec frameDuration;
            int duration = (f->shot().exposure > f->shot().frameTime ?
                            f->shot().exposure : f->shot().frameTime);
            frameDuration.tv_sec = duration / 1000000;
            frameDuration.tv_nsec = 1000 * (duration % 1000000);

            dprintf(4, "Dummy::Sensor::Daemon: Sleeping for frame duration %d us (%d s %d nsec) at %s\n", duration, frameDuration.tv_sec, frameDuration.tv_nsec,f->exposureStartTime.toString().c_str() );
            nanosleep(&frameDuration, NULL);
            dprintf(4, "Dummy::Sensor::Daemon: Done sleeping at %s\n", Time::now().toString().c_str() );
            f->frameTime = Time::now() - f->exposureStartTime;

            f->image = f->shot().image;
            if (f->image.autoAllocate()) {
                f->image = Image(f->image.size(), f->image.type());
            }
            
            switch(f->testPattern) {
            case BARS:
            case CHECKERBOARD:
                dprintf(4, "Dummy::Sensor::Daemon: Drawing test pattern\n");
                if (!f->image.discard()) {
                    for(unsigned int y=0; y < f->image.height(); y++) {
                        for (unsigned int x=0; x < f->image.width(); x++) {
                            int fX = 10000*x / (f->image.width()-1);
                            int fY = 10000*y / (f->image.height()-1);

                            unsigned short lum;
                            unsigned short rawR=0, rawG=0, rawB=0;

                            switch (f->testPattern) {
                            case BARS:
                                if (fY < 5000) {
                                    // Vertical bars
                                    if (fX < 2500) {
                                        lum = (fX / 100) * 900 / 25 + 100;
                                        rawR = ((fX / 100) % 2) * lum;
                                        rawG = ((fX / 100) % 2) * lum;
                                        rawB = ((fX / 100) % 2) * lum;
                                    } else if (fX < 5000) {
                                        lum = ((fX - 2500)/ 100) * 900/ 25 + 100;
                                        rawR = ((fX / 100) % 2) * lum;
                                        rawG = ((fX / 100) % 2) * lum / 100;
                                        rawB = ((fX / 100) % 2) * lum / 100;
                                    } else if (fX < 7500) {
                                        lum = ((fX - 5000)/ 100) * 900/ 25 + 100;
                                        rawR = ((fX / 100) % 2) * lum / 100;
                                        rawG = ((fX / 100) % 2) * lum;
                                        rawB = ((fX / 100) % 2) * lum / 100;
                                    } else {
                                        lum = ((fX - 7500)/ 100) * 900/ 25 + 100;
                                        rawR = ((fX / 100) % 2) * lum / 100;
                                        rawG = ((fX / 100) % 2) * lum / 100;
                                        rawB = ((fX / 100) % 2) * lum;
                                    }
                                } else {
                                    // Horizontal bars
                                    if (fX < 2500) {
                                        rawR = ((fY / 100) % 2) * 1000;
                                        rawG = ((fY / 100) % 2) * 1000;
                                        rawB = ((fY / 100) % 2) * 1000;
                                    } else if (fX < 5000) {
                                        rawR = ((fY / 100) % 2) * 1000;
                                        rawG = 10;
                                        rawB = 10;
                                    } else if (fX < 7500) {
                                        rawR = 10;
                                        rawG = ((fY / 100) % 2) * 1000;
                                        rawB = 10;
                                    } else {
                                        rawR = 10;
                                        rawG = 10;
                                        rawB = ((fY / 100) % 2) * 1000;
                                    }
                                }
                                break;
                            case CHECKERBOARD:
                                if (fX < 5000) {
                                    if (fY < 5000) {
                                        lum = fX * 900 / 5000 + 100;
                                        rawR =
                                            (((fX / 250) % 2) ^ 
                                             ((fY / 250) % 2)) *
                                            lum;
                                        rawG = rawR;
                                        rawB = rawR;
                                    } else {
                                        lum = fX * 900 / 5000 + 100;
                                        rawR = 
                                            (((fX / 250) % 2) ^ 
                                             ((fY / 250) % 2)) *
                                            lum;
                                        rawG = rawR/100;
                                        rawB = rawR/100;
                                    }
                                } else {
                                    if (fY < 5000) {
                                        lum = (fX-5000) * 900 / 5000 + 100;
                                        rawG = 
                                            (((fX / 250) % 2) ^ 
                                             ((fY / 250) % 2)) *
                                            lum;
                                        rawR = rawG/100;
                                        rawB = rawG/100;
                                    } else {
                                        lum = (fX-5000) * 900 / 5000 + 100;
                                        rawB = 
                                            (((fX / 250) % 2) ^
                                             ((fY / 250) % 2)) *
                                            lum;
                                        rawR = rawB/100;
                                        rawG = rawB/100;
                                    }
                                }
                                break;
                            default:
                                break;
                            }

                            rawR *= f->gain*f->exposure/10000;
                            rawG *= f->gain*f->exposure/10000;
                            rawB *= f->gain*f->exposure/10000;

                            switch (f->image.type()) {
                            case RGB24: {
                                unsigned char *px = f->image(x,y);
                                px[0] = rawR > 1000 ? 250 : rawR / 4;
                                px[1] = rawG > 1000 ? 250 : rawG / 4;
                                px[2] = rawB > 1000 ? 250 : rawB / 4;
                                break;
                            }
                            case RGB16: {
                                unsigned short *px = (unsigned short *)f->image(x,y);
                                unsigned char r = rawR > 1000 ? 250 : rawR / 4;
                                unsigned char g = rawG > 1000 ? 250 : rawG / 4;
                                unsigned char b = rawB > 1000 ? 250 : rawB / 4;
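                                // Pack as 16-bit 5-6-5: red in bits 0-4,
                                // green in bits 5-10, blue in bits 11-15.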
                                *px = ( (r / 8) | 
                                        ( (g / 4) << 5) |  
                                        ( (b / 8) << 11) );
                                break;
                            }
                            case UYVY: {
                                unsigned char *px = (unsigned char *)f->image(x,y);
                                unsigned char r = rawR > 1000 ? 250 : rawR / 4;
                                unsigned char g = rawG > 1000 ? 250 : rawG / 4;
                                unsigned char b = rawB > 1000 ? 250 : rawB / 4;
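                                // BT.601 RGB -> YCbCr, full range, with the
                                // usual 128 offset on the chroma channels.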
                                unsigned char luma = 0.299 * r + 0.587 * g + 0.114 * b;
                                unsigned char u = 128 - 0.168736 * r - 0.331264 * g + 0.5 * b;
                                unsigned char v = 128 + 0.5 * r - 0.418688 * g - 0.081312 * b;
                                px[0] = (x % 2) ? u : v;
                                px[1] = luma;
                                break;
                            }
                            case YUV24: {
                                unsigned char *px = (unsigned char *)f->image(x,y);
                                unsigned char r = rawR > 1000 ? 250 : rawR / 4;
                                unsigned char g = rawG > 1000 ? 250 : rawG / 4;
                                unsigned char b = rawB > 1000 ? 250 : rawB / 4;
                                px[0] = 0.299 * r + 0.587 * g + 0.114 * b;
                                px[1] = 128 - 0.168736 *r - 0.331264 * g + 0.5 * b;
                                px[2] = 128 + 0.5*r - 0.418688*g - 0.081312*b;
                                break;
                            }
                            case RAW: {
                                unsigned short rawVal;
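                                // Bayer mosaic: green where x and y have the
                                // same parity, red on odd columns of even
                                // rows, blue elsewhere (a GRBG layout).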
                                if ((x % 2 == 0 && y % 2 == 0) ||
                                    (x % 2 == 1 && y % 2 == 1) ) {
                                    rawVal = rawG;
                                } else if (x % 2 == 1 && y % 2 == 0) {
                                    rawVal = rawR;
                                } else {
                                    rawVal = rawB;
                                }
                                    
                                *(unsigned short *)f->image(x,y) = rawVal;
                                break; 
                            }
                            default:
                                break;
                            }                                
                        }  
                    }
                }
                f->_bayerPattern = sensor->platform().bayerPattern();
                f->_minRawValue = sensor->platform().minRawValue();
                f->_maxRawValue = sensor->platform().maxRawValue();
                f->_manufacturer = sensor->platform().manufacturer();
                f->_model = sensor->platform().model();
                sensor->platform().rawToRGBColorMatrix(3200, f->rawToRGB3200K);
                sensor->platform().rawToRGBColorMatrix(7000, f->rawToRGB7000K);
                f->processingDoneTime = Time::now();
                break;
            case FILE:
                if (f->image.type() != RAW) {
                    error(Event::InternalError, sensor, "Dummy::Sensor: Non-RAW image requested from a source DNG file. Not supported.");
                    f->image = Image();                        
                } else {
                    dprintf(4, "Dummy::Sensor::Daemon: Loading %s\n", f->srcFile.c_str());
                    FCam::Frame dng = loadDNG(f->srcFile);
                    if (!dng.valid()) {
                        error(Event::InternalError, sensor, "Dummy::Sensor: Unable to load file %s as a source Frame.", f->srcFile.c_str());
                    } else {
                        if (!f->image.discard()) {
                            f->image = dng.image();
                        } else {
                            f->image = Image(dng.image().size(), dng.image().type(), Image::Discard);
                        }
                        f->exposureStartTime = dng.exposureStartTime();
                        f->exposureEndTime = dng.exposureEndTime();
                        f->processingDoneTime = dng.processingDoneTime();
                        f->exposure = dng.exposure();
                        f->frameTime = dng.frameTime();
                        f->gain = dng.gain();
                        f->whiteBalance = dng.whiteBalance();
                        f->histogram = dng.histogram();
                        f->sharpness = dng.sharpness();
                        f->tags = dng.tags();
                        f->_bayerPattern = dng.platform().bayerPattern();
                        f->_minRawValue = dng.platform().minRawValue();
                        f->_maxRawValue = dng.platform().maxRawValue();
                        f->_manufacturer = dng.platform().manufacturer();
                        f->_model = dng.platform().model();
                        dng.platform().rawToRGBColorMatrix(3200, f->rawToRGB3200K);
                        dng.platform().rawToRGBColorMatrix(7000, f->rawToRGB7000K);
                    }
                }                
            }
            frameQueue.push(f);
        }
    }
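The sleep logic above converts a duration in microseconds into the separate seconds and nanoseconds fields of a timespec before calling nanosleep. A minimal standalone sketch of that conversion (plain POSIX, no FCam dependencies; the helper name is ours):

    #include <time.h>   // timespec, nanosleep (POSIX)
    #include <stdio.h>

    // Sleep for the given number of microseconds, split into the
    // whole-second and nanosecond fields that nanosleep() expects.
    static void sleepMicroseconds(long us) {
        timespec t;
        t.tv_sec  = us / 1000000;          // whole seconds
        t.tv_nsec = (us % 1000000) * 1000; // remainder, as nanoseconds
        nanosleep(&t, NULL);
    }

    int main() {
        printf("sleeping 100 ms...\n");
        sleepMicroseconds(100000);
        return 0;
    }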
Example #2

// This method is the main workhorse, and is run by the camera thread.
static void *FCamAppThread(void *ptr) {
    FCAM_INTERFACE_DATA *tdata = (FCAM_INTERFACE_DATA *)ptr;
    Timer timer;
    JNIEnv *env;
    tdata->javaVM->AttachCurrentThread(&env, 0);
    writer = 0; // Initialized on the first PARAM_OUTPUT_DIRECTORY set request.

    // Initialize FCam devices.
    FCam::Tegra::Sensor sensor;
    FCam::Tegra::Lens lens;
    FCam::Tegra::Flash flash;
    sensor.attach(&lens);
    sensor.attach(&flash);
    MyAutoFocus autofocus(&lens);
    MyFaceDetector faceDetector("/data/fcam/face.xml");

    FCam::Image previewImage(PREVIEW_IMAGE_WIDTH, PREVIEW_IMAGE_HEIGHT, FCam::YUV420p);
    FCam::Tegra::Shot shot;

    // Initialize FPS stat calculation.
    tdata->captureFps = 30; // assuming 30hz
    double fpsUpdateTime = timer.get();
    int frameCount = 0;

    // Local task queue that processes messages from the Android application.
    std::queue<ParamSetRequest> taskQueue;
    ParamSetRequest task;

    for (;;) {
        FCAM_SHOT_PARAMS *currentShot = &tdata->currentShot;
        FCAM_SHOT_PARAMS *previousShot = &tdata->previousShot;
        // Copy tasks to local queue
        sAppData->requestQueue.consumeAll(taskQueue);

        // Parse all tasks from the Android applications.
        while (!taskQueue.empty()) {
            task = taskQueue.front();
            taskQueue.pop();

            bool prevValue;
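            // task.getId() packs the task type in the low 16 bits and a
            // per-picture index in the high 16 bits.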
            int taskId = task.getId() & 0xffff;
            int *taskData = (int *)task.getData();
            int pictureId = task.getId() >> 16;

            switch (taskId) {
            case PARAM_SHOT:
                // Note: Exposure is bounded below at 1/1000 (FCam bug?)
                currentShot->captureSet[pictureId].exposure = taskData[SHOT_PARAM_EXPOSURE] < 1000 ? 1000 : taskData[SHOT_PARAM_EXPOSURE];
                currentShot->captureSet[pictureId].focus = taskData[SHOT_PARAM_FOCUS];
                currentShot->captureSet[pictureId].gain = taskData[SHOT_PARAM_GAIN];
                currentShot->captureSet[pictureId].wb = taskData[SHOT_PARAM_WB];
                currentShot->captureSet[pictureId].flashOn = taskData[SHOT_PARAM_FLASH];
                break;
            case PARAM_PREVIEW_EXPOSURE:
                currentShot->preview.user.exposure = taskData[0];
                break;
            case PARAM_PREVIEW_FOCUS:
                currentShot->preview.user.focus = taskData[0];
                break;
            case PARAM_PREVIEW_GAIN:
                currentShot->preview.user.gain = taskData[0];
                break;
            case PARAM_PREVIEW_WB:
                currentShot->preview.user.wb = taskData[0];
                break;
            case PARAM_PREVIEW_AUTO_EXPOSURE_ON:
                prevValue = currentShot->preview.autoExposure;
                currentShot->preview.autoExposure = taskData[0] != 0;
                if (!prevValue && prevValue ^ currentShot->preview.autoExposure != 0) {
                    previousShot->preview.evaluated.exposure = currentShot->preview.user.exposure;
                } else {
                    currentShot->preview.user.exposure = previousShot->preview.evaluated.exposure;
                }
                break;
            case PARAM_PREVIEW_AUTO_FOCUS_ON:
                prevValue = currentShot->preview.autoFocus;
                currentShot->preview.autoFocus = taskData[0] != 0;
                if (!prevValue && prevValue ^ currentShot->preview.autoFocus != 0) {
                    previousShot->preview.evaluated.focus = currentShot->preview.user.focus;
                } else {
                    currentShot->preview.user.focus = previousShot->preview.evaluated.focus;
                }
                break;
            case PARAM_PREVIEW_AUTO_GAIN_ON:
                prevValue = currentShot->preview.autoGain;
                currentShot->preview.autoGain = taskData[0] != 0;
                if (!prevValue && prevValue ^ currentShot->preview.autoGain != 0) {
                    previousShot->preview.evaluated.gain = currentShot->preview.user.gain;
                } else {
                    currentShot->preview.user.gain = previousShot->preview.evaluated.gain;
                }
                break;
            case PARAM_PREVIEW_AUTO_WB_ON:
                prevValue = currentShot->preview.autoWB;
                currentShot->preview.autoWB = taskData[0] != 0;
                if (!prevValue && prevValue ^ currentShot->preview.autoWB != 0) {
                    previousShot->preview.evaluated.wb = currentShot->preview.user.wb;
                } else {
                    currentShot->preview.user.wb = previousShot->preview.evaluated.wb;
                }
                break;
            case PARAM_RESOLUTION:
                break;
            case PARAM_BURST_SIZE:
                currentShot->burstSize = taskData[0];
                break;
            case PARAM_OUTPUT_FORMAT:
                break;
            case PARAM_VIEWER_ACTIVE:
                tdata->isViewerActive = taskData[0] != 0;
                break;
            case PARAM_OUTPUT_DIRECTORY:
                if (writer == 0) {
                    writer = new AsyncImageWriter((char *)task.getData());
                    writer->setOnFileSystemChangedCallback(OnFileSystemChanged);
                }
                break;
            case PARAM_OUTPUT_FILE_ID:
                AsyncImageWriter::SetFreeFileId(taskData[0]);
                break;
            case PARAM_TAKE_PICTURE:
                if (writer != 0 && task.getDataAsInt() != 0) { // Don't take picture if we can't write out.
                    // capture begin
                    tdata->isCapturing = true;
                    // notify capture start
                    env->CallVoidMethod(tdata->fcamInstanceRef, tdata->notifyCaptureStart);
                    OnCapture(tdata, writer, sensor, flash, lens);
                    // capture done
                    tdata->isCapturing = false;
                    // notify capture completion
                    env->CallVoidMethod(tdata->fcamInstanceRef, tdata->notifyCaptureComplete);
                }
                break;
            case PARAM_PRIV_FS_CHANGED:
                if (taskData[0] != 0) {
                    // notify fs change
                    env->CallVoidMethod(tdata->fcamInstanceRef, tdata->notifyFileSystemChange);
                }
                break;
            /* [CS478]
             * You will probably want extra cases here, to handle messages
             * that request autofocus to be activated. Define any new
             * message types in ParamSetRequest.h.
             */
            case PARAM_AUTO_FOCUS_LOCAL_REG:
                //LOG("MYFOCUS local focus switch\n");
                autofocus.state = AUTO_FOCUS_FOCUS;
                autofocus.setRect(taskData[0] - RECT_EDGE_LEN / 2, taskData[1] - RECT_EDGE_LEN / 2);//hack TODO
                autofocus.startSweep();
                break;
            case PARAM_AUTO_FOCUS_GLOBAL:
                //LOG("MYFOCUS global focus switch\n");
                autofocus.state = AUTO_FOCUS_FOCUS;
                autofocus.setRect(0, 0, PREVIEW_IMAGE_WIDTH, PREVIEW_IMAGE_HEIGHT);
                autofocus.startSweep();
                break;

            /* [CS478] Assignment #2
             * You will probably want yet another case here to handle face-
             * based autofocus. Recall that it might be useful to add a new
             * message type in ParamSetRequest.h
             */
            case PARAM_AUTO_FOCUS_FACE:
                LOG("MYFOCUS face focus switch\n");
                autofocus.state = AUTO_FOCUS_FACE_DETECT;
                autofocus.fdWait();
                //autofocus.startFaceDetect();
                break;
            // TODO TODO TODO
            default:
                ERROR("TaskDispatch(): received unsupported task id (%i)!", taskId);
            }
        }

        if (!tdata->isViewerActive) continue; // Viewer is inactive, so skip capture.

        // Setup preview shot parameters.
        shot.exposure = currentShot->preview.autoExposure ? previousShot->preview.evaluated.exposure : currentShot->preview.user.exposure;
        shot.gain = currentShot->preview.autoGain ? previousShot->preview.evaluated.gain : currentShot->preview.user.gain;
        shot.whiteBalance = currentShot->preview.autoWB ? previousShot->preview.evaluated.wb : currentShot->preview.user.wb;
        shot.image = previewImage;
        shot.histogram.enabled = true;
        shot.histogram.region = FCam::Rect(0, 0, PREVIEW_IMAGE_WIDTH, PREVIEW_IMAGE_HEIGHT);
        shot.sharpness.enabled = currentShot->preview.autoFocus;
        shot.sharpness.size = FCam::Size(16, 12);
        shot.fastMode = true;
        shot.clearActions();

        // If in manual focus mode, and the lens is not at the right place, add an action to move it.
        if (!currentShot->preview.autoFocus && previousShot->preview.user.focus != currentShot->preview.user.focus) {
            shot.clearActions();
            FCam::Lens::FocusAction focusAction(&lens);
            focusAction.time = 0;
            focusAction.focus = currentShot->preview.user.focus;
            shot.addAction(focusAction);
        }

        // Send the shot request to FCam.
        sensor.stream(shot);

        // Fetch the incoming frame from FCam.
        FCam::Frame frame = sensor.getFrame();

        // Process the incoming frame. If autoExposure or autoGain is enabled, update parameters based on the frame.
        if (currentShot->preview.autoExposure || currentShot->preview.autoGain) {
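            // Let FCam meter off this frame; the trailing 0.3 is the
            // smoothness factor (how gradually exposure/gain converge).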
            FCam::autoExpose(&shot, frame, sensor.maxGain(), sensor.maxExposure(), sensor.minExposure(), 0.3);
            currentShot->preview.evaluated.exposure = shot.exposure;
            currentShot->preview.evaluated.gain = shot.gain;
        }

        // Process the incoming frame. If autoWB is enabled, update parameters based on the frame.
        if (currentShot->preview.autoWB) {
            FCam::autoWhiteBalance(&shot, frame);
            currentShot->preview.evaluated.wb = shot.whiteBalance;
        }

        if (autofocus.state == AUTO_FOCUS_FACE_DETECT) {
            std::vector<cv::Rect> facesFound = faceDetector.detectFace(frame.image());
            for (unsigned int i = 0; i < facesFound.size(); i++) {
                cv::Rect r = facesFound[i];
                for (int x = 0; x < r.width; x++) {
                    frame.image()(r.x + x, r.y)[0] = 254u;
                    frame.image()(r.x + x, r.y + r.height)[0] = 254u;
                }
                for (int y = 0; y < r.height; y++) {
                    frame.image()(r.x, r.y + y)[0] = 254u;
                    frame.image()(r.x + r.width, r.y + y)[0] = 254u;
                }
            }
            if (facesFound.size() != 0)
                autofocus.setRects(facesFound);

            autofocus.fdWait();
        }
        /* [CS478] Assignment #2
         * Above, facesFound contains the list of detected faces, for the given frame.
         * If applicable, you may pass these values to the MyAutoFocus instance.
         *
         * e.g. autofocus.setTarget(facesFound);
         * Note that MyAutoFocus currently has no setTarget method. You'd have
         * to write the appropriate interface.
         *
         * You should also only run faceDetector.detectFace(...) when it
         * is necessary (to save compute); here the call is gated on the
         * AUTO_FOCUS_FACE_DETECT state above.
         */
        // TODO TODO TODO

        /* [CS478] Assignment #1
         * You should process the incoming frame for autofocus, if necessary.
         * Your autofocus (MyAutoFocus.h) has a function called update(...).
         */

        if(autofocus.state == AUTO_FOCUS_FOCUS)
        {
            autofocus.update(frame);
            //LOG("MYFOCUS update called\n");
        }
        if(currentShot->preview.autoFocus)
        {
            currentShot->preview.evaluated.focus = (float) frame["lens.focus"];
        }
        // TODO TODO TODO

        // Update histogram data
        const FCam::Histogram &histogram = frame.histogram();
        int maxBinValue = 1;
        for (int i = 0; i < 64; i++) {
            int currBinValue = histogram(i);
            maxBinValue = (currBinValue > maxBinValue) ? currBinValue : maxBinValue;
            currentShot->histogramData[i * 4] = currBinValue;
        }
        float norm = 1.0f / maxBinValue;
        for (int i = 0; i < 64; i++) {
            currentShot->histogramData[i * 4] *= norm;
            currentShot->histogramData[i * 4 + 1] = 0.0f;
            currentShot->histogramData[i * 4 + 2] = 0.0f;
            currentShot->histogramData[i * 4 + 3] = 0.0f;
        }

        // Update the frame buffer.
        uchar *src = (uchar *)frame.image()(0, 0);
        FCam::Tegra::Hal::SharedBuffer *captureBuffer = tdata->tripleBuffer->getBackBuffer();
        uchar *dest = (uchar *)captureBuffer->lock();

        // Note: why do we need to shuffle U and V channels? It seems to be a bug.
        memcpy(dest, src, PI_PLANE_SIZE);
        memcpy(dest + PI_U_OFFSET, src + PI_V_OFFSET, PI_PLANE_SIZE >> 2);
        memcpy(dest + PI_V_OFFSET, src + PI_U_OFFSET, PI_PLANE_SIZE >> 2);
        captureBuffer->unlock();
        tdata->tripleBuffer->swapBackBuffer();

        // Frame capture complete, copy current shot data to previous one
        pthread_mutex_lock(&tdata->currentShotLock);
        memcpy(&tdata->previousShot, &tdata->currentShot, sizeof(FCAM_SHOT_PARAMS));
        pthread_mutex_unlock(&tdata->currentShotLock);
        frameCount++;

        // Update FPS
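        // timer.get() evidently returns milliseconds (note the 1000.0/dt
        // scale), so fps = frames divided by seconds elapsed.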
        double time = timer.get();
        double dt = time - fpsUpdateTime;
        if (dt > FPS_UPDATE_PERIOD) {
            float fps = frameCount * (1000.0 / dt);
            fpsUpdateTime = time;
            frameCount = 0;
            tdata->captureFps = fps;
        }
    }

    // Delete the instance ref while still attached, then detach from the JVM
    // (env is invalid after DetachCurrentThread).
    env->DeleteGlobalRef(tdata->fcamInstanceRef);
    tdata->javaVM->DetachCurrentThread();

    return 0;
}
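The loop above drains the shared request queue wholesale into a thread-local std::queue and then processes messages without holding any lock. A minimal sketch of a consumeAll-style queue supporting this pattern (our own names and locking; FCam's actual implementation may differ):

    #include <mutex>
    #include <queue>
    #include <vector>

    template <typename T>
    class SharedRequestQueue {
    public:
        void push(const T &item) {
            std::lock_guard<std::mutex> lock(mutex_);
            pending_.push_back(item);
        }
        // Move everything into the caller's queue in one short
        // critical section, so processing happens lock-free.
        void consumeAll(std::queue<T> &out) {
            std::lock_guard<std::mutex> lock(mutex_);
            for (const T &item : pending_) out.push(item);
            pending_.clear();
        }
    private:
        std::mutex mutex_;
        std::vector<T> pending_;
    };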
Example #3

void CameraThread::run ()
{
  // Make an asynchronous file writer to save images in the background
  FCam::AsyncFileWriter writer;
  Plat::Sensor sensor;
  Plat::Lens lens;
  Plat::Flash flash;

  // tell the sensor that the flash and the lens will be tagging
  // frames that come back from it
  sensor.attach ( &flash );
  sensor.attach ( &lens );

  // Make a helper autofocus object
  FCam::AutoFocus autoFocus ( &lens );

  // The viewfinder shot
  FCam::Shot viewfinder;
  viewfinder.exposure = 40000;
  viewfinder.gain = 1.0f;
  // run at 25 fps
  viewfinder.frameTime = 40000;
  // dump image data directly into the frame buffer
  viewfinder.image = overlay;
  // enable histograms and sharpness maps
  viewfinder.histogram.enabled = true;
  viewfinder.histogram.region = FCam::Rect ( 0, 0, 640, 480 );
  viewfinder.sharpness.enabled = true;
  viewfinder.sharpness.size = FCam::Size ( 16, 12 );

  // The full photograph (480x360 UYVY here). We'll set the exposure,
  // frameTime, and gain later, after we meter. Default parameters
  // apply (no histograms or sharpness), and image memory is
  // auto-allocated for each new photograph, so that multiple unique
  // photographs can be saving at once.
  FCam::Shot photo;
  photo.image = FCam::Image ( 480, 360, FCam::UYVY, FCam::Image::AutoAllocate );

  bool takeSnapshot = false;
  bool halfDepress = false;
  bool fullDepress = false;

  // stream the viewfinder
  sensor.stream ( viewfinder );

  while ( keepGoing )
  {
    // deal with FCam events
    FCam::Event e;
    while ( FCam::getNextEvent ( &e ) )
    {
      cout << e.description << endl;
      switch ( e.type )
      {
      case FCam::Event::FocusPressed:
        if ( autoFocus.idle () )
        {
          autoFocus.startSweep ();
        }
        halfDepress = true;
        break;
      case FCam::Event::FocusReleased:
        halfDepress = false;
        break;
      case FCam::Event::ShutterPressed:
        takeSnapshot = true;
        fullDepress = true;
        break;
      case FCam::Event::ShutterReleased:
        fullDepress = false;
        break;
      }
    }

    // Take a picture once autofocus completes and we have space to store the frame
    if ( takeSnapshot && autoFocus.idle () && writer.savesPending () < 8 )
    {
      // use the metering the viewfinder has been doing
      photo.exposure = viewfinder.exposure;
      photo.gain = viewfinder.gain;
      photo.whiteBalance = viewfinder.whiteBalance;
      sensor.capture ( photo );
      takeSnapshot = false;
    }

    // Drain the queue
    FCam::Frame f;
    do
    {
      f = sensor.getFrame ();

      if ( f.shot ().id == photo.id )
      {
        // Our photo came back, asynchronously save it to disk
        // with a unique filename. We use the exposure start
        // time for now just so we don't have to keep a
        // globally unique numbering.
        if ( !f.image ().valid () )
        {
          printf ( "ERROR: Photo dropped!\n" );
          continue;
        }
        else
        {
          printf ( "Got a frame\n" );
        }

        emit imageCaptured ( f );
      }
      else
      {
        // update the autofocus and metering algorithms
        autoFocus.update ( f );
        autoExpose ( &viewfinder, f );
        autoWhiteBalance ( &viewfinder, f );
        sensor.stream ( viewfinder );
      }
    }
    while ( sensor.framesPending () );
  }
}
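In this Qt variant the actual save happens in whatever slot is connected to imageCaptured; the AsyncFileWriter created at the top is only consulted via savesPending(). The stock FCam examples instead save inline, roughly like this (a fragment, not this author's code; the directory is hypothetical, and the API should be checked against your FCam version):

    // Inside the (f.shot().id == photo.id) branch, in place of the signal
    // (requires <sstream>):
    std::ostringstream fname;
    fname << "/home/user/photos/" << f.exposureStartTime().toString() << ".dng";
    writer.saveDNG(f, fname.str()); // queues the write and returns immediately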