Example #1
UEyeCaptureInterface::FramePair UEyeCaptureInterface::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();
    FramePair result( NULL, NULL);

//    printf("Called getFrame\n");

    protectFrame.lock();
        /* Decode the most recent captured buffers from both cameras while the frame lock is held */
        decodeData(&leftCamera , currentLeft,  &(result.bufferLeft));
        decodeData(&rightCamera, currentRight, &(result.bufferRight));

        /* Both output timestamps are taken from the right camera's buffer */
        result.timeStampLeft = result.timeStampRight = currentRight->usecsTimeStamp();

        stats.framesSkipped = skippedCount > 0 ? skippedCount - 1 : 0;
        skippedCount = 0;

        stats.triggerSkipped = triggerSkippedCount;
        triggerSkippedCount = 0;

        int64_t internalDesync = currentLeft->internalTimestamp - currentRight->internalTimestamp;
    protectFrame.unlock();

    stats.values[CaptureStatistics::DECODING_TIME]    = start.usecsToNow();
    stats.values[CaptureStatistics::INTERFRAME_DELAY] = frameDelay;

    int64_t desync = result.diffTimeStamps();
    stats.values[CaptureStatistics::DESYNC_TIME]          = CORE_ABS(desync);
    stats.values[CaptureStatistics::INTERNAL_DESYNC_TIME] = CORE_ABS(internalDesync);

    /* Get temperature data */
    stats.temperature[0] = leftCamera .getTemperature();
    stats.temperature[1] = rightCamera.getTemperature();

    //stats.values[CaptureStatistics::DATA_SIZE] = currentLeft.bytesused;

    emit newStatisticsReady(stats);

//    printf("Finished getFrame\n");
    return result;
}
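
A minimal caller sketch for getFrame() above; the capture object (capture), its initialization, and the buffer ownership shown here are assumptions rather than part of the example, and only members the example itself uses (bufferLeft, bufferRight, diffTimeStamps()) appear:

/* Assumed: an initialized and started UEyeCaptureInterface named `capture` */
UEyeCaptureInterface::FramePair pair = capture.getFrame();
if (pair.bufferLeft != NULL && pair.bufferRight != NULL)
{
    int64_t desync = pair.diffTimeStamps();   /* the same helper the example uses for DESYNC_TIME */
    /* ... consume the stereo pair here ... */
}
delete pair.bufferLeft;                       /* assumption: the caller owns the decoded buffers */
delete pair.bufferRight;
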
Example #2
V4L2CaptureInterface::FramePair V4L2CaptureInterface::getFrameRGB24()
{
    CaptureStatistics  stats;

    PreciseTimer start = PreciseTimer::currentTime();

    protectFrame.lock();

    FramePair result;

    RGB24Buffer **results[MAX_INPUTS_NUMBER] = {
        &result.buffers[LEFT_FRAME  ].rgbBuffer,
        &result.buffers[RIGHT_FRAME ].rgbBuffer,
        &result.buffers[THIRD_FRAME ].rgbBuffer,
        &result.buffers[FOURTH_FRAME].rgbBuffer
    };

    /* Decode every configured input into its RGB24 slot; a NULL result means the decode failed */
    for (int i = 0; i < MAX_INPUTS_NUMBER; i++)
    {
        decodeDataRGB24(&camera[i],  &currentFrame[i],  results[i]);

        if ((*results[i]) == NULL) {
            printf("V4L2CaptureInterface::getFrameRGB24(): Precrash condition at %d (%s)\n", i, getFrameSourceName((FrameSourceId)i));
        }
    }

    /* Derive G12 buffers for the left and right views from the decoded RGB data */
    if (result.rgbBufferLeft() != NULL) {
        result.setBufferLeft  ( result.rgbBufferLeft() ->toG12Buffer() ); // FIXME
    }
    if (result.rgbBufferRight() != NULL) {
        result.setBufferRight ( result.rgbBufferRight()->toG12Buffer() );
    }

    if (currentFrame[LEFT_FRAME].isFilled)
        result.setTimeStampLeft  (currentFrame[LEFT_FRAME].usecsTimeStamp());

    if (currentFrame[RIGHT_FRAME].isFilled)
        result.setTimeStampRight (currentFrame[RIGHT_FRAME].usecsTimeStamp());

    if (skippedCount == 0)
    {
   //     SYNC_PRINT(("Warning: Requested same frames twice. Is this by design?\n"));
    }

    stats.framesSkipped = skippedCount > 0 ? skippedCount - 1 : 0;
    skippedCount = 0;
    protectFrame.unlock();
    stats.values[CaptureStatistics::DECODING_TIME]    = start.usecsToNow();
    stats.values[CaptureStatistics::INTERFRAME_DELAY] = frameDelay;

    int64_t desync =  currentFrame[LEFT_FRAME ].usecsTimeStamp() -
                      currentFrame[RIGHT_FRAME].usecsTimeStamp();

    stats.values[CaptureStatistics::DESYNC_TIME] = CORE_ABS(desync);
    stats.values[CaptureStatistics::DATA_SIZE]   = currentFrame[LEFT_FRAME].bytesused;
    if (imageInterfaceReceiver != NULL) {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    } else {
        SYNC_PRINT(("Warning:  V4L2CaptureInterface::getFrameRGB24(): imageInterfaceReceiver is NULL\n"));
    }

    return result;
}
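
A similarly hedged consumer sketch for getFrameRGB24(); the capture object is an assumption, and only accessors that appear in the example above (rgbBufferLeft(), rgbBufferRight()) are used:

/* Assumed: an initialized V4L2CaptureInterface named `capture` */
V4L2CaptureInterface::FramePair pair = capture.getFrameRGB24();
if (pair.rgbBufferLeft() != NULL && pair.rgbBufferRight() != NULL)
{
    /* Both RGB24 frames were decoded; G12 copies were attached via setBufferLeft()/setBufferRight() */
    /* ... process pair.rgbBufferLeft() and pair.rgbBufferRight() here ... */
}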