Example #1
ImageCaptureInterface::FramePair RTSPCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

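    /* fcb.dequeue() is expected to block until the next frame pair is available */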
    FramePair result = fcb.dequeue();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    emit newStatisticsReady(stats);

    if (!mIsPaused)
    {        
        frame_data_t frameData;
        frameData.timestamp = fcb.secondFrameTimestamp();
        //SYNC_PRINT(("RTSPCapture::getFrame(): sending notification ts = %d\n", frameData.timestamp));
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("RTSPCapture::getFrame(): Paused\n"));
    }

    return result;
}
Example #2
void V4L2CaptureInterface::SpinThread::run()
{
    while (mInterface->spinRunning.tryLock())
    {
        V4L2BufferDescriptor newBufferLeft;
        V4L2BufferDescriptor newBufferRight;

        V4L2CameraDescriptor* left  = &(mInterface->camera[LEFT_FRAME ]);
        V4L2CameraDescriptor* right = &(mInterface->camera[RIGHT_FRAME]);

        V4L2BufferDescriptor* currentLeft  = &(mInterface->currentFrame[LEFT_FRAME ]);
        V4L2BufferDescriptor* currentRight = &(mInterface->currentFrame[RIGHT_FRAME]);

        /* First we block until we get a new frame */
        /* TODO: Wonder if dequeue should be static */
        left->dequeue(newBufferLeft);

        /* If we have only one camera, we assume this is the left camera */
        if (right->deviceHandle != V4L2CameraDescriptor::INVALID_HANDLE) {
            right->dequeue(newBufferRight);
        } else {
           // SYNC_PRINT(("V4L2CaptureInterface::SpinThread::run(): No right cam, not waiting or the new frame\n"));
        }

        uint64_t leftStamp = newBufferLeft.usecsTimeStamp();

        if (right->deviceHandle != V4L2CameraDescriptor::INVALID_HANDLE)
        {
            uint64_t rightStamp = newBufferRight.usecsTimeStamp();

            /* Reduce desync by at most one frame at a time */
            if (leftStamp > rightStamp && leftStamp - rightStamp > mInterface->maxDesync)
            {
                right->enqueue(newBufferRight);
                right->dequeue(newBufferRight);
                rightStamp = newBufferRight.usecsTimeStamp();
            }

            if (rightStamp > leftStamp && rightStamp - leftStamp > mInterface->maxDesync)
            {
                left->enqueue(newBufferLeft);
                left->dequeue(newBufferLeft);
                leftStamp = newBufferLeft.usecsTimeStamp();
            }
        }

        /**
         *  Now we have two new buffers,
         *  so we do some mutex-protected operations:
         *  1. Enqueue the previously used buffers
         *  2. Replace them with the new ones
         **/
        mInterface->protectFrame.lock();
        {
            if (left->deviceHandle != V4L2CameraDescriptor::INVALID_HANDLE)
            {
                left->enqueue(*currentLeft);
                *currentLeft = newBufferLeft;
            }

            if (right->deviceHandle != V4L2CameraDescriptor::INVALID_HANDLE)
            {
                right->enqueue(*currentRight);
                *currentRight = newBufferRight;
            }
            mInterface->skippedCount++;
        }
        mInterface->protectFrame.unlock();


        /* For statistics */
        if (mInterface->lastFrameTime.usecsTo(PreciseTimer()) != 0)
        {
            mInterface->frameDelay = mInterface->lastFrameTime.usecsToNow();
        }
        mInterface->lastFrameTime = PreciseTimer::currentTime();

//        cout << "Frame notification" << endl;
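        /* Average the two timestamps; halving each before adding avoids 64-bit overflow */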
        frame_data_t frameData;
        frameData.timestamp = (currentLeft->usecsTimeStamp() / 2) + (currentRight->usecsTimeStamp() / 2);

        mInterface->notifyAboutNewFrame(frameData);

        mInterface->spinRunning.unlock();
        if (mInterface->shouldStopSpinThread)
        {
            break;
        }
    }
}
Example #3
ALIGN_STACK_SSE void DirectShowCaptureInterface::memberCallback(DSCapDeviceId dev, FrameData data)
{
    //SYNC_PRINT(("Received new frame in a member %d\n", dev));
    mProtectFrame.lock();

    DirectShowCameraDescriptor *camera = NULL;
    if      (mCameras[0].deviceHandle == dev) camera = &mCameras[0];
    else if (mCameras[1].deviceHandle == dev) camera = &mCameras[1];
    else
        goto exit;

    {
        PreciseTimer timer = PreciseTimer::currentTime();
        camera->gotBuffer = true;
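        /* data.timestamp is presumably in 100 ns (DirectShow reference-time) units: +5 then /10 rounds to microseconds */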
        camera->timestamp = (data.timestamp + 5) / 10;
        delete_safe (camera->buffer);
        delete_safe (camera->buffer24);

        if (data.format.type == CAP_YUV)
        {
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, false);
                camera->buffer24->fillWithYUYV((uint8_t *)data.data);
            }
            else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                camera->buffer->fillWithYUYV((uint16_t *)data.data);
            }
        }
        else if (data.format.type == CAP_MJPEG)
        {
            MjpegDecoderLazy *lazyDecoder = new MjpegDecoderLazy;   // don't place it on the stack, it's too large!
            if (mIsRgb)
                camera->buffer24 = lazyDecoder->decodeRGB24((uchar *)data.data);
            else
                camera->buffer   = lazyDecoder->decode((uchar *)data.data);
            delete lazyDecoder;
        }
        else if (data.format.type == CAP_RGB)
        {
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, true);
                int w = camera->buffer24->w;
                int h = camera->buffer24->h;
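                /* The source RGB data is stored bottom-up, so read the rows in reverse order */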
                for (int i = 0; i < h; i++) {
                    uint8_t  *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    RGBColor *rgb24Data = &(camera->buffer24->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *rgb24Data++ = rgb;
                        rgbData += 3;
                    }
                }
            }
            else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                int w = camera->buffer->w;
                int h = camera->buffer->h;
                for (int i = 0; i < h; i++) {
                    uint8_t  *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    uint16_t *greyData = &(camera->buffer->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *greyData++ = rgb.luma12();
                        rgbData += 3;
                    }
                }
            }
        }
        else {
            camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
        }

        camera->decodeTime = timer.usecsToNow();
        /* If both frames are in place */

        if (mCameras[0].gotBuffer && mCameras[1].gotBuffer)
        {
            mCameras[0].gotBuffer = false;
            mCameras[1].gotBuffer = false;

            CaptureStatistics stats;
            int64_t desync = mCameras[0].timestamp - mCameras[1].timestamp;
            stats.values[CaptureStatistics::DESYNC_TIME] = desync > 0 ? desync : -desync;
            stats.values[CaptureStatistics::DECODING_TIME] = mCameras[0].decodeTime + mCameras[1].decodeTime;
            if (lastFrameTime.usecsTo(PreciseTimer()) != 0)
            {
                stats.values[CaptureStatistics::INTERFRAME_DELAY] = lastFrameTime.usecsToNow();
            }
            lastFrameTime = PreciseTimer::currentTime();

            frame_data_t frameData;
            frameData.timestamp = mCameras[0].timestamp / 2 + mCameras[1].timestamp / 2;
            newFrameReady(frameData);
            newStatisticsReady(stats);
        }
        else {
            frame_data_t frameData;
            frameData.timestamp = mCameras[0].timestamp;
            newFrameReady(frameData);
            //newStatisticsReady(stats);
            skippedCount++;
        }
    }
exit:
    mProtectFrame.unlock();
}
Example #4
void UEyeCaptureInterface::SpinThread::run()
{
    qDebug("new frame thread running");
    while (capInterface->spinRunning.tryLock()) {

        //usleep(20000);
        if (capInterface->sync == SOFT_SYNC || capInterface->sync == FRAME_HARD_SYNC) {
            // printf("Both cameras fire!!!\n");
            ueyeTrace(is_FreezeVideo (capInterface->rightCamera.mCamera, IS_DONT_WAIT));
            ueyeTrace(is_FreezeVideo (capInterface->leftCamera .mCamera, IS_DONT_WAIT));
        }

        int result = IS_SUCCESS;

        while ((result = capInterface->rightCamera.waitUEyeFrameEvent(INFINITE)) != IS_SUCCESS)
        {
            SYNC_PRINT(("WaitFrameEvent failed for right camera\n"));
            ueyeTrace(result);
        }
        //SYNC_PRINT(("Got right frame\n"));

        while ((result = capInterface->leftCamera .waitUEyeFrameEvent(INFINITE)) != IS_SUCCESS)
        {
            SYNC_PRINT(("WaitFrameEvent failed for left camera\n"));
            ueyeTrace(result);
        }
        //SYNC_PRINT(("Got left frame\n"));


        /* If we got here, both cameras seem to have produced new frames */

        int bufIDL, bufIDR;
        char *rawBufferLeft  = NULL;
        char *rawBufferRight = NULL;
        HIDS mCameraLeft;
        HIDS mCameraRight;

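        /* Grab the most recent sequence buffer from each camera and lock it so the driver cannot overwrite it */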
        mCameraLeft = capInterface->leftCamera.mCamera;
        is_GetActSeqBuf(mCameraLeft, &bufIDL, NULL, &rawBufferLeft);
        is_LockSeqBuf (mCameraLeft, IS_IGNORE_PARAMETER, rawBufferLeft);
        mCameraRight = capInterface->rightCamera.mCamera;
        is_GetActSeqBuf(mCameraRight, &bufIDR, NULL, &rawBufferRight);
        is_LockSeqBuf (mCameraRight, IS_IGNORE_PARAMETER, rawBufferRight);

       // SYNC_PRINT(("We have locked buffers [%d and %d]\n", bufIDL, bufIDR));

        /* Now exchange the buffers that are visible from outside */
        capInterface->protectFrame.lock();
            UEYEIMAGEINFO imageInfo;

            if (capInterface->currentLeft)
                is_UnlockSeqBuf (mCameraLeft, IS_IGNORE_PARAMETER, (char *)capInterface->currentLeft->buffer);
            is_GetImageInfo (mCameraLeft, bufIDL, &imageInfo, sizeof(UEYEIMAGEINFO));
            capInterface->currentLeft = capInterface->leftCamera.getDescriptorByAddress(rawBufferLeft);
            capInterface->currentLeft->internalTimestamp = imageInfo.u64TimestampDevice;
            capInterface->currentLeft->pcTimestamp = imageInfo.TimestampSystem;

            if (capInterface->currentRight)
                is_UnlockSeqBuf (mCameraRight, IS_IGNORE_PARAMETER, (char *)capInterface->currentRight->buffer);
            is_GetImageInfo (mCameraRight, bufIDR, &imageInfo, sizeof(UEYEIMAGEINFO));
            capInterface->currentRight = capInterface->rightCamera.getDescriptorByAddress(rawBufferRight);
            capInterface->currentRight->internalTimestamp = imageInfo.u64TimestampDevice;
            capInterface->currentRight->pcTimestamp = imageInfo.TimestampSystem;

            capInterface->skippedCount++;

            capInterface->triggerSkippedCount = is_CameraStatus (mCameraRight, IS_TRIGGER_MISSED, IS_GET_STATUS);
        capInterface->protectFrame.unlock();
       /* For statistics */
        if (capInterface->lastFrameTime.usecsTo(PreciseTimer()) != 0)
        {
           capInterface->frameDelay = capInterface->lastFrameTime.usecsToNow();
        }
        capInterface->lastFrameTime = PreciseTimer::currentTime();



        frame_data_t frameData;
        frameData.timestamp = (capInterface->currentLeft->usecsTimeStamp() / 2) + (capInterface->currentRight->usecsTimeStamp() / 2);
        capInterface->notifyAboutNewFrame(frameData);

        capInterface->spinRunning.unlock();
        if (capInterface->shouldStopSpinThread)
        {
            qDebug("Break command received");

            break;
        }
    }
    qDebug("new frame thread finished");
}
Example #5
ImageCaptureInterface::FramePair AviCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    //SYNC_PRINT(("AviCapture::getFrame(): called\n"));
    //mProtectFrame.lock();
        FramePair result(NULL, NULL);
        int res;
        while ( (res = av_read_frame(mFormatContext, &mPacket)) >= 0)
        {
            if (mPacket.stream_index == mVideoStream)
            {
                int frame_finished;
                avcodec_decode_video2(mCodecContext, mFrame, &frame_finished, &mPacket);
                av_free_packet(&mPacket);
                if (frame_finished) {
//                    SYNC_PRINT(("AviCapture::getFrame(): Frame ready\n"));
                    break;
                }
            } else {
                av_free_packet(&mPacket);
            }
        }

        if (res >= 0)
        {            
            if (mFrame->format == AV_PIX_FMT_YUV420P ||
                mFrame->format == AV_PIX_FMT_YUVJ420P)
            {
                result.setRgbBufferLeft(new RGB24Buffer(mFrame->height, mFrame->width));
                result.setBufferLeft   (new G12Buffer  (mFrame->height, mFrame->width));
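                /* YUV420P stores chroma subsampled 2x2, hence the i/2, j/2 indexing below;
                 * the 8-bit luma is shifted left by 4 to fill the 12-bit G12Buffer range */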
                for (int i = 0; i < mFrame->height; i++)
                {
                    for (int j = 0; j < mFrame->width; j++)
                    {
                        uint8_t y = (mFrame->data[0])[i * mFrame->linesize[0] + j];

                        uint8_t u = (mFrame->data[1])[(i / 2) * mFrame->linesize[1] + (j / 2)];
                        uint8_t v = (mFrame->data[2])[(i / 2) * mFrame->linesize[2] + (j / 2)];

                        result.rgbBufferLeft()->element(i,j) = RGBColor::FromYUV(y,u,v);
                        result.bufferLeft()   ->element(i,j) = (int)y << 4;
                    }
                }

                result.setRgbBufferRight (new RGB24Buffer(result.rgbBufferLeft()));
                result.setBufferRight    (new G12Buffer(result.bufferLeft()));
             } else if (mFrame->format == AV_PIX_FMT_YUV422P) {
                SYNC_PRINT(("AviCapture::getFrame(): format AV_PIX_FMT_YUV422P \n"));

                return result;
             } else {
                SYNC_PRINT(("AviCapture::getFrame(): Not supported format %d\n", mFrame->format));
                return result;
             }
        } else {
            SYNC_PRINT(("AviCapture::getFrame(): av_read_frame failed with %d", res));
        }


        result.setTimeStampLeft (count * 10);
        result.setTimeStampRight(count * 10);

    //mProtectFrame.unlock();
    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    if (imageInterfaceReceiver != NULL)
    {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    }

    if (!mIsPaused)
    {
        //SYNC_PRINT(("AviCapture::getFrame(): New notification sending\n"));
        count++;
        frame_data_t frameData;
        frameData.timestamp = (count * 10);
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("AviCapture::getFrame(): Paused\n"));
    }

    //count++;

    return result;
}
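
A pattern shared by all five examples above is the interframe-delay bookkeeping: the time of the previous frame is remembered, and the elapsed microseconds are reported only once a first frame has actually been seen (the usecsTo(PreciseTimer()) != 0 check guards against an uninitialized timer). Below is a minimal self-contained sketch of the same pattern, using std::chrono as a stand-in for the corecvs PreciseTimer; the InterframeDelayMeter class and its names are illustrative, not part of the library.

#include <chrono>
#include <cstdint>
#include <cstdio>
#include <optional>

/* Hypothetical helper mirroring the lastFrameTime / frameDelay bookkeeping above */
class InterframeDelayMeter
{
public:
    /* Returns the delay in microseconds since the previous frame,
     * or no value on the very first frame (nothing to compare against) */
    std::optional<int64_t> onFrame()
    {
        auto now = std::chrono::steady_clock::now();
        std::optional<int64_t> delay;
        if (mLastFrameTime.has_value())
        {
            delay = std::chrono::duration_cast<std::chrono::microseconds>(now - *mLastFrameTime).count();
        }
        mLastFrameTime = now;
        return delay;
    }

private:
    std::optional<std::chrono::steady_clock::time_point> mLastFrameTime;
};

int main()
{
    InterframeDelayMeter meter;
    meter.onFrame();                   // first frame: no delay to report
    if (auto delay = meter.onFrame())  // later frames: microseconds since the previous one
    {
        std::printf("interframe delay: %lld us\n", (long long)*delay);
    }
    return 0;
}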