Example #1
ImageCaptureInterface::FramePair RTSPCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    FramePair result = fcb.dequeue();

    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    emit newStatisticsReady(stats);

    if (!mIsPaused)
    {        
        frame_data_t frameData;
        frameData.timestamp = fcb.secondFrameTimestamp();
        //SYNC_PRINT(("RTSPCapture::getFrame(): sending notification ts = %d\n", frameData.timestamp));
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("RTSPCapture::getFrame(): Paused\n"));
    }

    return result;
}
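
The getFrame() above fills in two timing statistics: DECODING_TIME (how long the dequeue took) and INTERFRAME_DELAY (time since the previous frame was produced). Below is a minimal standalone sketch of the same bookkeeping using std::chrono instead of PreciseTimer/CaptureStatistics; all names in it are hypothetical and not part of the library API.

#include <chrono>
#include <cstdint>
#include <cstdio>
#include <thread>

// Hypothetical stand-in for the PreciseTimer/CaptureStatistics pair used above:
// measures decoding time for the current frame and the delay since the last one.
struct FrameTimingStats {
    int64_t decodingTimeUs    = 0;
    int64_t interframeDelayUs = 0;
};

class FrameTimer {
public:
    template <typename ProduceFrame>
    FrameTimingStats measure(ProduceFrame &&produceFrame)
    {
        using clock = std::chrono::steady_clock;
        FrameTimingStats stats;

        clock::time_point start = clock::now();
        produceFrame();                                   // dequeue/decode the frame
        stats.decodingTimeUs = toUs(clock::now() - start);

        if (mHasLastFrame) {                              // skip the very first frame
            stats.interframeDelayUs = toUs(clock::now() - mLastFrameTime);
        }
        mLastFrameTime = clock::now();
        mHasLastFrame  = true;
        return stats;
    }

private:
    static int64_t toUs(std::chrono::steady_clock::duration d)
    {
        return std::chrono::duration_cast<std::chrono::microseconds>(d).count();
    }

    std::chrono::steady_clock::time_point mLastFrameTime;
    bool mHasLastFrame = false;
};

int main()
{
    FrameTimer timer;
    for (int i = 0; i < 3; i++) {
        FrameTimingStats s = timer.measure([] {
            std::this_thread::sleep_for(std::chrono::milliseconds(5)); // fake decode
        });
        std::printf("decode %lld us, interframe %lld us\n",
                    (long long)s.decodingTimeUs, (long long)s.interframeDelayUs);
    }
    return 0;
}

Measuring against a steady (monotonic) clock avoids spurious values when the system clock is adjusted while capture is running.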
Example #2
ImageCaptureInterface::CapErrorCode RTSPCapture::nextFrame()
{
    frame_data_t frameData;
    frameData.timestamp = fcb.firstFrameTimestamp();

    SYNC_PRINT(("RTSPCapture::nextFrame(): sending notification ts = %" PRIu64 "\n", frameData.timestamp));
    notifyAboutNewFrame(frameData);
    return ImageCaptureInterface::SUCCESS;
}
Example #3
ImageCaptureInterface::CapErrorCode AviCapture::nextFrame()
{
    count++;
    frame_data_t frameData;
    frameData.timestamp = (count * 10);
    SYNC_PRINT(("AviCapture::nextFrame(): sending notification\n"));
    notifyAboutNewFrame(frameData);

    return ImageCaptureInterface::SUCCESS;
}
Example #4
ImageCaptureInterface::CapErrorCode RTSPCapture::startCapture()
{
    SYNC_PRINT(("RTSPCapture::startCapture(): called\n"));
    mIsPaused = false;

    frame_data_t frameData;
    frameData.timestamp = 0;
    SYNC_PRINT(("RTSPCapture::startCapture(): sending notification\n"));
    notifyAboutNewFrame(frameData);

    spin.start();

    SYNC_PRINT(("RTSPCapture::startCapture(): exited\n"));
    return ImageCaptureInterface::SUCCESS;
}
Example #5
ImageCaptureInterface::CapErrorCode AviCapture::startCapture()
{
//  return ImageCaptureInterface::CapSuccess1Cam;
    SYNC_PRINT(("AviCapture::startCapture(): called\n"));
    frame_data_t frameData;

    //mIsPaused = false;

    count++;
    frameData.timestamp = (count * 10);
    SYNC_PRINT(("AviCapture::startCapture(): sending notification\n"));
    notifyAboutNewFrame(frameData);


    SYNC_PRINT(("AviCapture::startCapture(): exited\n"));
    return ImageCaptureInterface::SUCCESS;
}
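
Examples #2 through #5 share one pattern: startCapture() and nextFrame() post a notifyAboutNewFrame() event, and getFrame() posts the notification for the following frame as long as the capture is not paused, so the receiver keeps pulling frames. The following is a minimal sketch of that loop with hypothetical stand-in types, not the real ImageCaptureInterface/AviCapture API.

#include <cstdint>
#include <cstdio>

// Minimal sketch of the notification-driven capture pattern (hypothetical types).
// startCapture() announces the first frame; every getFrame() announces the next
// one, so the receiver keeps pulling until the capture is paused.
struct FrameData { uint64_t timestamp = 0; };   // analogue of frame_data_t

class MockCapture {
public:
    void startCapture()
    {
        mPaused = false;
        notify();                       // kick off the pull loop
    }

    int getFrame()
    {
        int frame = mCount;             // stand-in for the decoded FramePair
        if (!mPaused) {
            notify();                   // schedule the next frame
        }
        return frame;
    }

    void pause()   { mPaused = true; }
    bool pending() { bool p = mPending; mPending = false; return p; }

private:
    void notify()
    {
        mCount++;
        FrameData data;
        data.timestamp = (uint64_t)mCount * 10;   // same synthetic timestamps as AviCapture
        mPending = true;                          // stand-in for notifyAboutNewFrame(data)
        std::printf("notify: ts = %llu\n", (unsigned long long)data.timestamp);
    }

    int  mCount   = 0;
    bool mPaused  = true;
    bool mPending = false;
};

int main()
{
    MockCapture cap;
    cap.startCapture();
    while (cap.pending()) {             // the "receiver" reacting to notifications
        int frame = cap.getFrame();
        if (frame >= 3) cap.pause();    // stop after a few frames
    }
    return 0;
}

In the real classes the notification reaches an attached receiver object (compare the imageInterfaceReceiver->newStatisticsReadyCallback(stats) call in Example #6); here a simple pending flag plays that role.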
Example #6
ImageCaptureInterface::FramePair AviCapture::getFrame()
{
    CaptureStatistics  stats;
    PreciseTimer start = PreciseTimer::currentTime();

    //SYNC_PRINT(("AviCapture::getFrame(): called\n"));
    //mProtectFrame.lock();
        FramePair result(NULL, NULL);
        int res;
        while ( (res = av_read_frame(mFormatContext, &mPacket)) >= 0)
        {
            if (mPacket.stream_index == mVideoStream)
            {
                int frame_finished;
                avcodec_decode_video2(mCodecContext, mFrame, &frame_finished, &mPacket);
                av_free_packet(&mPacket);
                if (frame_finished) {
//                    SYNC_PRINT(("AviCapture::getFrame(): Frame ready\n"));
                    break;
                }
            } else {
                av_free_packet(&mPacket);
            }
        }

        if (res >= 0)
        {            
            if (mFrame->format == AV_PIX_FMT_YUV420P ||
                mFrame->format == AV_PIX_FMT_YUVJ420P)
            {
                result.setRgbBufferLeft(new RGB24Buffer(mFrame->height, mFrame->width));
                result.setBufferLeft   (new G12Buffer  (mFrame->height, mFrame->width));
                for (int i = 0; i < mFrame->height; i++)
                {
                    for (int j = 0; j < mFrame->width; j++)
                    {
                        uint8_t y = (mFrame->data[0])[i * mFrame->linesize[0] + j];

                        uint8_t u = (mFrame->data[1])[(i / 2) * mFrame->linesize[1] + (j / 2)];
                        uint8_t v = (mFrame->data[2])[(i / 2) * mFrame->linesize[2] + (j / 2)];

                        result.rgbBufferLeft()->element(i,j) = RGBColor::FromYUV(y,u,v);
                        result.bufferLeft()   ->element(i,j) = (int)y << 4;
                    }
                }

                result.setRgbBufferRight (new RGB24Buffer(result.rgbBufferLeft()));
                result.setBufferRight    (new G12Buffer(result.bufferLeft()));
             } else if (mFrame->format == AV_PIX_FMT_YUV422P ) {
                SYNC_PRINT(("AviCapture::getFrame(): format AV_PIX_FMT_YUV422P \n"));

                return result;
             } else {
                SYNC_PRINT(("AviCapture::getFrame(): Not supported format %d\n", mFrame->format));
                return result;
             }
        } else {
            SYNC_PRINT(("AviCapture::getFrame(): av_read_frame failed with %d", res));
        }


        result.setTimeStampLeft (count * 10);
        result.setTimeStampRight(count * 10);

    //mProtectFrame.unlock();
    stats.values[CaptureStatistics::DECODING_TIME] = start.usecsToNow();

    if (mLastFrameTime.usecsTo(PreciseTimer()) != 0)
    {
        stats.values[CaptureStatistics::INTERFRAME_DELAY] = mLastFrameTime.usecsToNow();
    }
    mLastFrameTime = PreciseTimer::currentTime();
    stats.values[CaptureStatistics::DATA_SIZE] = 0;
    if (imageInterfaceReceiver != NULL)
    {
        imageInterfaceReceiver->newStatisticsReadyCallback(stats);
    }

    if (!mIsPaused)
    {
        //SYNC_PRINT(("AviCapture::getFrame(): New notification sending\n"));
        count++;
        frame_data_t frameData;
        frameData.timestamp = (count * 10);
        notifyAboutNewFrame(frameData);
    } else {
        SYNC_PRINT(("AviCapture::getFrame(): Paused\n"));
    }

    //count++;

    return result;
}
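
The pixel loop in Example #6 walks planar YUV 4:2:0 data: a full-resolution Y plane plus U and V planes at half resolution in both directions, which is why the chroma planes are indexed with (i / 2, j / 2). Below is a standalone sketch of that addressing with a hand-rolled BT.601-style conversion in place of RGBColor::FromYUV; the library's exact coefficients may differ, so treat the formula as an assumption.

#include <algorithm>
#include <cstdint>
#include <vector>

// Standalone sketch of the YUV420P addressing used above: three planes,
// full-resolution Y and quarter-resolution U/V, with per-plane line strides.
// fromYUV() uses a common BT.601-style integer approximation; the exact
// formula inside RGBColor::FromYUV may differ (assumption).
struct Rgb { uint8_t r, g, b; };

static uint8_t clamp8(int v) { return (uint8_t)std::min(255, std::max(0, v)); }

static Rgb fromYUV(uint8_t y, uint8_t u, uint8_t v)
{
    int c = (int)y - 16, d = (int)u - 128, e = (int)v - 128;
    return Rgb{ clamp8((298 * c + 409 * e + 128) >> 8),
                clamp8((298 * c - 100 * d - 208 * e + 128) >> 8),
                clamp8((298 * c + 516 * d + 128) >> 8) };
}

// data[0..2] and linesize[0..2] mirror AVFrame::data / AVFrame::linesize.
std::vector<Rgb> yuv420pToRgb(const uint8_t *data[3], const int linesize[3],
                              int width, int height)
{
    std::vector<Rgb> out((size_t)width * height);
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            uint8_t y = data[0][i * linesize[0] + j];
            // One chroma sample covers a 2x2 block of luma samples.
            uint8_t u = data[1][(i / 2) * linesize[1] + (j / 2)];
            uint8_t v = data[2][(i / 2) * linesize[2] + (j / 2)];
            out[(size_t)i * width + j] = fromYUV(y, u, v);
        }
    }
    return out;
}

For production code, FFmpeg's own libswscale (sws_getContext()/sws_scale()) performs this conversion and handles the other pixel formats that the branches above reject.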