// Example no. 1 (score: 0)
// Returns the presentation time of `frame`, in seconds, for the stream at
// `streamIndex`, by scaling the frame's timestamp (stream time_base ticks)
// through av_q2d(time_base). Falls back to 0 when no timestamp is available.
double movie_player::get_presentation_time_for_frame(AVFrame* frame, int streamIndex) const
{
    int64_t pts;
#ifdef CORSIX_TH_USE_LIBAV
    // Libav build: no best-effort helper. Prefer the frame's own pts and
    // fall back to the DTS of the packet that produced this frame.
    pts = frame->pts;
    if (pts == AV_NOPTS_VALUE)
    {
        pts = frame->pkt_dts;
    }
#else
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54, 18, 100)
    // Very old FFmpeg: best_effort_timestamp is only reachable through the
    // AVOptions introspection API on the frame class.
    pts = *(int64_t*)av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
#elif LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 18, 100)
    // Mid-range FFmpeg: use the accessor (removed in later releases).
    pts = av_frame_get_best_effort_timestamp(frame);
#else
    // Modern FFmpeg: the field is part of the public AVFrame struct.
    pts = frame->best_effort_timestamp;
#endif //LIBAVCODEC_VERSION_INT
#endif //CORSIX_TH_USE_LIBAV

    if (pts == AV_NOPTS_VALUE)
    {
        pts = 0; // no usable timestamp at all; treat as start of stream
    }

    return pts * av_q2d(format_context->streams[streamIndex]->time_base);
}
// Example no. 2 (score: 0)
// Pulls the next packet from the video queue and decodes it into pFrame.
//
// Returns:
//   1  - a complete frame was produced; *piPts holds its timestamp in
//        stream time_base units (0 when the decoder reported none).
//   0  - no frame this call (flush packet, decode error, or the decoder
//        needs more input).
//   -1 - the queue returned no packet (stream exhausted / aborted).
int THMovie::getVideoFrame(AVFrame *pFrame, int64_t *piPts)
{
    int iGotPicture = 0;
    int iError;

    // Blocking pull: waits until a packet is available or the queue is done.
    AVPacket *pPacket = m_pVideoQueue->pull(true);
    if(pPacket == nullptr)
    {
        return -1;
    }

    // Sentinel packet shares its data pointer with m_flushPacket.
    // NOTE(review): this path returns without av_packet_unref/av_free on
    // pPacket, unlike the path below — confirm the queue owns flush packets,
    // otherwise the struct leaks here.
    if(pPacket->data == m_flushPacket->data)
    {
        //TODO: Flush

        return 0;
    }

    iError = avcodec_decode_video2(m_pVideoCodecContext, pFrame, &iGotPicture, pPacket);
    // The packet is fully consumed by decode_video2; release it either way.
    av_packet_unref(pPacket);
    av_free(pPacket);

    if(iError < 0)
    {
        return 0;
    }

    if(iGotPicture)
    {
        iError = 1;

#ifdef CORSIX_TH_USE_LIBAV
        // Libav build: no best-effort helper; prefer pts, fall back to the
        // DTS of the packet that produced this frame.
        *piPts = pFrame->pts;
        if (*piPts == AV_NOPTS_VALUE)
        {
            *piPts = pFrame->pkt_dts;
        }
#else
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54, 18, 100)
        // Very old FFmpeg: best_effort_timestamp only via AVOptions.
        *piPts = *(int64_t*)av_opt_ptr(avcodec_get_frame_class(), pFrame, "best_effort_timestamp");
#else
        *piPts = av_frame_get_best_effort_timestamp(pFrame);
#endif //LIBAVCODEC_VERSION_INT
#endif //CORSIX_TH_USE_LIBAV

        if(*piPts == AV_NOPTS_VALUE)
        {
            *piPts = 0; // no usable timestamp; treat as stream start
        }
        return iError;
    }
    return 0;
}
// Drains the input port queue while output buffers are available:
// handles pending port reconfiguration, EOS, codec-config (extradata)
// buffers, lazily opens the ffmpeg decoder, decodes each input buffer,
// converts the picture to YUV420P into the output buffer and stamps it.
// On any fatal error it notifies OMX_EventError and sets mSignalledError.
void SoftFFmpegVideo::onQueueFilled(OMX_U32 portIndex) {
    int err = 0;

    // Stop feeding the decoder after a fatal error, or while the output
    // port is being reconfigured (buffers will be re-queued afterwards).
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;

        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        // Picture size changed: push the new geometry to the codec context,
        // announce a port settings change and bail until it completes.
        if (mCtx->width != mWidth || mCtx->height != mHeight) {
            mCtx->width = mWidth;
            mCtx->height = mHeight;
            mStride = mWidth;

            updatePortDefinitions();

            notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
            mOutputPortSettingsChange = AWAITING_DISABLED;
            return;
        }

        // End of stream: return the input buffer and emit an empty
        // EOS-flagged output buffer.
        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            notifyEmptyBufferDone(inHeader);

            outHeader->nFilledLen = 0;
            outHeader->nFlags = OMX_BUFFERFLAG_EOS;

            outQueue.erase(outQueue.begin());
            outInfo->mOwnedByUs = false;
            notifyFillBufferDone(outHeader);
            return;
        }

        if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
            LOGI("got extradata, ignore: %d, size: %lu", mIgnoreExtradata, inHeader->nFilledLen);
            hexdump(inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen);
            if (!mExtradataReady && !mIgnoreExtradata) {
                //if (mMode == MODE_H264)
                // it is possible to receive multiple input buffer with OMX_BUFFERFLAG_CODECCONFIG flag.
                // for example, H264, the first input buffer is SPS, and another is PPS!
                int orig_extradata_size = mCtx->extradata_size;
                int new_extradata_size = orig_extradata_size + inHeader->nFilledLen;
                // realloc into a temporary: assigning the result directly to
                // mCtx->extradata would leak the original buffer on failure
                // (realloc returns NULL but leaves the old block allocated).
                uint8_t *extradata = (uint8_t *)realloc(mCtx->extradata,
                        new_extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                if (!extradata) {
                    LOGE("ffmpeg video decoder failed to alloc extradata memory.");
                    notify(OMX_EventError, OMX_ErrorInsufficientResources, 0, NULL);
                    mSignalledError = true;
                    return;
                }
                // Commit pointer and size only after the allocation succeeded,
                // so mCtx stays consistent on the error path above.
                mCtx->extradata = extradata;
                mCtx->extradata_size = new_extradata_size;

                memcpy(mCtx->extradata + orig_extradata_size,
                        inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen);
                // Zero the padding ffmpeg requires after extradata.
                memset(mCtx->extradata + mCtx->extradata_size, 0, FF_INPUT_BUFFER_PADDING_SIZE);

                inInfo->mOwnedByUs = false;
                inQueue.erase(inQueue.begin());
                inInfo = NULL;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;

                continue;
            }

            if (mIgnoreExtradata) {
                LOGI("got extradata, size: %lu, but ignore it", inHeader->nFilledLen);
                inInfo->mOwnedByUs = false;
                inQueue.erase(inQueue.begin());
                inInfo = NULL;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;

                continue;
            }
            // NOTE(review): when mExtradataReady is already true (and
            // extradata is not ignored) a CODECCONFIG buffer falls through
            // and is decoded as ordinary data — confirm this is intended.
        }

        // Wrap the input buffer in an AVPacket (no copy; pkt borrows the
        // OMX buffer's memory for the duration of the decode call).
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
        pkt.size = inHeader->nFilledLen;
        pkt.pts = inHeader->nTimeStamp;
#if DEBUG_PKT
        LOGV("pkt size: %d, pts: %lld", pkt.size, pkt.pts);
#endif
        // Open the decoder lazily, once all extradata has been gathered.
        if (!mExtradataReady) {
            LOGI("extradata is ready");
            hexdump(mCtx->extradata, mCtx->extradata_size);
            LOGI("open ffmpeg decoder now");
            mExtradataReady = true;

            err = avcodec_open2(mCtx, mCtx->codec, NULL);
            if (err < 0) {
                LOGE("ffmpeg video decoder failed to initialize. (%d)", err);
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }
        }

        int gotPic = 0;
        AVFrame *frame = avcodec_alloc_frame();
        err = avcodec_decode_video2(mCtx, frame, &gotPic, &pkt);
        if (err < 0) {
            LOGE("ffmpeg video decoder failed to decode frame. (%d)", err);
            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
            mSignalledError = true;
            av_free(frame);
            return;
        }

        if (gotPic) {
            AVPicture pict;
            int64_t pts = AV_NOPTS_VALUE;
            uint8_t *dst = outHeader->pBuffer;

            // Lay out the output buffer as planar YUV420: Y plane followed
            // by U and V planes at half resolution.
            memset(&pict, 0, sizeof(AVPicture));
            pict.data[0] = dst;
            pict.data[1] = dst + mStride * mHeight;
            pict.data[2] = pict.data[1] + (mStride / 2  * mHeight / 2);
            pict.linesize[0] = mStride;
            pict.linesize[1] = mStride / 2;
            pict.linesize[2] = mStride / 2;

            int sws_flags = SWS_BICUBIC;
            mImgConvertCtx = sws_getCachedContext(mImgConvertCtx,
                   mWidth, mHeight, mCtx->pix_fmt, mWidth, mHeight,
                   PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
            if (mImgConvertCtx == NULL) {
                LOGE("Cannot initialize the conversion context");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                av_free(frame);
                return;
            }
            sws_scale(mImgConvertCtx, frame->data, frame->linesize,
                  0, mHeight, pict.data, pict.linesize);

            outHeader->nOffset = 0;
            outHeader->nFilledLen = (mStride * mHeight * 3) / 2;
            outHeader->nFlags = 0;
            if (frame->key_frame)
                outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;

            // Timestamp selection, controlled by decoder_reorder_pts:
            //  -1: let ffmpeg pick (best_effort_timestamp via AVOptions),
            //  >0: presentation timestamp of the source packet,
            //   0: decompression timestamp of the source packet.
            if (decoder_reorder_pts == -1) {
                pts = *(int64_t*)av_opt_ptr(avcodec_get_frame_class(),
                        frame, "best_effort_timestamp");
            } else if (decoder_reorder_pts) {
                pts = frame->pkt_pts;
            } else {
                pts = frame->pkt_dts;
            }

            if (pts == AV_NOPTS_VALUE) {
                pts = 0;
            }
            outHeader->nTimeStamp = pts;

#if DEBUG_FRM
            LOGV("frame pts: %lld", pts);
#endif

            outInfo->mOwnedByUs = false;
            outQueue.erase(outQueue.begin());
            outInfo = NULL;
            notifyFillBufferDone(outHeader);
            outHeader = NULL;
        }

        // Input buffer fully consumed; return it and free the frame.
        inInfo->mOwnedByUs = false;
        inQueue.erase(inQueue.begin());
        inInfo = NULL;
        notifyEmptyBufferDone(inHeader);
        inHeader = NULL;
        av_free(frame);
    }
}