Example #1
HRESULT CDXVADecoderH264::DisplayStatus()
{
    HRESULT hr = E_INVALIDARG;
    DXVA_Status_H264 Status;

    memset(&Status, 0, sizeof(Status));
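    // CHECK_HR_TRACE is assumed to assign the call's result to hr and return
    // early on failure, so a successful QueryStatus leaves hr == S_OK below.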
    CHECK_HR_TRACE(CDXVADecoder::QueryStatus(&Status, sizeof(Status)));

    TRACE_H264("CDXVADecoderH264::DisplayStatus() : Status for the frame %u : bBufType = %u, bStatus = %u, wNumMbsAffected = %u\n",
               Status.StatusReportFeedbackNumber,
               Status.bBufType,
               Status.bStatus,
               Status.wNumMbsAffected);

    return hr;
}
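
A note on the CHECK_HR_TRACE macro used throughout these examples: it is assumed to assign the call's result to a local hr and to return early on failure. A minimal sketch of such a macro (hypothetical reconstruction, not the project's actual definition):

// Hypothetical sketch: assumes an HRESULT hr is declared in the enclosing scope.
#define CHECK_HR_TRACE(x)                            \
    do {                                             \
        hr = (x);                                    \
        if (FAILED(hr)) {                            \
            TRACE("Error 0x%08x at " #x "\n", hr);   \
            return hr;                               \
        }                                            \
    } while (0)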
Example #2
// === Public functions
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_OK;
    int nFieldType;
    int nSliceType;

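    // Let ffmpeg parse the picture first: this fills the DXVA picture
    // parameters, the quantization matrices and the slice table, and reports
    // the field and slice types.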
    FFMpeg2DecodeFrame(&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
                       m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);

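    // Once a second field is seen, remember that this stream delivers field
    // pairs; the flag stays set so the per-field paths below remain active.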
    if (m_PictureParams.bSecondField && !m_bSecondField) {
        m_bSecondField = true;
    }

    // After a flush, wait for an I frame (and never resume on a second field)
    if (m_bFlushed && (!m_PictureParams.bPicIntra || (m_bSecondField && m_PictureParams.bSecondField))) {
        TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame\n");
        return S_FALSE;
    }

    // Grab a new surface and latch the timestamps only when starting a new
    // picture (a frame or a first field); the second field reuses both.
    if (!m_bSecondField || !m_PictureParams.bSecondField) {
        m_rtStart = rtStart;
        m_rtStop  = rtStop;
        m_pSampleToDeliver = NULL;
        hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop);
        if (FAILED(hr)) {
            ASSERT(hr == VFW_E_NOT_COMMITTED);      // Normal when playback is stopping
            return hr;
        }
    }

    if (m_pSampleToDeliver == NULL) {
        return S_FALSE;
    }

    CHECK_HR_TRACE(BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));

    // Picture parameters only change at the start of a new picture.
    if (!m_bSecondField || !m_PictureParams.bSecondField) {
        UpdatePictureParams(m_nSurfaceIndex);
    }

    TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, SecondField = %d, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
                m_nSurfaceIndex, nSliceType, m_PictureParams.bSecondField, m_nNextCodecIndex, rtStart);

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));

    // Send bitstream to accelerator
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    // Decode frame
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(m_nSurfaceIndex));

    // Deliver once the picture is complete: right away for frame pictures,
    // or after the second field of a field pair has been decoded.
    if (!m_bSecondField || m_PictureParams.bSecondField) {
        AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop,
                   false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
        hr = DisplayNextFrame();
    }

    m_bFlushed = false;

    return hr;
}
Example #3
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_FALSE;
    UINT nSlices = 0;
    int nSurfaceIndex = -1;
    int nFieldType = -1;
    int nSliceType = -1;
    int nFramePOC = INT_MIN;
    int nOutPOC = INT_MIN;
    REFERENCE_TIME rtOutStart = _I64_MIN;
    CH264Nalu Nalu;
    UINT nNalOffset = 0;
    CComPtr<IMediaSample> pSampleToDeliver;
    CComQIPtr<IMPCDXVA2Sample> pDXVA2Sample;
    int slice_step = 1;

    if (FFH264DecodeBuffer(m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart) == -1) {
        return S_FALSE;
    }

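    // Look for slice NALUs using at most two framings: first the container's
    // length-prefixed layout (m_nNALLength), then, if nothing was found,
    // start-code (Annex B) parsing.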
    while (!nSlices && slice_step <= 2) {
        Nalu.SetBuffer(pDataIn, nSize, slice_step == 1 ? m_nNALLength : 0);
        // Bound the scan so the slice tables (MAX_SLICES entries) cannot overflow.
        while (Nalu.ReadNext() && nSlices < MAX_SLICES) {
            switch (Nalu.GetType()) {
                case NALU_TYPE_SLICE:
                case NALU_TYPE_IDR:
                    if (m_bUseLongSlice) {
                        m_pSliceLong[nSlices].BSNALunitDataLocation = nNalOffset;
                        m_pSliceLong[nSlices].SliceBytesInBuffer = (UINT)Nalu.GetDataLength() + 3; //.GetRoundedDataLength();
                        m_pSliceLong[nSlices].slice_id = nSlices;
                        FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

                        if (nSlices > 0) {
                            m_pSliceLong[nSlices - 1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices - 1].first_mb_in_slice;
                        }
                    }
                    nSlices++;
                    nNalOffset += (UINT)(Nalu.GetDataLength() + 3);  // NAL payload plus its 3-byte start code prefix
                    break;
            }
        }
        slice_step++;
    }

    if (!nSlices) {
        return S_FALSE;
    }

    m_nMaxWaiting = min(max(m_DXVAPicParams.num_ref_frames, 3), 8);

    // If parsing fails (probably missing PPS/SPS), skip the frame but keep the
    // stream going; the parameter sets may arrive later (happens on truncated streams)
    if (FAILED(FFH264BuildPicParams(&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor()))) {
        return S_FALSE;
    }

    TRACE_H264("CDXVADecoderH264::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n", nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

    // Wait for an I frame after a flush
    if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
        TRACE_H264("CDXVADecoderH264::DecodeFrame() : Flush - wait I frame\n");
        m_nBrokenFramesFlag = 0;
        m_nBrokenFramesFlag_POC = 0;
        m_nfield_pic_flag = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC = INT_MIN;
        return S_FALSE;
    }

    /* Disabled because it causes serious problems.
        // Some magic code for detecting incorrect decoding of interlaced frames ...
        // TODO : make this more robust, preferably on the ffmpeg side ...
        if (m_nfield_pic_flag && m_nfield_pic_flag == m_DXVAPicParams.field_pic_flag && m_nRefPicFlag == m_DXVAPicParams.RefPicFlag) {
            if (m_nPrevOutPOC == m_nOutPOC && m_nOutPOC == INT_MIN) {
                m_nBrokenFramesFlag_POC++;
            }
            m_nBrokenFramesFlag++;
        } else {
            m_nBrokenFramesFlag     = 0;
            m_nBrokenFramesFlag_POC = 0;
        }
        m_nfield_pic_flag   = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag       = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC       = m_nOutPOC;

        if (m_nBrokenFramesFlag > 4) {
            m_nBrokenFramesFlag = 0;
            if (m_nBrokenFramesFlag_POC > 1) {
                TRACE_H264("CDXVADecoderH264::DecodeFrame() : Detected broken frames ... flush data\n");
                m_nBrokenFramesFlag_POC = 0;
                Flush();
                return S_FALSE;
            }
        }
        //
    */

    CHECK_HR_TRACE(GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
    FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

    CHECK_HR_TRACE(BeginFrame(nSurfaceIndex, pSampleToDeliver));

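    // Tag the frame with a fresh status report number so QueryStatus feedback
    // (see DisplayStatus above) can be matched back to it.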
    m_DXVAPicParams.StatusReportFeedbackNumber++;

    // Send picture parameters
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
    CHECK_HR_TRACE(Execute());

    // Add bitstream, slice control and quantization matrix
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    if (m_bUseLongSlice) {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
    } else {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
    }

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

    // Decode bitstream
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(nSurfaceIndex));

#if defined(_DEBUG) && 0
    DisplayStatus();
#endif

    bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
                             m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType,
                             (FF_SLICE_TYPE)nSliceType, nFramePOC);

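    // Refresh the DXVA reference list from ffmpeg's picture buffer and release
    // surfaces that are no longer referenced.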
    FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
    ClearUnusedRefFrames();

    if (bAdded) {
        hr = DisplayNextFrame();
    }

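    // Latch the display-order POC and start time ffmpeg reported for the next
    // output picture (presumably consumed when reordering frames for display).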
    if (nOutPOC != INT_MIN) {
        m_nOutPOC = nOutPOC;
        m_rtOutStart = rtOutStart;
    }

    m_bFlushed = false;
    return hr;
}
Example #4
// === Public functions
HRESULT CDXVADecoderVC1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr;
    int nSurfaceIndex;
    CComPtr<IMediaSample> pSampleToDeliver;
    int nFieldType, nSliceType;
    UINT nFrameSize, nSize_Result;

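    // Parse the picture header: fills m_PictureParams and, for interlaced
    // streams carrying two fields in one buffer, returns the byte size of the
    // first field in nFrameSize (used below to decode the second field).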
    FFVC1UpdatePictureParam(&m_PictureParams, m_pFilter->GetAVCtx(), &nFieldType, &nSliceType, pDataIn, nSize, &nFrameSize, FALSE, &m_bFrame_repeat_pict);

    if (FFIsSkipped(m_pFilter->GetAVCtx())) {
        return S_OK;
    }

    // Wait for an I frame after a flush
    if (m_bFlushed && !m_PictureParams.bPicIntra) {
        return S_FALSE;
    }

    hr = GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
    if (FAILED(hr)) {
        ASSERT(hr == VFW_E_NOT_COMMITTED);      // Normal when playback is stopping
        return hr;
    }

    CHECK_HR_TRACE(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    TRACE_VC1("CDXVADecoderVC1::DecodeFrame() : PictureType = %d, rtStart = %I64d, Surf = %d\n", nSliceType, rtStart, nSurfaceIndex);

    m_PictureParams.wDecodedPictureIndex = nSurfaceIndex;
    m_PictureParams.wDeblockedPictureIndex = m_PictureParams.wDecodedPictureIndex;

    // Manage reference picture list
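    // For I/P pictures, slide the two-entry reference window: release the
    // oldest surface and make the current one the newest reference.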
    if (!m_PictureParams.bPicBackwardPrediction) {
        if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[0]);
        }
        m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
        m_wRefPictureIndex[1] = nSurfaceIndex;
    }
    m_PictureParams.wForwardRefPictureIndex = (m_PictureParams.bPicIntra == 0) ? m_wRefPictureIndex[0] : NO_REF_FRAME;
    m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;

    m_PictureParams.bPic4MVallowed = (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME && m_PictureParams.bPicStructure == 3) ? 1 : 0;
    m_PictureParams.bPicDeblockConfined |= (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME) ? 0x04 : 0;

    m_PictureParams.bPicScanMethod++;       // Used for status reporting (sections 3.8.1 and 3.8.2)

    TRACE_VC1("CDXVADecoderVC1::DecodeFrame() : Decode frame %d\n", m_PictureParams.bPicScanMethod);

    // Send picture params to accelerator
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

    // Send bitstream to accelerator
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nFrameSize ? nFrameSize : nSize, pDataIn, &nSize_Result));

    m_SliceInfo.wQuantizerScaleCode = 1;    // TODO : 1->31 ???
    m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

    // Decode frame
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(nSurfaceIndex));

    // ***************
    if (nFrameSize) { // Decoding Second Field
        FFVC1UpdatePictureParam(&m_PictureParams, m_pFilter->GetAVCtx(), nullptr, nullptr, pDataIn, nSize, nullptr, TRUE, &m_bFrame_repeat_pict);

        CHECK_HR_TRACE(BeginFrame(nSurfaceIndex, pSampleToDeliver));

        TRACE_VC1("CDXVADecoderVC1::DecodeFrame() : PictureType = %d\n", nSliceType);

        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

        // Send bitstream to accelerator
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize - nFrameSize, pDataIn + nFrameSize, &nSize_Result));

        m_SliceInfo.wQuantizerScaleCode = 1;        // TODO : 1->31 ???
        m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

        // Decode frame
        CHECK_HR_TRACE(Execute());
        CHECK_HR_TRACE(EndFrame(nSurfaceIndex));
    }
    // ***************

#ifdef _DEBUG
    DisplayStatus();
#endif

    // Update timestamp & Re-order B frames
    if (m_bFrame_repeat_pict || m_pFilter->IsReorderBFrame()) {
        if (m_bFrame_repeat_pict || m_pFilter->GetCodec() == AV_CODEC_ID_WMV3) {
            m_pFilter->UpdateFrameTime(rtStart, rtStop, !!m_bFrame_repeat_pict);
        }
        if (m_pFilter->IsReorderBFrame() || m_pFilter->IsEvo()) {
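            // In coded order B pictures arrive between their references: give a
            // B picture the held-back reference timestamps, and stash the
            // current I/P timestamps until the following picture is decoded.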
            if (m_PictureParams.bPicBackwardPrediction == 1) {
                SwapRT(rtStart, m_rtStartDelayed);
                SwapRT(rtStop,  m_rtStopDelayed);
            } else {
                // Save I or P reference time (swap later)
                if (!m_bFlushed) {
                    if (m_nDelayedSurfaceIndex != -1) {
                        UpdateStore(m_nDelayedSurfaceIndex, m_rtStartDelayed, m_rtStopDelayed);
                    }
                    m_rtStartDelayed = m_rtStopDelayed = _I64_MAX;
                    SwapRT(rtStart, m_rtStartDelayed);
                    SwapRT(rtStop,  m_rtStopDelayed);
                    m_nDelayedSurfaceIndex  = nSurfaceIndex;
                }
            }
        }
    }

    AddToStore(nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
               false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, 0);
    m_bFlushed = false;

    return DisplayNextFrame();
}
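
The SwapRT helper used in the timestamp reordering above is presumably a plain exchange of two REFERENCE_TIME values; a minimal sketch under that assumption:

// Hypothetical sketch of SwapRT; the project's actual helper may differ.
static inline void SwapRT(REFERENCE_TIME& rt1, REFERENCE_TIME& rt2)
{
    REFERENCE_TIME rt = rt1;
    rt1 = rt2;
    rt2 = rt;
}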