Example #1
0
// Reset the MPEG-2 decoder to its post-seek state: drop both held
// reference surfaces, clear timestamp/field bookkeeping, then let the
// base class finish the flush.
void CDXVADecoderMpeg2::Flush()
{
    m_nNextCodecIndex = INT_MIN;

    // Release the forward ([0]) and backward ([1]) reference surfaces
    // in that order, then mark both slots as free.
    for (int nRef = 0; nRef < 2; nRef++) {
        if (m_wRefPictureIndex[nRef] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[nRef]);
        }
    }
    m_wRefPictureIndex[0] = m_wRefPictureIndex[1] = NO_REF_FRAME;

    m_nSurfaceIndex    = 0;
    m_pSampleToDeliver = NULL;
    m_bSecondField     = false;

    // Invalidate the pending timestamps.
    m_rtStart = _I64_MIN;
    m_rtStop  = _I64_MIN;

    m_rtLastStart = 0;

    __super::Flush();
}
Example #2
0
// Flush the MPEG-2 decoder: release any surfaces still pinned as
// references, reset the delivery index, and delegate to the base class.
void CDXVADecoderMpeg2::Flush()
{
	m_nNextCodecIndex = INT_MIN;

	// Drop whichever of the two reference slots is still occupied,
	// keeping the original [0]-then-[1] release order.
	for (int nRef = 0; nRef < 2; nRef++) {
		if (m_wRefPictureIndex[nRef] != NO_REF_FRAME) {
			RemoveRefFrame(m_wRefPictureIndex[nRef]);
		}
	}
	m_wRefPictureIndex[0] = m_wRefPictureIndex[1] = NO_REF_FRAME;

	__super::Flush();
}
Example #3
0
// Flush the VC-1 decoder: forget the delayed (B-frame re-order) sample,
// release held reference surfaces, and delegate to the base class.
void CDXVADecoderVC1::Flush()
{
	// No surface is pending delivery any more; its timestamps are
	// pushed to "not set" sentinels.
	m_nDelayedSurfaceIndex = -1;
	m_rtStartDelayed       = _I64_MAX;
	m_rtStopDelayed        = _I64_MAX;

	// Release occupied reference slots ([0] first, as before), then
	// mark both as free.
	for (int nRef = 0; nRef < 2; nRef++) {
		if (m_wRefPictureIndex[nRef] != NO_REF_FRAME) {
			RemoveRefFrame(m_wRefPictureIndex[nRef]);
		}
	}
	m_wRefPictureIndex[0] = m_wRefPictureIndex[1] = NO_REF_FRAME;

	__super::Flush();
}
Example #4
0
// Release every occupied entry of the picture store, flagging each one
// as displayed first so RemoveRefFrame can free the slot.
void CDXVADecoderH264::ClearRefFramesList()
{
    for (int nIdx = 0; nIdx < m_nPicEntryNumber; nIdx++) {
        if (!m_pPictureStore[nIdx].bInUse) {
            continue;   // slot already free
        }
        m_pPictureStore[nIdx].bDisplayed = true;
        RemoveRefFrame(nIdx);
    }
}
Example #5
0
// Free picture-store entries that were already displayed and that the
// H.264 codec no longer tracks as a short- or long-term reference.
void CDXVADecoderH264::ClearUnusedRefFrames()
{
	for (int nIdx = 0; nIdx < m_nPicEntryNumber; nIdx++) {
		// Only displayed reference pictures are candidates for removal.
		const bool bCandidate = m_pPictureStore[nIdx].bRefPicture && m_pPictureStore[nIdx].bDisplayed;
		if (bCandidate && !FFH264IsRefFrameInUse(nIdx, m_pFilter->GetAVCtx())) {
			RemoveRefFrame(nIdx);
		}
	}
}
Example #6
0
// Search the picture store for the in-use entry whose POC matches nPOC
// and free that slot (marking it displayed so removal succeeds).
// Does nothing if no entry carries the given POC.
void CDXVADecoderH264::RemoveUndisplayedFrame(int nPOC)
{
    for (int nIdx = 0; nIdx < m_nPicEntryNumber; nIdx++) {
        if (!m_pPictureStore[nIdx].bInUse || m_pPictureStore[nIdx].nCodecSpecific != nPOC) {
            continue;   // not the frame we are looking for
        }
        m_pPictureStore[nIdx].bDisplayed = true;
        RemoveRefFrame(nIdx);
        return;         // POC is unique in the store; stop at first hit
    }
}
Example #7
0
// Fill in the per-picture DXVA parameters for the frame about to be
// decoded into surface nSurfaceIndex: rotates the forward/backward
// reference surfaces, and derives the residual-difference and
// inverse-scan flags from the accelerator's DXVA2_ConfigPictureDecode
// capabilities as required by the DXVA spec.
void CDXVADecoderMpeg2::UpdatePictureParams(int nSurfaceIndex)
{
    DXVA2_ConfigPictureDecode* cpd = GetDXVA2Config();     // Ok for DXVA1 too (parameters have been copied)

    m_PictureParams.wDecodedPictureIndex = nSurfaceIndex;

    // Manage reference picture list
    // For I/P pictures (no backward prediction) the old forward reference
    // is released and the previous backward reference becomes the new
    // forward one; B pictures keep both references untouched.
    if (!m_PictureParams.bPicBackwardPrediction) {
        if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[0]);
        }
        m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
        m_wRefPictureIndex[1] = nSurfaceIndex;
    }
    // Intra pictures use no forward reference; only B pictures use a backward one.
    m_PictureParams.wForwardRefPictureIndex = (m_PictureParams.bPicIntra == 0) ? m_wRefPictureIndex[0] : NO_REF_FRAME;
    m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;

    // Shall be 0 if bConfigResidDiffHost is 0 or if BPP > 8
    if (cpd->ConfigResidDiffHost == 0 || m_PictureParams.bBPPminus1 > 7) {
        m_PictureParams.bPicSpatialResid8 = 0;
    } else {
        if (m_PictureParams.bBPPminus1 == 7 && m_PictureParams.bPicIntra && cpd->ConfigResidDiffHost)
            // Shall be 1 if BPP is 8 and bPicIntra is 1 and bConfigResidDiffHost is 1
        {
            m_PictureParams.bPicSpatialResid8 = 1;
        } else
            // Shall be 1 if bConfigSpatialResid8 is 1
        {
            m_PictureParams.bPicSpatialResid8 = cpd->ConfigSpatialResid8;
        }
    }

    // Shall be 0 if bConfigResidDiffHost is 0 or if bConfigSpatialResid8 is 0 or if BPP > 8
    if (cpd->ConfigResidDiffHost == 0 || cpd->ConfigSpatialResid8 == 0 || m_PictureParams.bBPPminus1 > 7) {
        m_PictureParams.bPicOverflowBlocks = 0;
    }

    // Shall be 1 if bConfigHostInverseScan is 1 or if bConfigResidDiffAccelerator is 0.

    if (cpd->ConfigHostInverseScan == 1 || cpd->ConfigResidDiffAccelerator == 0) {
        m_PictureParams.bPicScanFixed   = 1;

        if (cpd->ConfigHostInverseScan != 0) {
            m_PictureParams.bPicScanMethod  = 3;    // 11 = Arbitrary scan with absolute coefficient address.
        } else if (FFGetAlternateScan(m_pFilter->GetAVCtx())) {
            m_PictureParams.bPicScanMethod  = 1;    // 01 = Alternate-vertical scan (MPEG-2 Figure 7-3)
        } else {
            m_PictureParams.bPicScanMethod  = 0;    // 00 = Zig-zag scan (MPEG-2 Figure 7-2)
        }
    }
}
Example #8
0
// === Public functions
// Decode one VC-1 access unit through the DXVA accelerator.
// pDataIn/nSize is the compressed picture; rtStart/rtStop are its
// timestamps. When the buffer holds an interlaced field pair
// (nFrameSize != 0 after FFVC1UpdatePictureParam), the second field is
// submitted as a second BeginFrame/Execute/EndFrame pass on the same
// surface. Returns S_OK (skipped frame), S_FALSE (waiting for an I
// frame after a flush), a failure HRESULT, or DisplayNextFrame()'s
// result on success.
HRESULT TDXVADecoderVC1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT               hr;
    int                   nSurfaceIndex;
    CComPtr<IMediaSample> pSampleToDeliver;
    int                   nFieldType, nSliceType;
    UINT                  nFrameSize, nSize_Result;

    // Parse the bitstream headers into m_PictureParams; also reports the
    // field/slice type and, for field pairs, the size of the first field.
    m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, &nFieldType, &nSliceType, pDataIn, nSize, &nFrameSize, FALSE, &m_bFrame_repeat_pict);

    if (m_pCodec->libavcodec->FFIsSkipped(m_pCodec->avctx)) {
        return S_OK;
    }

    // Wait I frame after a flush
    if (m_bFlushed && ! m_PictureParams.bPicIntra) {
        return S_FALSE;
    }

    hr = GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
    if (FAILED(hr)) {
        ASSERT(hr == VFW_E_NOT_COMMITTED);  // Normal when stop playing
        return hr;
    }

    CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s, rtStart = %I64d  Surf=%d\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex);

    m_PictureParams.wDecodedPictureIndex    = nSurfaceIndex;
    m_PictureParams.wDeblockedPictureIndex  = m_PictureParams.wDecodedPictureIndex;

    // Manage reference picture list
    // For non-B pictures: release the old forward reference, promote the
    // backward reference to forward, and record this surface as the new
    // backward reference. B pictures leave the pair untouched.
    if (!m_PictureParams.bPicBackwardPrediction) {
        if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[0]);
        }
        m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
        m_wRefPictureIndex[1] = nSurfaceIndex;
    }
    m_PictureParams.wForwardRefPictureIndex  = (m_PictureParams.bPicIntra == 0)                ? m_wRefPictureIndex[0] : NO_REF_FRAME;
    m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;

    // NOTE(review): 4MV is only allowed for progressive non-B pictures here;
    // bPicStructure == 3 presumably means "frame" — confirm against the VC-1 DXVA spec.
    m_PictureParams.bPic4MVallowed       = (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME && m_PictureParams.bPicStructure == 3) ? 1 : 0;
    m_PictureParams.bPicDeblockConfined |= (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME) ? 0x04 : 0;

    m_PictureParams.bPicScanMethod++; // Use for status reporting sections 3.8.1 and 3.8.2

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - Decode frame %i\n"), m_PictureParams.bPicScanMethod);

    // Send picture params to accelerator
    CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

    // Send bitstream to accelerator
    // For a field pair only the first nFrameSize bytes belong to this field.
    CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nFrameSize ? nFrameSize : nSize, pDataIn, &nSize_Result));

    m_SliceInfo.wQuantizerScaleCode = 1;    // TODO : 1->31 ???
    m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
    CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

    // Decode frame
    CHECK_HR(Execute());
    CHECK_HR(EndFrame(nSurfaceIndex));

    // ***************
    if (nFrameSize) { // Decoding Second Field
        // Re-parse for the second field (bSecondField = TRUE), then run a
        // second decode pass into the same surface with the remaining bytes.
        m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, NULL, NULL, pDataIn, nSize, NULL, TRUE, &m_bFrame_repeat_pict);

        CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

        DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType));

        CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

        // Send bitstream to accelerator
        CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize - nFrameSize, pDataIn + nFrameSize, &nSize_Result));

        m_SliceInfo.wQuantizerScaleCode = 1;        // TODO : 1->31 ???
        m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
        CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

        // Decode frame
        CHECK_HR(Execute());
        CHECK_HR(EndFrame(nSurfaceIndex));
    }
    // ***************

#ifdef _DEBUG
    DisplayStatus();
#endif

    // Re-order B frames
    // B frames are displayed with the timestamps of the delayed reference:
    // swap this frame's times with the saved ones. For I/P frames, commit
    // the previously delayed surface's times and delay this one instead.
    if (m_pCodec->isReorderBFrame()) {
        if (m_PictureParams.bPicBackwardPrediction == 1) {
            SwapRT(rtStart, m_rtStartDelayed);
            SwapRT(rtStop,  m_rtStopDelayed);
        } else {
            // Save I or P reference time (swap later)
            if (!m_bFlushed) {
                if (m_nDelayedSurfaceIndex != -1) {
                    UpdateStore(m_nDelayedSurfaceIndex, m_rtStartDelayed, m_rtStopDelayed);
                }
                m_rtStartDelayed = m_rtStopDelayed = _I64_MAX;
                SwapRT(rtStart, m_rtStartDelayed);
                SwapRT(rtStop,  m_rtStopDelayed);
                m_nDelayedSurfaceIndex = nSurfaceIndex;
            }
        }
    }

    // B frames (bPicBackwardPrediction == 1) enter the store as not-yet-displayable.
    AddToStore(nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
               false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, 0);

    m_bFlushed = false;

    return DisplayNextFrame();
}