// Decode one parsed picture (a full frame or a single field) and submit it to
// the DXVA accelerator. Delivers the next finished frame downstream when the
// decoded picture could be queued.
//
// pDataIn/nSize     : compressed MPEG-2 picture data
// rtStart/rtStop    : presentation timestamps for this picture
// Returns S_OK/S_FALSE (S_FALSE when nothing was delivered), or a failure
// HRESULT propagated by the CHECK_HR macros.
HRESULT CDXVADecoderMpeg2::DecodeFrameInternal (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT               hr                 = S_FALSE;
    int                   nSurfaceIndex      = -1;
    CComPtr<IMediaSample> pSampleToDeliver;
    int                   nFieldType         = -1;
    int                   nSliceType         = -1;
    bool                  bIsField           = false;
    int                   bFrame_repeat_pict = 0;

    // Parse the compressed picture with the software parser; bail out
    // (returning S_FALSE) when no decodable picture is present.
    CHECK_HR_FALSE (FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
                                        m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize,
                                        &bIsField, &bFrame_repeat_pict));

    // Wait I frame after a flush
    if (m_bFlushed && (!m_PictureParams.bPicIntra || (bIsField && m_PictureParams.bSecondField))) {
        TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame, %ws\n", FrameType(bIsField, m_PictureParams.bSecondField));
        return S_FALSE;
    }

    CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

    // Reference surfaces are updated once per picture: the second field of a
    // field pair reuses the surface chosen for the first field.
    // (Condition simplified from "!bIsField || (bIsField && !bSecondField)".)
    if (!bIsField || !m_PictureParams.bSecondField) {
        UpdatePictureParams(nSurfaceIndex);
    }

    TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, %ws, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
                 nSurfaceIndex, nSliceType, FrameType(bIsField, m_PictureParams.bSecondField), m_nNextCodecIndex, rtStart);

    {
        CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver));
        // Send picture parameters
        CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
        // Add quantization matrix
        CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));
        // Add slice control
        CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
        // Add bitstream
        CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));
        // Decode frame
        CHECK_HR (Execute());
        CHECK_HR (EndFrame(nSurfaceIndex));
    }

    // Queue the decoded picture; the third argument flags non-B pictures
    // (bPicBackwardPrediction != 1) — presumably "is reference picture",
    // see AddToStore for the exact contract.
    bool bAdded = AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
                              bIsField, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));

    if (bAdded) {
        hr = DisplayNextFrame();
    }

    m_bFlushed = false;
    return hr;
}
// === Public functions HRESULT CDXVADecoderMpeg2::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop) { HRESULT hr; int nSurfaceIndex; CComPtr<IMediaSample> pSampleToDeliver; int nFieldType; int nSliceType; FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(), m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize); // Wait I frame after a flush if (m_bFlushed && ! m_PictureParams.bPicIntra) return S_FALSE; hr = GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop); if (FAILED (hr)) { ASSERT (hr == VFW_E_NOT_COMMITTED); // Normal when stop playing return hr; } CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver)); UpdatePictureParams(nSurfaceIndex); TRACE_MPEG2 ("=> %s %I64d Surf=%d\n", GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex); TRACE_MPEG2("CDXVADecoderMpeg2 : Decode frame %i\n", m_PictureParams.bPicScanMethod); CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams)); CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData)); // Send bitstream to accelerator CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo)); CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize)); // Decode frame CHECK_HR (Execute()); CHECK_HR (EndFrame(nSurfaceIndex)); AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop, false,(FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx())); m_bFlushed = false; return DisplayNextFrame(); }
// === Public functions HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop) { HRESULT hr; int nFieldType; int nSliceType; FFMpeg2DecodeFrame(&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(), m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize); if (m_PictureParams.bSecondField && !m_bSecondField) { m_bSecondField = true; } // Wait I frame after a flush if (m_bFlushed && (!m_PictureParams.bPicIntra || (m_bSecondField && m_PictureParams.bSecondField))) { TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame\n"); return S_FALSE; } if (m_bSecondField) { if (!m_PictureParams.bSecondField) { m_rtStart = rtStart; m_rtStop = rtStop; m_pSampleToDeliver = NULL; hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop); if (FAILED(hr)) { ASSERT(hr == VFW_E_NOT_COMMITTED); // Normal when stop playing return hr; } } } else { m_rtStart = rtStart; m_rtStop = rtStop; m_pSampleToDeliver = NULL; hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop); if (FAILED(hr)) { ASSERT(hr == VFW_E_NOT_COMMITTED); // Normal when stop playing return hr; } } if (m_pSampleToDeliver == NULL) { return S_FALSE; } CHECK_HR_TRACE(BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver)); if (m_bSecondField) { if (!m_PictureParams.bSecondField) { UpdatePictureParams(m_nSurfaceIndex); } } else { UpdatePictureParams(m_nSurfaceIndex); } TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, SecondField = %d, m_nNextCodecIndex = %d, rtStart = [%I64d]\n", m_nSurfaceIndex, nSliceType, m_PictureParams.bSecondField, m_nNextCodecIndex, rtStart); CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams)); CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData)); // Send bitstream to accelerator 
CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo)); CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize)); // Decode frame CHECK_HR_TRACE(Execute()); CHECK_HR_TRACE(EndFrame(m_nSurfaceIndex)); if (m_bSecondField) { if (m_PictureParams.bSecondField) { AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop, false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx())); hr = DisplayNextFrame(); } } else { AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop, false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx())); hr = DisplayNextFrame(); } m_bFlushed = false; return hr; }