// Decode one H.264 access unit with FFmpeg, then submit every field/frame
// context that produced slices to the DXVA accelerator. When the parser
// reports a completed picture, queue it and display the next frame in order.
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr          = S_FALSE;
    int     nGotPicture = 0;

    // Reset all per-field H.264 contexts before handing the data to FFmpeg.
    memset(&m_DXVA_Context.DXVA_H264Context, 0, sizeof(m_DXVA_Context.DXVA_H264Context));
    CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
                                  pDataIn, nSize, rtStart,
                                  &nGotPicture));

    // Nothing to submit if no surface was assigned or no slices were parsed.
    if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_H264Context[0].slice_count) {
        return S_FALSE;
    }

    for (UINT nField = 0; nField < _countof(m_DXVA_Context.DXVA_H264Context); nField++) {
        DXVA_H264_Context* pCtx = &m_DXVA_Context.DXVA_H264Context[nField];
        if (pCtx->slice_count == 0) {
            continue;
        }

        m_nFieldNum = nField;
        pCtx->DXVAPicParams.Reserved16Bits            = Reserved16Bits;
        pCtx->DXVAPicParams.StatusReportFeedbackNumber = StatusReportFeedbackNumber++;

        // Begin frame
        CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
        // Add picture parameters
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PicParams_H264), &pCtx->DXVAPicParams));
        // Add quantization matrix
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &pCtx->DXVAScalingMatrix));
        // Add bitstream
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
        // Add slice control — short or long format depending on accelerator support
        if (!m_bUseLongSlice) {
            CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * pCtx->slice_count, pCtx->SliceShort));
        } else {
            CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long) * pCtx->slice_count, pCtx->SliceLong));
        }
        // Decode frame
        CHECK_HR_FRAME (Execute());
        CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
    }

    if (nGotPicture) {
        AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
        hr = DisplayNextFrame();
    }

    return hr;
}
// Decode one MPEG-2 picture with FFmpeg and submit each produced field/frame
// context to the DXVA accelerator; delivers the decoded picture when the
// parser reports one complete.
// Fix: removed the local "bool bIsField = false;" — it was declared but never
// read or written anywhere in this function (dead local, likely left over
// from the DXVA1 path in DecodeFrameInternal).
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr          = S_FALSE;
    int     got_picture = 0;

    // Clear the whole DXVA context before decoding the next picture.
    memset(&m_DXVA_Context, 0, sizeof(DXVA_Context));
    CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
                                  pDataIn, nSize, rtStart,
                                  &got_picture));

    // Bail out when no decode surface is assigned or no slices were parsed.
    if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_MPEG2Context[0].slice_count) {
        return S_FALSE;
    }

    m_pFilter->HandleKeyFrame(got_picture);

    for (UINT i = 0; i < m_DXVA_Context.frame_count; i++) {
        DXVA_MPEG2_Context* pDXVA_MPEG2_ctx = &m_DXVA_Context.DXVA_MPEG2Context[i];

        if (!pDXVA_MPEG2_ctx->slice_count) {
            continue;
        }

        m_nFieldNum = i;
        UpdatePictureParams();

        CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
        // Send picture parameters
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PictureParameters), &pDXVA_MPEG2_ctx->DXVAPicParams));
        // Add quantization matrix
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_QmatrixData), &pDXVA_MPEG2_ctx->DXVAScalingMatrix));
        // Add bitstream
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
        // Add slice control
        CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo) * pDXVA_MPEG2_ctx->slice_count, pDXVA_MPEG2_ctx->slice));
        // Decode frame
        CHECK_HR_FRAME (Execute());
        CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
    }

    if (got_picture) {
        AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
        hr = DisplayNextFrame();
    }

    return hr;
}
// DXVA1 MPEG-2 path: decode the compressed picture with FFmpeg, push the
// parameter/matrix/slice/bitstream buffers to the accelerator, then store
// the surface and display the next picture in presentation order.
HRESULT CDXVADecoderMpeg2::DecodeFrameInternal (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT                hr            = S_FALSE;
    int                    nSurfIndex    = -1;
    CComPtr<IMediaSample>  pDeliverSample;
    int                    nFieldType    = -1;
    int                    nSliceType    = -1;
    bool                   bFieldPicture = false;
    int                    nFrameRepeatPict = 0;

    CHECK_HR_FALSE (FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount,
                                        m_pFilter->GetAVCtx(), m_pFilter->GetFrame(), &m_nNextCodecIndex,
                                        &nFieldType, &nSliceType, pDataIn, nSize,
                                        &bFieldPicture, &nFrameRepeatPict));

    // After a flush, skip everything until the next intra picture arrives
    // (and never resume on the second field of a pair).
    if (m_bFlushed && (!m_PictureParams.bPicIntra || (bFieldPicture && m_PictureParams.bSecondField))) {
        TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame, %ws\n", FrameType(bFieldPicture, m_PictureParams.bSecondField));
        return S_FALSE;
    }

    CHECK_HR (GetFreeSurfaceIndex (nSurfIndex, &pDeliverSample, rtStart, rtStop));

    // Refresh the picture parameters except on the second field of a pair.
    if (!bFieldPicture || !m_PictureParams.bSecondField) {
        UpdatePictureParams(nSurfIndex);
    }

    TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, %ws, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
                 nSurfIndex, nSliceType, FrameType(bFieldPicture, m_PictureParams.bSecondField), m_nNextCodecIndex, rtStart);

    CHECK_HR (BeginFrame(nSurfIndex, pDeliverSample));
    // Picture parameters
    CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
    // Quantization matrix
    CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));
    // Slice control
    CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo) * m_nSliceCount, &m_SliceInfo));
    // Bitstream
    CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));
    // Kick off decoding on the accelerator
    CHECK_HR (Execute());
    CHECK_HR (EndFrame(nSurfIndex));

    const bool bAdded = AddToStore (nSurfIndex, pDeliverSample,
                                    (m_PictureParams.bPicBackwardPrediction != 1),
                                    rtStart, rtStop, bFieldPicture,
                                    (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType,
                                    FFGetCodedPicture(m_pFilter->GetAVCtx()));
    if (bAdded) {
        hr = DisplayNextFrame();
    }

    m_bFlushed = false;
    return hr;
}
// DXVA1 H.264 path: decode one access unit with FFmpeg, build the DXVA
// picture parameters, submit the compressed buffers to the accelerator,
// then store the surface and display the next picture in output (POC) order.
HRESULT CDXVADecoderH264_DXVA1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT               hr          = S_FALSE;
    int                   nSurfaceIndex = -1;
    int                   nFramePOC   = INT_MIN;         // POC of the picture just decoded
    int                   nOutPOC     = INT_MIN;         // POC of the next picture ready for output, if any
    REFERENCE_TIME        rtOutStart  = INVALID_TIME;
    CH264Nalu             Nalu;                          // NOTE(review): appears unused in this function — candidate for removal
    CComPtr<IMediaSample> pSampleToDeliver;

    CHECK_HR_FALSE (FFH264DecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
                                      pDataIn, nSize, rtStart,
                                      &nFramePOC, &nOutPOC, &rtOutStart, &m_nNALLength));

    // If parsing fails (probably no PPS/SPS yet), continue anyway: they may
    // arrive later (happens on truncated streams).
    CHECK_HR_FALSE (FFH264BuildPicParams(m_pFilter->GetAVCtx(), &m_DXVAPicParams, &m_DXVAScalingMatrix, m_IsATIUVD));

    TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n",
                nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

    // Wait for an I-frame after a flush (no references are available yet).
    if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
        TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : Flush - wait I frame\n");
        return S_FALSE;
    }

    CHECK_HR_FALSE (GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
    // Bind the chosen surface to the current picture before submission.
    FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

    {
        // Each submission must carry a fresh status report number.
        m_DXVAPicParams.StatusReportFeedbackNumber++;

        CHECK_HR_FALSE (BeginFrame(nSurfaceIndex, pSampleToDeliver));
        // Send picture parameters
        CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
        // Add bitstream
        CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn));
        // Add quantization matrix
        CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &m_DXVAScalingMatrix));
        // Add slice control (short slice format only on this DXVA1 path)
        CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * m_nSlices, m_pSliceShort));
        // Decode frame
        CHECK_HR_FALSE (Execute());
        CHECK_HR_FALSE (EndFrame(nSurfaceIndex));
    }

    bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag,
                             rtStart, rtStop, m_DXVAPicParams.field_pic_flag, nFramePOC);

    // Keep the reference list in sync and drop surfaces no longer referenced
    // before any display, so free surfaces are available for the next frame.
    FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
    ClearUnusedRefFrames();

    if (bAdded) {
        hr = DisplayNextFrame();
    }

    // Remember the latest output POC / start time for reordering bookkeeping.
    if (nOutPOC != INT_MIN) {
        m_nOutPOC    = nOutPOC;
        m_rtOutStart = rtOutStart;
    }

    m_bFlushed = false;
    return hr;
}