HRESULT CDXVADecoderMpeg2::DecodeFrameInternal (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT					hr					= S_FALSE;
	int						nSurfaceIndex		= -1;
	CComPtr<IMediaSample>	pSampleToDeliver;
	int						nFieldType			= -1;
	int						nSliceType			= -1;
	bool					bIsField			= false;
	int						bFrame_repeat_pict	= 0;

	CHECK_HR_FALSE (FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
					m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize, &bIsField, &bFrame_repeat_pict));

	// Wait for an I-frame after a flush
	if (m_bFlushed && (!m_PictureParams.bPicIntra || (bIsField && m_PictureParams.bSecondField))) {
		TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame, %ws\n", FrameType(bIsField, m_PictureParams.bSecondField));
		return S_FALSE;
	}

	CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

	if (!bIsField || !m_PictureParams.bSecondField) {
		UpdatePictureParams(nSurfaceIndex);
	}

	TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, %ws, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
				 nSurfaceIndex, nSliceType, FrameType(bIsField, m_PictureParams.bSecondField), m_nNextCodecIndex, rtStart);

	{
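		// Standard DXVA2 submission sequence: BeginFrame, compressed buffers
		// (picture parameters, quantization matrix, slice control, bitstream),
		// Execute, EndFrame.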
		CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver));
		// Send picture parameters
		CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
		// Add quantization matrix
		CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));
		// Add slice control
		CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
		// Add bitstream
		CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));
		// Decode frame
		CHECK_HR (Execute());
		CHECK_HR (EndFrame(nSurfaceIndex));
	}

	bool bAdded = AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
							  bIsField, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));

	if (bAdded) {
		hr = DisplayNextFrame();
	}
		
	m_bFlushed = false;
	return hr;
}
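
// The CHECK_HR / CHECK_HR_FALSE helpers used throughout these snippets are not
// shown here. A minimal sketch of how such macros are commonly defined (an
// assumption for illustration; the project's real definitions may add tracing
// or per-frame cleanup):
#define CHECK_HR(x)       { HRESULT _hr = (x); if (FAILED(_hr)) { return _hr; } }
#define CHECK_HR_FALSE(x) { HRESULT _hr = (x); if (FAILED(_hr)) { return S_FALSE; } }
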
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT	hr			= S_FALSE;
	int		got_picture	= 0;

	// Reset all per-field DXVA H.264 contexts before FFDecodeFrame() fills them for this access unit.
	memset(&m_DXVA_Context.DXVA_H264Context, 0, sizeof(DXVA_H264_Context) * _countof(m_DXVA_Context.DXVA_H264Context));
	CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
								  pDataIn, nSize, rtStart,
								  &got_picture));

	if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_H264Context[0].slice_count) {
		return S_FALSE;
	}

	for (UINT i = 0; i < _countof(m_DXVA_Context.DXVA_H264Context); i++) {
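		// Interlaced content can fill one context per field; each filled
		// context is submitted as its own decode pass on the same surface.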
		DXVA_H264_Context* pDXVA_H264_ctx = &m_DXVA_Context.DXVA_H264Context[i];

		if (!pDXVA_H264_ctx->slice_count) {
			continue;
		}

		m_nFieldNum = i;

		pDXVA_H264_ctx->DXVAPicParams.Reserved16Bits				= Reserved16Bits;
		pDXVA_H264_ctx->DXVAPicParams.StatusReportFeedbackNumber	= StatusReportFeedbackNumber++;

		// Begin frame
		CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
		// Add picture parameters
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PicParams_H264), &pDXVA_H264_ctx->DXVAPicParams));
		// Add quantization matrix
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &pDXVA_H264_ctx->DXVAScalingMatrix));
		// Add bitstream
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
		// Add slice control
		if (m_bUseLongSlice) {
			CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long) * pDXVA_H264_ctx->slice_count, pDXVA_H264_ctx->SliceLong));
		} else {
			CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * pDXVA_H264_ctx->slice_count, pDXVA_H264_ctx->SliceShort));
		}
		// Decode frame
		CHECK_HR_FRAME (Execute());
		CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
	}

	if (got_picture) {
		AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
		hr = DisplayNextFrame();
	}

	return hr;
}
Example 3
// === Public functions
HRESULT CDXVADecoderMpeg2::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT						hr;
	int							nSurfaceIndex;
	CComPtr<IMediaSample>		pSampleToDeliver;
	int							nFieldType;
	int							nSliceType;

	FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(), 
						m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);

	// Wait for an I-frame after a flush
	if (m_bFlushed && ! m_PictureParams.bPicIntra)
		return S_FALSE;

	hr = GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
	if (FAILED (hr))
	{
		ASSERT (hr == VFW_E_NOT_COMMITTED);		// Normal when playback stops
		return hr;
	}

	CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver));

	UpdatePictureParams(nSurfaceIndex);

	TRACE_MPEG2 ("=> %s   %I64d  Surf=%d\n", GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex);

	TRACE_MPEG2("CDXVADecoderMpeg2 : Decode frame %i\n", m_PictureParams.bPicScanMethod);

	CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

	CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));

	// Send bitstream to accelerator
	CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
	CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

	// Decode frame
	CHECK_HR (Execute());
	CHECK_HR (EndFrame(nSurfaceIndex));

	AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop, 
				false,(FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
	m_bFlushed = false;

	return DisplayNextFrame();
}
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT	hr			= S_FALSE;
	bool	bIsField	= false;
	int		got_picture	= 0;

	memset(&m_DXVA_Context, 0, sizeof(DXVA_Context));
	CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
								  pDataIn, nSize, rtStart,
								  &got_picture));

	if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_MPEG2Context[0].slice_count) {
		return S_FALSE;
	}

	m_pFilter->HandleKeyFrame(got_picture);

	for (UINT i = 0; i < m_DXVA_Context.frame_count; i++) {
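		// Field pictures can fill more than one context; each filled context
		// is decoded as a separate pass on the same surface.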
		DXVA_MPEG2_Context* pDXVA_MPEG2_ctx = &m_DXVA_Context.DXVA_MPEG2Context[i];

		if (!pDXVA_MPEG2_ctx->slice_count) {
			continue;
		}

		m_nFieldNum = i;
		UpdatePictureParams();

		CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
		// Send picture parameters
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PictureParameters), &pDXVA_MPEG2_ctx->DXVAPicParams));
		// Add quantization matrix
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_QmatrixData), &pDXVA_MPEG2_ctx->DXVAScalingMatrix));
		// Add bitstream
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
		// Add slice control
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo) * pDXVA_MPEG2_ctx->slice_count, pDXVA_MPEG2_ctx->slice));
		// Decode frame
		CHECK_HR_FRAME (Execute());
		CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
	}

	if (got_picture) {
		AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
		hr = DisplayNextFrame();
	}
		
	return hr;
}
Example 5
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_FALSE;
    UINT nSlices = 0;
    int nSurfaceIndex = -1;
    int nFieldType = -1;
    int nSliceType = -1;
    int nFramePOC = INT_MIN;
    int nOutPOC = INT_MIN;
    REFERENCE_TIME rtOutStart = _I64_MIN;
    CH264Nalu Nalu;
    UINT nNalOffset = 0;
    CComPtr<IMediaSample> pSampleToDeliver;
    CComQIPtr<IMPCDXVA2Sample> pDXVA2Sample;
    int slice_step = 1;

    if (FFH264DecodeBuffer(m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart) == -1) {
        return S_FALSE;
    }

    while (!nSlices && slice_step <= 2) {
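        // Two-pass NALU scan: the first pass assumes length-prefixed NAL
        // units (m_nNALLength bytes of size prefix); if no slices are found,
        // retry assuming Annex-B start codes (NAL length 0).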
        Nalu.SetBuffer(pDataIn, nSize, slice_step == 1 ? m_nNALLength : 0);
        while (Nalu.ReadNext()) {
            switch (Nalu.GetType()) {
                case NALU_TYPE_SLICE:
                case NALU_TYPE_IDR:
                    if (m_bUseLongSlice) {
                        m_pSliceLong[nSlices].BSNALunitDataLocation = nNalOffset;
                        m_pSliceLong[nSlices].SliceBytesInBuffer = (UINT)Nalu.GetDataLength() + 3; //.GetRoundedDataLength();
                        m_pSliceLong[nSlices].slice_id = nSlices;
                        FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

                        if (nSlices > 0) {
                            m_pSliceLong[nSlices - 1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices - 1].first_mb_in_slice;
                        }
                    }
                    nSlices++;
                    nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
                    if (nSlices > MAX_SLICES) {
                        break;
                    }
                    break;
            }
        }
        slice_step++;
    }

    if (!nSlices) {
        return S_FALSE;
    }

    m_nMaxWaiting = min(max(m_DXVAPicParams.num_ref_frames, 3), 8);

    // If parsing fails (probably missing PPS/SPS), continue anyway; they may arrive later (happens with truncated streams)
    if (FAILED(FFH264BuildPicParams(&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor()))) {
        return S_FALSE;
    }

    TRACE_H264("CDXVADecoderH264::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n", nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

    // Wait for an I-frame after a flush
    if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
        TRACE_H264("CDXVADecoderH264::DecodeFrame() : Flush - wait I frame\n");
        m_nBrokenFramesFlag = 0;
        m_nBrokenFramesFlag_POC = 0;
        m_nfield_pic_flag = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC = INT_MIN;
        return S_FALSE;
    }

    /* Disabled because it causes serious problems.
        // Some magic code for detecting incorrectly decoded interlaced frames ...
        // TODO : make this better, preferably on the ffmpeg side ...
        if (m_nfield_pic_flag && m_nfield_pic_flag == m_DXVAPicParams.field_pic_flag && m_nRefPicFlag == m_DXVAPicParams.RefPicFlag) {
            if (m_nPrevOutPOC == m_nOutPOC && m_nOutPOC == INT_MIN) {
                m_nBrokenFramesFlag_POC++;
            }
            m_nBrokenFramesFlag++;
        } else {
            m_nBrokenFramesFlag     = 0;
            m_nBrokenFramesFlag_POC = 0;
        }
        m_nfield_pic_flag   = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag       = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC       = m_nOutPOC;

        if (m_nBrokenFramesFlag > 4) {
            m_nBrokenFramesFlag = 0;
            if (m_nBrokenFramesFlag_POC > 1) {
                TRACE_H264("CDXVADecoderH264::DecodeFrame() : Detected broken frames ... flush data\n");
                m_nBrokenFramesFlag_POC = 0;
                Flush();
                return S_FALSE;
            }
        }
        //
    */

    CHECK_HR_TRACE(GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
    FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

    CHECK_HR_TRACE(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    m_DXVAPicParams.StatusReportFeedbackNumber++;

    // Send picture parameters
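    // (The picture parameters get their own Execute() call here; the
    // bitstream, slice control and quantization matrix follow in a second
    // Execute() below.)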
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
    CHECK_HR_TRACE(Execute());

    // Add bitstream, slice control and quantization matrix
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    if (m_bUseLongSlice) {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
    } else {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
    }

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

    // Decode bitstream
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(nSurfaceIndex));

#if defined(_DEBUG) && 0
    DisplayStatus();
#endif

    bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
                             m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType,
                             (FF_SLICE_TYPE)nSliceType, nFramePOC);

    FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
    ClearUnusedRefFrames();

    if (bAdded) {
        hr = DisplayNextFrame();
    }

    if (nOutPOC != INT_MIN) {
        m_nOutPOC = nOutPOC;
        m_rtOutStart = rtOutStart;
    }

    m_bFlushed = false;
    return hr;
}
Example 6
HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT						hr			= S_FALSE;
	CH264Nalu					Nalu;
	UINT						nSlices		= 0;
	int							nSurfaceIndex;
	int							nFieldType;
	int							nSliceType;
	int							nFramePOC;
	IDirect3DSurface9*			pSampleToDeliver;
	int							nDXIndex	= 0;
	UINT						nNalOffset	= 0;
	int							nOutPOC;
	REFERENCE_TIME				rtOutStart;

	if (pDataIn == NULL || nSize == 0)
		return S_FALSE;

	Nalu.SetBuffer (pDataIn, nSize, m_nNALLength);
	FFH264DecodeBuffer (m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart);

	//CLog::Log(LOGDEBUG, "nFramePOC = %d nOutPOC %d rtOutStart%d", nFramePOC, nOutPOC, rtOutStart);

	while (Nalu.ReadNext())
	{
		switch (Nalu.GetType())
		{
		case NALU_TYPE_SLICE:
		case NALU_TYPE_IDR:
				if(m_bUseLongSlice) 
				{
					m_pSliceLong[nSlices].BSNALunitDataLocation	= nNalOffset;
					m_pSliceLong[nSlices].SliceBytesInBuffer	= Nalu.GetDataLength()+3; //.GetRoundedDataLength();
					m_pSliceLong[nSlices].slice_id				= nSlices;
					FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

					if (nSlices>0)
						m_pSliceLong[nSlices-1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices-1].first_mb_in_slice;
				}
				nSlices++; 
				nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
				if (nSlices > MAX_SLICES) break;
				break;
		}
	}
	if (nSlices == 0) return S_FALSE;

	m_nMaxWaiting	= min (max (m_DXVAPicParams.num_ref_frames, 3), 8);

	// If parsing fails (probably missing PPS/SPS), continue anyway; they may arrive later (happens with truncated streams)
	if (FAILED (FFH264BuildPicParams (&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor())))
		return S_FALSE;

	// Wait for an I-frame after a flush
	if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag)
		return S_FALSE;

	CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

	FFH264SetCurrentPicture (nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

	CHECK_HR (BeginFrame(pSampleToDeliver));
	
	m_DXVAPicParams.StatusReportFeedbackNumber++;

//	TRACE("CDXVADecoderH264 : Decode frame %u\n", m_DXVAPicParams.StatusReportFeedbackNumber);

	// Send picture parameters
	CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
	CHECK_HR (Execute());

	// Add bitstream, slice control and quantization matrix
	CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

	if (m_bUseLongSlice)
	{
		CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType,  sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
	}
	else
	{
		CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
	}

	CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof (DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

	// Decode bitstream
	CHECK_HR (Execute());

	CHECK_HR (EndFrame(nSurfaceIndex));

#ifdef _DEBUG
	//DisplayStatus();
#endif

	bool bAdded		= AddToStore (nSurfaceIndex, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
								  m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType, 
								  (FF_SLICE_TYPE)nSliceType, nFramePOC);

	FFH264UpdateRefFramesList (&m_DXVAPicParams, m_pFilter->GetAVCtx());

	ClearUnusedRefFrames();

	if (bAdded) 
	{
		hr = DisplayNextFrame();

		if (nOutPOC != -1)
		{
			m_nOutPOC		= nOutPOC;
			m_rtOutStart	= rtOutStart;
		}
	}
	m_bFlushed		= false;
 
	return hr;
}
Example 7
HRESULT CDXVADecoderH264_DXVA1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT					hr					= S_FALSE;
	int						nSurfaceIndex		= -1;
	int						nFramePOC			= INT_MIN;
	int						nOutPOC				= INT_MIN;
	REFERENCE_TIME			rtOutStart			= INVALID_TIME;
	CH264Nalu				Nalu;
	CComPtr<IMediaSample>	pSampleToDeliver;

	CHECK_HR_FALSE (FFH264DecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(), pDataIn, nSize, rtStart,
					&nFramePOC, &nOutPOC, &rtOutStart, &m_nNALLength));

	// If parsing fails (probably missing PPS/SPS), continue anyway; they may arrive later (happens with truncated streams)
	CHECK_HR_FALSE (FFH264BuildPicParams(m_pFilter->GetAVCtx(), &m_DXVAPicParams, &m_DXVAScalingMatrix, m_IsATIUVD));

	TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n", nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

	// Wait for an I-frame after a flush
	if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
		TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : Flush - wait I frame\n");
		return S_FALSE;
	}

	CHECK_HR_FALSE (GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
	FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());
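	// (FFH264SetCurrentPicture associates the chosen surface index with
	// ffmpeg's current picture before decoding starts.)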

	{
		m_DXVAPicParams.StatusReportFeedbackNumber++;

		CHECK_HR_FALSE (BeginFrame(nSurfaceIndex, pSampleToDeliver));
		// Send picture parameters
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
		// Add bitstream
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn));
		// Add quantization matrix
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &m_DXVAScalingMatrix));
		// Add slice control
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * m_nSlices, m_pSliceShort));
		// Decode frame
		CHECK_HR_FALSE (Execute());
		CHECK_HR_FALSE (EndFrame(nSurfaceIndex));
	}

	bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
							 m_DXVAPicParams.field_pic_flag, nFramePOC);


	FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
	ClearUnusedRefFrames();

	if (bAdded) {
		hr = DisplayNextFrame();
	}

	if (nOutPOC != INT_MIN) {
		m_nOutPOC		= nOutPOC;
		m_rtOutStart	= rtOutStart;
	}

	m_bFlushed = false;
	return hr;
}
Example 8
// === Public functions
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr;
    int nFieldType;
    int nSliceType;

    FFMpeg2DecodeFrame(&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
                       m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);

    if (m_PictureParams.bSecondField && !m_bSecondField) {
        m_bSecondField = true;
    }

    // Wait for an I-frame after a flush
    if (m_bFlushed && (!m_PictureParams.bPicIntra || (m_bSecondField && m_PictureParams.bSecondField))) {
        TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame\n");
        return S_FALSE;
    }

    // Only the first field of a pair (or a full-frame picture) allocates a
    // new surface; the second field decodes into the same one.
    if (!m_bSecondField || !m_PictureParams.bSecondField) {
        m_rtStart = rtStart;
        m_rtStop  = rtStop;
        m_pSampleToDeliver = NULL;
        hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop);
        if (FAILED(hr)) {
            ASSERT(hr == VFW_E_NOT_COMMITTED);      // Normal when playback stops
            return hr;
        }
    }

    if (m_pSampleToDeliver == NULL) {
        return S_FALSE;
    }

    CHECK_HR_TRACE(BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));

    if (!m_bSecondField || !m_PictureParams.bSecondField) {
        UpdatePictureParams(m_nSurfaceIndex);
    }

    TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, SecondField = %d, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
                m_nSurfaceIndex, nSliceType, m_PictureParams.bSecondField, m_nNextCodecIndex, rtStart);

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));

    // Send bitstream to accelerator
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    // Decode frame
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(m_nSurfaceIndex));

    if (!m_bSecondField || m_PictureParams.bSecondField) {
        AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop,
                   false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
        hr = DisplayNextFrame();
    }

    m_bFlushed = false;

    return hr;
}
Example 9
// === Public functions
HRESULT TDXVADecoderVC1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT               hr;
    int                   nSurfaceIndex;
    CComPtr<IMediaSample> pSampleToDeliver;
    int                   nFieldType, nSliceType;
    UINT                  nFrameSize, nSize_Result;

    m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, &nFieldType, &nSliceType, pDataIn, nSize, &nFrameSize, FALSE, &m_bFrame_repeat_pict);

    if (m_pCodec->libavcodec->FFIsSkipped(m_pCodec->avctx)) {
        return S_OK;
    }

    // Wait for an I-frame after a flush
    if (m_bFlushed && ! m_PictureParams.bPicIntra) {
        return S_FALSE;
    }

    hr = GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
    if (FAILED(hr)) {
        ASSERT(hr == VFW_E_NOT_COMMITTED);  // Normal when playback stops
        return hr;
    }

    CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s, rtStart = %I64d  Surf=%d\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex);

    m_PictureParams.wDecodedPictureIndex    = nSurfaceIndex;
    m_PictureParams.wDeblockedPictureIndex  = m_PictureParams.wDecodedPictureIndex;

    // Manage reference picture list
    if (!m_PictureParams.bPicBackwardPrediction) {
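        // I/P pictures slide the two-entry reference window: release the
        // oldest reference surface and make the current one the newest.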
        if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[0]);
        }
        m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
        m_wRefPictureIndex[1] = nSurfaceIndex;
    }
    m_PictureParams.wForwardRefPictureIndex  = (m_PictureParams.bPicIntra == 0)                ? m_wRefPictureIndex[0] : NO_REF_FRAME;
    m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;

    m_PictureParams.bPic4MVallowed       = (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME && m_PictureParams.bPicStructure == 3) ? 1 : 0;
    m_PictureParams.bPicDeblockConfined |= (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME) ? 0x04 : 0;

    m_PictureParams.bPicScanMethod++; // Used for status reporting (DXVA spec sections 3.8.1 and 3.8.2)

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - Decode frame %i\n"), m_PictureParams.bPicScanMethod);

    // Send picture params to accelerator
    CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

    // Send bitstream to accelerator
    CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nFrameSize ? nFrameSize : nSize, pDataIn, &nSize_Result));

    m_SliceInfo.wQuantizerScaleCode = 1;    // TODO : 1->31 ???
    m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
    CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

    // Decode frame
    CHECK_HR(Execute());
    CHECK_HR(EndFrame(nSurfaceIndex));

    // ***************
    if (nFrameSize) { // Decoding Second Field
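        // The input buffer holds both fields: the first nFrameSize bytes
        // were decoded above as the first field; the remainder at
        // pDataIn + nFrameSize is now decoded as a second pass on the same
        // surface.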
        m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, NULL, NULL, pDataIn, nSize, NULL, TRUE, &m_bFrame_repeat_pict);

        CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

        DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType));

        CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

        // Send bitstream to accelerator
        CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize - nFrameSize, pDataIn + nFrameSize, &nSize_Result));

        m_SliceInfo.wQuantizerScaleCode = 1;        // TODO : 1->31 ???
        m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
        CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

        // Decode frame
        CHECK_HR(Execute());
        CHECK_HR(EndFrame(nSurfaceIndex));
    }
    // ***************

#ifdef _DEBUG
    DisplayStatus();
#endif

    // Re-order B frames
    if (m_pCodec->isReorderBFrame()) {
        if (m_PictureParams.bPicBackwardPrediction == 1) {
            SwapRT(rtStart, m_rtStartDelayed);
            SwapRT(rtStop,  m_rtStopDelayed);
        } else {
            // Save I or P reference time (swap later)
            if (!m_bFlushed) {
                if (m_nDelayedSurfaceIndex != -1) {
                    UpdateStore(m_nDelayedSurfaceIndex, m_rtStartDelayed, m_rtStopDelayed);
                }
                m_rtStartDelayed = m_rtStopDelayed = _I64_MAX;
                SwapRT(rtStart, m_rtStartDelayed);
                SwapRT(rtStop,  m_rtStopDelayed);
                m_nDelayedSurfaceIndex = nSurfaceIndex;
            }
        }
    }

    AddToStore(nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
               false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, 0);

    m_bFlushed = false;

    return DisplayNextFrame();
}
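
// SwapRT() above swaps the delayed B-frame timestamps with the current ones.
// A minimal sketch, assuming it is a plain swap helper (hypothetical
// definition; the real one may live elsewhere in the project):
static void SwapRT(REFERENCE_TIME& rt1, REFERENCE_TIME& rt2)
{
    REFERENCE_TIME rt = rt1;
    rt1 = rt2;
    rt2 = rt;
}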