Example 1
HRESULT CBufferFilter::Receive(IMediaSample* pSample)
{
	/*  Check for other streams and pass them on */
	AM_SAMPLE2_PROPERTIES* const pProps = m_pInput->SampleProps();
	if(pProps->dwStreamId != AM_STREAM_MEDIA)
		return m_pOutput->Deliver(pSample);

	HRESULT hr;
	ASSERT(pSample);
	IMediaSample* pOutSample;

	ASSERT(m_pOutput != NULL);

	// Set up the output sample
	hr = InitializeOutputSample(pSample, &pOutSample);

	if(FAILED(hr))
		return hr;

	// Start timing the transform (if PERF is defined)
	MSR_START(m_idTransform);

	// Have the derived class transform the data
	hr = Transform(pSample, pOutSample);

	// Stop the clock and log it (if PERF is defined)
	MSR_STOP(m_idTransform);

	if(FAILED(hr)) {
		DbgLog((LOG_TRACE,1,TEXT("Error from transform")));
	}
	else {
		// the Transform() function can return S_FALSE to indicate that the
		// sample should not be delivered; we only deliver the sample if it's
		// really S_OK (same as NOERROR, of course.)
		if(hr == NOERROR) {
			hr = m_pOutput->Deliver(pOutSample);
			m_bSampleSkipped = FALSE;   // the last sample was delivered, not dropped
		}
		else {
			// S_FALSE returned from Transform is a PRIVATE agreement
			// We should return NOERROR from Receive() in this case, because
			// returning S_FALSE from Receive() means that this is the end of
			// the stream and no more data should be sent.
			if(S_FALSE == hr) {

				//  Release the sample before calling notify to avoid
				//  deadlocks if the sample holds a lock on the system
				//  such as DirectDraw buffers do
				pOutSample->Release();
				m_bSampleSkipped = TRUE;
				if(!m_bQualityChanged) {
					NotifyEvent(EC_QUALITY_CHANGE,0,0);
					m_bQualityChanged = TRUE;
				}
				return NOERROR;
			}
		}
	}

	// release the output buffer. If the connected pin still needs it,
	// it will have addrefed it itself.
	pOutSample->Release();

	return hr;
}
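
For context, the base-class code above treats S_FALSE from Transform() as a private "drop this sample" signal. Below is a minimal sketch of a derived Transform() that uses that contract; CMyFilter and the 16-byte threshold are illustrative assumptions, and the DirectShow base classes are presumed available.

// Sketch of a derived Transform(): copy the payload through, but ask
// Receive() to drop undersized samples by returning S_FALSE.
HRESULT CMyFilter::Transform(IMediaSample* pIn, IMediaSample* pOut)
{
	BYTE* pSrc;
	BYTE* pDst;
	HRESULT hr = pIn->GetPointer(&pSrc);
	if(FAILED(hr))
		return hr;
	hr = pOut->GetPointer(&pDst);
	if(FAILED(hr))
		return hr;

	long len = pIn->GetActualDataLength();
	if(len < 16)
		return S_FALSE;             // skip this sample; NOT end-of-stream
	if(pOut->GetSize() < len)
		return E_FAIL;              // output buffer too small

	CopyMemory(pDst, pSrc, len);
	pOut->SetActualDataLength(len);
	return NOERROR;                 // S_OK: deliver downstream
}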
Example 2
HRESULT CVideoTransformFilter::Receive(IMediaSample *pSample)
{
    // If the next filter downstream is the video renderer, then it may
    // be able to operate in DirectDraw mode which saves copying the data
    // and gives higher performance.  In that case the buffer which we
    // get from GetDeliveryBuffer will be a DirectDraw buffer, and
    // drawing into this buffer draws directly onto the display surface.
    // This means that any waiting for the correct time to draw occurs
    // during GetDeliveryBuffer, and that once the buffer is given to us
    // the video renderer will count it in its statistics as a frame drawn.
    // This means that any decision to drop the frame must be taken before
    // calling GetDeliveryBuffer.

    ASSERT(CritCheckIn(&m_csReceive));
    AM_MEDIA_TYPE *pmtOut, *pmt;
#ifdef _DEBUG
    FOURCCMap fccOut;
#endif
    HRESULT hr;
    ASSERT(pSample);
    IMediaSample * pOutSample;

    // If no output pin to deliver to then no point sending us data
    ASSERT (m_pOutput != NULL) ;

    // The source filter may dynamically ask us to start transforming from a
    // different media type than the one we're using now.  If we don't, we'll
    // draw garbage.  (Typically this is a palette change in the movie, but it
    // could be something more sinister, like the compression type or even the
    // video size changing.)

#define rcS1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcSource
#define rcT1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcTarget

    pSample->GetMediaType(&pmt);
    if (pmt != NULL && pmt->pbFormat != NULL) {

        // spew some debug output
        ASSERT(!IsEqualGUID(pmt->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmt->subtype);
        LONG lCompression = HEADER(pmt->pbFormat)->biCompression;
        LONG lBitCount = HEADER(pmt->pbFormat)->biBitCount;
        LONG lStride = (HEADER(pmt->pbFormat)->biWidth * lBitCount + 7) / 8;
        lStride = (lStride + 3) & ~3;
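        // the two lines above compute the DIB stride: the row size in bytes,
        // rounded up to a multiple of 4 for DWORD alignment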
        DbgLog((LOG_TRACE,3,TEXT("*Changing input type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
		fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
		HEADER(pmt->pbFormat)->biHeight,
		rcT1.left, rcT1.top, rcT1.right, rcT1.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
		rcS1.left, rcS1.top, rcS1.right, rcS1.bottom,
		lStride));
#endif

        // Now switch to using the new format.  I am assuming that the
        // derived filter will do the right thing when its media type is
        // switched and streaming is restarted.

        StopStreaming();
        m_pInput->CurrentMediaType() = *pmt;
        DeleteMediaType(pmt);
        // if StartStreaming fails, playback will stop, so signal an error
        hr = StartStreaming();
        if (FAILED(hr)) {
            return AbortPlayback(hr);
        }
    }

    // Now that we have noticed any format changes on the input sample, it's
    // OK to discard it.

    if (ShouldSkipFrame(pSample)) {
        MSR_NOTE(m_idSkip);
        m_bSampleSkipped = TRUE;
        return NOERROR;
    }

    // Set up the output sample
    hr = InitializeOutputSample(pSample, &pOutSample);

    if (FAILED(hr)) {
        return hr;
    }

    m_bSampleSkipped = FALSE;

    // The renderer may ask us on the fly to start transforming to a
    // different format.  If we don't obey, we'll draw garbage.

#define rcS ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcSource
#define rcT ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcTarget

    pOutSample->GetMediaType(&pmtOut);
    if (pmtOut != NULL && pmtOut->pbFormat != NULL) {

        // spew some debug output
        ASSERT(!IsEqualGUID(pmtOut->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmtOut->subtype);
        LONG lCompression = HEADER(pmtOut->pbFormat)->biCompression;
        LONG lBitCount = HEADER(pmtOut->pbFormat)->biBitCount;
        LONG lStride = (HEADER(pmtOut->pbFormat)->biWidth * lBitCount + 7) / 8;
        lStride = (lStride + 3) & ~3;
        DbgLog((LOG_TRACE,3,TEXT("*Changing output type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
                fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
                HEADER(pmtOut->pbFormat)->biHeight,
                rcT.left, rcT.top, rcT.right, rcT.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
                rcS.left, rcS.top, rcS.right, rcS.bottom,
                lStride));
#endif

        // Now switch to using the new format.  I am assuming that the
        // derived filter will do the right thing when its media type is
        // switched and streaming is restarted.

        StopStreaming();
        m_pOutput->CurrentMediaType() = *pmtOut;
        DeleteMediaType(pmtOut);
        hr = StartStreaming();

        if (SUCCEEDED(hr)) {
            // A new format means a new, empty buffer, so wait for a keyframe
            // before passing anything on to the renderer.
            // !!! a keyframe may never come, so give up after 30 frames
            DbgLog((LOG_TRACE,3,TEXT("Output format change means we must wait for a keyframe")));
            m_nWaitForKey = 30;

        // if StartStreaming fails, playback will stop, so signal an error
        } else {

            // Must release the sample before calling AbortPlayback
            // because we might be holding the win16 lock or
            // ddraw lock
            pOutSample->Release();
            AbortPlayback(hr);
            return hr;
        }
    }

    // After a discontinuity, we need to wait for the next key frame
    if (pSample->IsDiscontinuity() == S_OK) {
        DbgLog((LOG_TRACE,3,TEXT("Non-key discontinuity - wait for keyframe")));
        m_nWaitForKey = 30;
    }

    // Start timing the transform (and log it if PERF is defined)

    if (SUCCEEDED(hr)) {
        m_tDecodeStart = timeGetTime();
        MSR_START(m_idTransform);

        // have the derived class transform the data
        hr = Transform(pSample, pOutSample);

        // Stop the clock (and log it if PERF is defined)
        MSR_STOP(m_idTransform);
        m_tDecodeStart = timeGetTime()-m_tDecodeStart;
        m_itrAvgDecode = m_tDecodeStart*(10000/16) + 15*(m_itrAvgDecode/16);
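        // m_itrAvgDecode is an exponential moving average in 100ns units:
        // the new decode time (ms * 10000 = 100ns units) gets weight 1/16
        // and the previous average keeps the remaining 15/16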

        // Maybe we're still waiting for a keyframe?
        if (m_nWaitForKey)
            m_nWaitForKey--;
        if (m_nWaitForKey && pSample->IsSyncPoint() == S_OK)
            m_nWaitForKey = 0;

        // If so, then we don't want to pass this on to the renderer
        if (m_nWaitForKey && hr == NOERROR) {
            DbgLog((LOG_TRACE,3,TEXT("still waiting for a keyframe")));
            hr = S_FALSE;
        }
    }

    if (FAILED(hr)) {
        DbgLog((LOG_TRACE,1,TEXT("Error from video transform")));
    } else {
        // the Transform() function can return S_FALSE to indicate that the
        // sample should not be delivered; we only deliver the sample if it's
        // really S_OK (same as NOERROR, of course.)
        // Try not to return S_FALSE into a DirectDraw buffer (it's wasteful);
        // try to make that decision earlier, before the buffer is acquired.

        if (hr == NOERROR) {
            hr = m_pOutput->Deliver(pOutSample);
        } else {
            // S_FALSE returned from Transform is a PRIVATE agreement
            // We should return NOERROR from Receive() in this case because returning S_FALSE
            // from Receive() means that this is the end of the stream and no more data should
            // be sent.
            if (S_FALSE == hr) {

                //  We must Release() the sample before doing anything
                //  like calling the filter graph because having the
                //  sample means we may have the DirectDraw lock
                //  (== win16 lock on some versions)
                pOutSample->Release();
                m_bSampleSkipped = TRUE;
                if (!m_bQualityChanged) {
                    m_bQualityChanged = TRUE;
                    NotifyEvent(EC_QUALITY_CHANGE,0,0);
                }
                return NOERROR;
            }
        }
    }

    // release the output buffer. If the connected pin still needs it,
    // it will have addrefed it itself.
    pOutSample->Release();
    ASSERT(CritCheckIn(&m_csReceive));

    return hr;
}
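
One line above is worth unpacking: the m_itrAvgDecode update keeps an exponential moving average of the decode time in 100ns reference-time units, giving the new measurement a weight of 1/16. A standalone check of that arithmetic with a made-up 8 ms decode time (this program is illustrative, not part of the filter):

#include <cstdio>

int main()
{
    long avg = 0;                   // plays the role of m_itrAvgDecode
    const long decode_ms = 8;       // pretend every frame takes 8 ms

    // Same update as the filter: avg' = ms*(10000/16) + 15*(avg/16),
    // where ms*10000 converts milliseconds to 100ns units
    for (int i = 0; i < 256; ++i)
        avg = decode_ms * (10000 / 16) + 15 * (avg / 16);

    // Converges to just under 80000 (8 ms in 100ns units); integer
    // division keeps it a few units below the exact fixed point
    std::printf("avg = %ld\n", avg);
    return 0;
}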
Example 3
STDMETHODIMP CStreamSwitcherInputPin::Receive(IMediaSample* pSample)
{
    AM_MEDIA_TYPE* pmt = nullptr;
    if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt) {
        const CMediaType mt(*pmt);
        DeleteMediaType(pmt), pmt = nullptr;
        SetMediaType(&mt);
    }

    // Note: blocking does not work if the stream we are blocking shares
    // its thread with another stream, as MPEG splitters usually do; that
    // defeats the multithreaded blocking strategy built up here.

#ifdef BLOCKSTREAM
    if (m_fCanBlock) {
        m_evBlock.Wait();
    }
#endif

    if (!IsActive()) {
#ifdef BLOCKSTREAM
        if (m_fCanBlock) {
            return S_FALSE;
        }
#endif

        TRACE(_T("&^$#@ : a stupid fix for this stupid problem\n"));
        //Sleep(32);
        return E_FAIL; // a stupid fix for this stupid problem
    }

    CAutoLock cAutoLock(&m_csReceive);

    CStreamSwitcherOutputPin* pOut = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->GetOutputPin();
    ASSERT(pOut->GetConnected());

    HRESULT hr = __super::Receive(pSample);
    if (S_OK != hr) {
        return hr;
    }

    if (m_SampleProps.dwStreamId != AM_STREAM_MEDIA) {
        return pOut->Deliver(pSample);
    }

    //

    ALLOCATOR_PROPERTIES props, actual;
    hr = m_pAllocator->GetProperties(&props);
    hr = pOut->CurrentAllocator()->GetProperties(&actual);

    REFERENCE_TIME rtStart = 0, rtStop = 0;
    if (S_OK == pSample->GetTime(&rtStart, &rtStop)) {
        // timestamps are available here, but this filter does not use them
    }

    long cbBuffer = pSample->GetActualDataLength();

    CMediaType mtOut = m_mt;
    mtOut = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->CreateNewOutputMediaType(mtOut, cbBuffer);

    bool fTypeChanged = false;

    if (mtOut != pOut->CurrentMediaType() || cbBuffer > actual.cbBuffer) {
        fTypeChanged = true;

        m_SampleProps.dwSampleFlags |= AM_SAMPLE_TYPECHANGED/*|AM_SAMPLE_DATADISCONTINUITY|AM_SAMPLE_TIMEDISCONTINUITY*/;

        /*
                if (CComQIPtr<IPinConnection> pPC = pOut->CurrentPinConnection())
                {
                    HANDLE hEOS = CreateEvent(nullptr, FALSE, FALSE, nullptr);
                    hr = pPC->NotifyEndOfStream(hEOS);
                    hr = pOut->DeliverEndOfStream();
                    WaitForSingleObject(hEOS, 3000);
                    CloseHandle(hEOS);
                    hr = pOut->DeliverBeginFlush();
                    hr = pOut->DeliverEndFlush();
                }
        */

        if (props.cBuffers < 8 && mtOut.majortype == MEDIATYPE_Audio) {
            props.cBuffers = 8;
        }

        props.cbBuffer = cbBuffer;

        if (actual.cbAlign != props.cbAlign
                || actual.cbPrefix != props.cbPrefix
                || actual.cBuffers < props.cBuffers
                || actual.cbBuffer < props.cbBuffer) {
            hr = pOut->DeliverBeginFlush();
            hr = pOut->DeliverEndFlush();
            hr = pOut->CurrentAllocator()->Decommit();
            hr = pOut->CurrentAllocator()->SetProperties(&props, &actual);
            hr = pOut->CurrentAllocator()->Commit();
        }
    }

    CComPtr<IMediaSample> pOutSample;
    if (FAILED(InitializeOutputSample(pSample, &pOutSample))) {
        return E_FAIL;
    }

    pmt = nullptr;
    if (SUCCEEDED(pOutSample->GetMediaType(&pmt)) && pmt) {
        const CMediaType mt(*pmt);
        DeleteMediaType(pmt), pmt = nullptr;
        // TODO
        ASSERT(0);
    }

    if (fTypeChanged) {
        pOut->SetMediaType(&mtOut);
        (static_cast<CStreamSwitcherFilter*>(m_pFilter))->OnNewOutputMediaType(m_mt, mtOut);
        pOutSample->SetMediaType(&mtOut);
    }

    // Transform

    hr = (static_cast<CStreamSwitcherFilter*>(m_pFilter))->Transform(pSample, pOutSample);

    //

    if (S_OK == hr) {
        hr = pOut->Deliver(pOutSample);
        m_bSampleSkipped = FALSE;
        /*
                if (FAILED(hr))
                {
                    ASSERT(0);
                }
        */
    } else if (S_FALSE == hr) {
        hr = S_OK;
        pOutSample = nullptr;
        m_bSampleSkipped = TRUE;

        if (!m_bQualityChanged) {
            m_pFilter->NotifyEvent(EC_QUALITY_CHANGE, 0, 0);
            m_bQualityChanged = TRUE;
        }
    }

    return hr;
}
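
The DeliverBeginFlush/DeliverEndFlush followed by Decommit/SetProperties/Commit above is the standard DirectShow sequence for enlarging a committed allocator: flushing clears in-flight samples, and IMemAllocator only accepts new properties while decommitted. The same steps as a sketch of a standalone helper (ReallocateBuffers is a hypothetical name, not part of the sample):

// Sketch: renegotiate an output pin's allocator to the requested properties.
static HRESULT ReallocateBuffers(CStreamSwitcherOutputPin* pOut,
                                 ALLOCATOR_PROPERTIES* pRequest)
{
    // Flush so no sample from the old allocator is still in flight
    HRESULT hr = pOut->DeliverBeginFlush();
    if (SUCCEEDED(hr)) hr = pOut->DeliverEndFlush();

    // Properties may only change while the allocator is decommitted
    if (SUCCEEDED(hr)) hr = pOut->CurrentAllocator()->Decommit();

    ALLOCATOR_PROPERTIES actual;
    if (SUCCEEDED(hr)) hr = pOut->CurrentAllocator()->SetProperties(pRequest, &actual);
    if (SUCCEEDED(hr)) hr = pOut->CurrentAllocator()->Commit();
    return hr;
}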
Example 4
HRESULT CWavPackDSDecoder::Receive(IMediaSample *pSample)
{
    //  Check for other streams and pass them on 
    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps(); 
    if ((pProps->dwStreamId != AM_STREAM_MEDIA) &&
        (pProps->dwStreamId != AM_STREAM_BLOCK_ADDITIONNAL))
    {
        return m_pOutput->Deliver(pSample);
    }
    
    ASSERT(pSample);
    // If no output to deliver to then no point sending us data 
    ASSERT(m_pOutput != NULL);

    HRESULT hr = S_OK;
    BYTE *pSrc, *pDst;
    DWORD SrcLength = pSample->GetActualDataLength();
    hr = pSample->GetPointer(&pSrc);
    if(FAILED(hr))
        return hr;
    
    // Blocks below the minimal size cannot hold a valid frame; swallow them
    if(SrcLength < (3 * sizeof(uint32_t)))
    {
        return S_OK;
    }

    WAVEFORMATEX* pwfx = (WAVEFORMATEX*)m_pInput->CurrentMediaType().Format();
    BOOL bSeveralBlocks = (pwfx->nChannels > 2);
 
    if(pProps->dwStreamId == AM_STREAM_MEDIA)
    {
        REFERENCE_TIME rtStop;
        if(pSample->IsSyncPoint() == S_OK)
        {
            pSample->GetTime(&m_rtFrameStart, &rtStop);
            m_TotalSamples = 0;
        }

        m_MainBlockDiscontinuity = (pSample->IsDiscontinuity() == S_OK);

        reconstruct_wavpack_frame(
            m_MainFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,            
            TRUE,
            bSeveralBlocks,
            m_PrivateData.version);

        if(m_HybridMode == TRUE)
        {
            // Stop here and wait for correction data
            return S_OK;
        }
    }
    
    if((m_HybridMode == TRUE) && 
       (pProps->dwStreamId == AM_STREAM_BLOCK_ADDITIONNAL))
    {
        // rebuild correction data block
        reconstruct_wavpack_frame(
            m_CorrectionFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,
            FALSE,
            bSeveralBlocks,
            m_PrivateData.version);
    }

    if(wavpack_buffer_decoder_load_frame(m_Codec, m_MainFrame->data, m_MainFrame->len,
        m_HybridMode ? m_CorrectionFrame->data : NULL,
        m_HybridMode ? m_CorrectionFrame->len : 0) == 0)
    {
        // The decoder could not parse the frame; drop it
        return S_FALSE;
    }
   
    // We can now determine the decoding mode
    if(m_HybridMode == FALSE)
    {
        if(m_CommonFrameData.array_flags[0] & WV_HYBRID_FLAG)
        {
            m_DecodingMode = DECODING_MODE_LOSSY;
        }
        else
        {
            m_DecodingMode = DECODING_MODE_LOSSLESS;
        }
    }

    uint32_t samplesLeft = m_CommonFrameData.block_samples;
    while(samplesLeft > 0)
    {
        // Set up the output sample
        IMediaSample *pOutSample;
        hr = InitializeOutputSample(pSample, &pOutSample);
        if(FAILED(hr))
        {
            break;
        }
    
        DWORD DstLength = pOutSample->GetSize();
        hr = pOutSample->GetPointer(&pDst);
        if(FAILED(hr))
        {
            pOutSample->Release();
            break;
        }

        DstLength &= 0xFFFFFFF8;
    
        long samples = wavpack_buffer_decoder_unpack(m_Codec,(int32_t *)pDst, m_SamplesPerBuffer);
        if(samples)
        {
            wavpack_buffer_format_samples(m_Codec,
                (uchar *) pDst,
                (long*) pDst,
                samples);
            
            DstLength = samples *
                WavpackGetBytesPerSample(m_Codec->wpc) *
                WavpackGetNumChannels (m_Codec->wpc);

            pOutSample->SetActualDataLength(DstLength);
            
            REFERENCE_TIME rtStart, rtStop;
            rtStart = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);
            m_TotalSamples += samples;
            rtStop = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);

            if(rtStart < 0 && rtStop < 0)
            {
                // No need to deliver this sample; it would be skipped anyway
                pOutSample->Release();
                continue;
            }
            pOutSample->SetTime(&rtStart, &rtStop);
            pOutSample->SetSyncPoint(TRUE);
            pOutSample->SetDiscontinuity(m_MainBlockDiscontinuity);
            if(m_MainBlockDiscontinuity == TRUE)
            {
                m_MainBlockDiscontinuity = FALSE;
            }

            hr = m_pOutput->Deliver(pOutSample);
            if(FAILED(hr))
            {
                pOutSample->Release();
                break;
            }
            pOutSample->Release();
        }
        else
        {
            pOutSample->Release();
            break;
        }
        samplesLeft -= samples;
    }
    
    m_DecodedFrames++;
    m_CrcError = WavpackGetNumErrors(m_Codec->wpc);
    
    return S_OK;
}
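
The rtStart/rtStop computation above converts a running sample count into 100ns reference time: samples divided by the sample rate gives seconds, and multiplying by 10^7 gives REFERENCE_TIME units. The same arithmetic in isolation, with made-up values (the 44.1 kHz rate and 4096-sample buffer are illustrative):

#include <cstdint>
#include <cstdio>

typedef int64_t REFERENCE_TIME;         // 100ns units, as in DirectShow

int main()
{
    const double sampleRate = 44100.0;  // stands in for WavpackGetSampleRate()
    const REFERENCE_TIME frameStart = 0;
    uint64_t totalSamples = 0;

    const long samples = 4096;          // one decoded buffer
    REFERENCE_TIME rtStart = frameStart +
        (REFERENCE_TIME)(((double)totalSamples / sampleRate) * 10000000);
    totalSamples += samples;
    REFERENCE_TIME rtStop = frameStart +
        (REFERENCE_TIME)(((double)totalSamples / sampleRate) * 10000000);

    // 4096 samples at 44.1 kHz is ~92.88 ms, i.e. 928798 units of 100ns
    std::printf("start=%lld stop=%lld\n", (long long)rtStart, (long long)rtStop);
    return 0;
}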