Example #1
HRESULT OutputPin::Push(void *buf, long size)
{
    HRESULT hr;
    IMediaSample *pSample;
    VIDEOINFOHEADER *vi;
    AM_MEDIA_TYPE *pmt;
    BYTE *dst_buf;

    /**
     * Hold the critical section here as the pin might get disconnected
     * during the Deliver() method call.
     */
    m_pLock->Lock();

    hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
        goto on_error;

    pSample->GetMediaType(&pmt);
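    /*
     * A non-NULL pmt means the sample carries a format change
     * (GetMediaType() leaves it NULL when the type is unchanged), so adopt
     * the new media type and buffer size before filling the frame.
     */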
    if (pmt) {
        mediaType.Set(*pmt);
        bufSize = pmt->lSampleSize;
    }

    pSample->GetPointer(&dst_buf);
    vi = (VIDEOINFOHEADER *)mediaType.pbFormat;
    if (vi->rcSource.right == vi->bmiHeader.biWidth) {
        assert(pSample->GetSize() >= size);
        memcpy(dst_buf, buf, size);
    } else {
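        /*
         * The destination rows follow biWidth while the source rows follow
         * rcSource.right, so work out the bytes-per-pixel from the frame
         * size and copy one row at a time with the two different strides.
         */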
        unsigned i, bpp;
        unsigned dststride, srcstride;
        BYTE *src_buf = (BYTE *)buf;

        bpp = size / abs(vi->bmiHeader.biHeight) / vi->rcSource.right;
        dststride = vi->bmiHeader.biWidth * bpp;
        srcstride = vi->rcSource.right * bpp;
        for (i = abs(vi->bmiHeader.biHeight); i > 0; i--) {
            memcpy(dst_buf, src_buf, srcstride);
            dst_buf += dststride;
            src_buf += srcstride;
        }
    }
    pSample->SetActualDataLength(size);

    hr = Deliver(pSample);

    pSample->Release();

on_error:
    m_pLock->Unlock();
    return hr;
}
Example #2
HRESULT CVideoTransformFilter::Receive(IMediaSample *pSample)
{
    // If the next filter downstream is the video renderer, then it may
    // be able to operate in DirectDraw mode which saves copying the data
    // and gives higher performance.  In that case the buffer which we
    // get from GetDeliveryBuffer will be a DirectDraw buffer, and
    // drawing into this buffer draws directly onto the display surface.
    // This means that any waiting for the correct time to draw occurs
    // during GetDeliveryBuffer, and that once the buffer is given to us
    // the video renderer will count it in its statistics as a frame drawn.
    // This means that any decision to drop the frame must be taken before
    // calling GetDeliveryBuffer.

    ASSERT(CritCheckIn(&m_csReceive));
    AM_MEDIA_TYPE *pmtOut, *pmt;
#ifdef _DEBUG
    FOURCCMap fccOut;
#endif
    HRESULT hr;
    ASSERT(pSample);
    IMediaSample * pOutSample;

    // If no output pin to deliver to then no point sending us data
    ASSERT (m_pOutput != NULL) ;

    // The source filter may dynamically ask us to start transforming from a
    // different media type than the one we're using now.  If we don't switch,
    // we'll draw garbage. (typically, this is a palette change in the movie,
    // but could be something more sinister like the compression type changing,
    // or even the video size changing)

#define rcS1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcSource
#define rcT1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcTarget

    pSample->GetMediaType(&pmt);
    if (pmt != NULL && pmt->pbFormat != NULL) {

	// spew some debug output
	ASSERT(!IsEqualGUID(pmt->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmt->subtype);
	LONG lCompression = HEADER(pmt->pbFormat)->biCompression;
	LONG lBitCount = HEADER(pmt->pbFormat)->biBitCount;
	LONG lStride = (HEADER(pmt->pbFormat)->biWidth * lBitCount + 7) / 8;
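	// round the row stride up to the next DWORD (4-byte) boundary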
	lStride = (lStride + 3) & ~3;
        DbgLog((LOG_TRACE,3,TEXT("*Changing input type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
		fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
		HEADER(pmt->pbFormat)->biHeight,
		rcT1.left, rcT1.top, rcT1.right, rcT1.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
		rcS1.left, rcS1.top, rcS1.right, rcS1.bottom,
		lStride));
#endif

	// now switch to using the new format.  I am assuming that the
	// derived filter will do the right thing when its media type is
	// switched and streaming is restarted.

	StopStreaming();
	m_pInput->CurrentMediaType() = *pmt;
	DeleteMediaType(pmt);
	// if this fails, playback will stop, so signal an error
	hr = StartStreaming();
	if (FAILED(hr)) {
	    return AbortPlayback(hr);
	}
    }

    // Now that we have noticed any format changes on the input sample, it's
    // OK to discard it.

    if (ShouldSkipFrame(pSample)) {
        MSR_NOTE(m_idSkip);
        m_bSampleSkipped = TRUE;
        return NOERROR;
    }

    // Set up the output sample
    hr = InitializeOutputSample(pSample, &pOutSample);

    if (FAILED(hr)) {
        return hr;
    }

    m_bSampleSkipped = FALSE;

    // The renderer may ask us on the fly to start transforming to a
    // different format.  If we don't obey it, we'll draw garbage.

#define rcS ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcSource
#define rcT ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcTarget

    pOutSample->GetMediaType(&pmtOut);
    if (pmtOut != NULL && pmtOut->pbFormat != NULL) {

	// spew some debug output
	ASSERT(!IsEqualGUID(pmtOut->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmtOut->subtype);
	LONG lCompression = HEADER(pmtOut->pbFormat)->biCompression;
	LONG lBitCount = HEADER(pmtOut->pbFormat)->biBitCount;
	LONG lStride = (HEADER(pmtOut->pbFormat)->biWidth * lBitCount + 7) / 8;
	lStride = (lStride + 3) & ~3;
        DbgLog((LOG_TRACE,3,TEXT("*Changing output type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
		fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
		HEADER(pmtOut->pbFormat)->biHeight,
		rcT.left, rcT.top, rcT.right, rcT.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
		rcS.left, rcS.top, rcS.right, rcS.bottom,
		lStride));
#endif

	// now switch to using the new format.  I am assuming that the
	// derived filter will do the right thing when its media type is
	// switched and streaming is restarted.

	StopStreaming();
	m_pOutput->CurrentMediaType() = *pmtOut;
	DeleteMediaType(pmtOut);
	hr = StartStreaming();

	if (SUCCEEDED(hr)) {
	    // a new format means a new empty buffer, so wait for a keyframe
	    // before passing anything on to the renderer.
	    // !!! a keyframe may never come, so give up after 30 frames
            DbgLog((LOG_TRACE,3,TEXT("Output format change means we must wait for a keyframe")));
	    m_nWaitForKey = 30;

	// if this fails, playback will stop, so signal an error
	} else {

            //  Must release the sample before calling AbortPlayback
            //  because we might be holding the win16 lock or
            //  ddraw lock
            pOutSample->Release();
	    AbortPlayback(hr);
            return hr;
	}
    }

    // After a discontinuity, we need to wait for the next key frame
    if (pSample->IsDiscontinuity() == S_OK) {
        DbgLog((LOG_TRACE,3,TEXT("Non-key discontinuity - wait for keyframe")));
	m_nWaitForKey = 30;
    }

    // Start timing the transform (and log it if PERF is defined)

    if (SUCCEEDED(hr)) {
        m_tDecodeStart = timeGetTime();
        MSR_START(m_idTransform);

        // have the derived class transform the data
        hr = Transform(pSample, pOutSample);

        // Stop the clock (and log it if PERF is defined)
        MSR_STOP(m_idTransform);
        m_tDecodeStart = timeGetTime()-m_tDecodeStart;
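        // Fold this decode time into a running average: the 10000/16 factor
        // converts milliseconds to 100ns reference units and weights the new
        // measurement at 1/16 against 15/16 of the previous average.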
        m_itrAvgDecode = m_tDecodeStart*(10000/16) + 15*(m_itrAvgDecode/16);

        // Maybe we're waiting for a keyframe still?
        if (m_nWaitForKey)
            m_nWaitForKey--;
        if (m_nWaitForKey && pSample->IsSyncPoint() == S_OK)
	    m_nWaitForKey = FALSE;

        // if so, then we don't want to pass this on to the renderer
        if (m_nWaitForKey && hr == NOERROR) {
            DbgLog((LOG_TRACE,3,TEXT("still waiting for a keyframe")));
	    hr = S_FALSE;
	}
    }

    if (FAILED(hr)) {
        DbgLog((LOG_TRACE,1,TEXT("Error from video transform")));
    } else {
        // the Transform() function can return S_FALSE to indicate that the
        // sample should not be delivered; we only deliver the sample if it's
        // really S_OK (same as NOERROR, of course.)
        // Try not to return S_FALSE to a direct draw buffer (it's wasteful)
        // Try to take the decision earlier - before you get the buffer.

        if (hr == NOERROR) {
    	    hr = m_pOutput->Deliver(pOutSample);
        } else {
            // S_FALSE returned from Transform is a PRIVATE agreement
            // We should return NOERROR from Receive() in this case because returning S_FALSE
            // from Receive() means that this is the end of the stream and no more data should
            // be sent.
            if (S_FALSE == hr) {

                //  We must Release() the sample before doing anything
                //  like calling the filter graph because having the
                //  sample means we may have the DirectDraw lock
                //  (== win16 lock on some versions)
                pOutSample->Release();
                m_bSampleSkipped = TRUE;
                if (!m_bQualityChanged) {
                    m_bQualityChanged = TRUE;
                    NotifyEvent(EC_QUALITY_CHANGE,0,0);
                }
                return NOERROR;
            }
        }
    }

    // release the output buffer. If the connected pin still needs it,
    // it will have addrefed it itself.
    pOutSample->Release();
    ASSERT(CritCheckIn(&m_csReceive));

    return hr;
}
Example #3
GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
{
    IMediaSample *pSample = NULL;
    byte *data = GST_BUFFER_DATA (buffer);
    int attempts = 0;
    HRESULT hres;
    BYTE *sample_buffer;
    AM_MEDIA_TYPE *mediatype;

    StartUsingOutputPin();

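    /* GetDeliveryBuffer() can fail transiently (for instance while no free
     * sample is available), so retry a bounded number of times with a short
     * sleep rather than failing immediately. */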
    while (attempts < MAX_ATTEMPTS)
    {
        hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    if (FAILED (hres))
    {
        StopUsingOutputPin();
        GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
        return GST_FLOW_ERROR;
    }

    pSample->GetPointer(&sample_buffer);
    pSample->GetMediaType(&mediatype);
    if (mediatype)
        SetMediaType (mediatype);

    if(sample_buffer)
    {
        /* Copy to the destination stride.
         * This is not just a simple memcpy because of the different strides.
         * TODO: optimise for the same-stride case and avoid the copy entirely.
         */
        CopyToDestinationBuffer (data, sample_buffer);
    }

    pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
    pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
    pSample->SetPreroll(FALSE); /* For non-displayed frames.
                                 Not used in GStreamer */

    /* Disable synchronising on this sample. We instead let GStreamer handle
     * this at a higher level, inside BaseSink. */
    pSample->SetTime(NULL, NULL);

    while (attempts < MAX_ATTEMPTS)
    {
        hres = Deliver(pSample);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    pSample->Release();

    StopUsingOutputPin();

    if (SUCCEEDED (hres))
        return GST_FLOW_OK;
    else {
        GST_WARNING_OBJECT (this, "Failed to deliver sample: %x", hres);
        if (hres == VFW_E_NOT_CONNECTED)
            return GST_FLOW_NOT_LINKED;
        else
            return GST_FLOW_ERROR;
    }
}
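
Both the row loop in the else branch of Example #1 and CopyToDestinationBuffer() in Example #3 come down to the same stride-aware frame copy. The sketch below shows that pattern, including the same-stride fast path the TODO in Example #3 alludes to; the function and parameter names are illustrative and do not appear in either source.

#include <string.h>

/* Copy a frame whose rows are srcStride bytes apart into a buffer whose
 * rows are dstStride bytes apart. Only the first rowBytes bytes of each
 * row carry pixel data; anything beyond that is padding. */
static void CopyWithStride(unsigned char *dst, unsigned dstStride,
                           const unsigned char *src, unsigned srcStride,
                           unsigned rowBytes, unsigned rows)
{
    if (dstStride == srcStride && srcStride == rowBytes) {
        /* Same-stride case: the frame is contiguous, one memcpy suffices. */
        memcpy(dst, src, (size_t)rowBytes * rows);
        return;
    }
    for (unsigned i = 0; i < rows; i++) {
        memcpy(dst, src, rowBytes);
        dst += dstStride;
        src += srcStride;
    }
}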