Example #1
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    DWORD bufsize;
    int ret;

    TRACE("%p, %p\n", This, sample);

    mark_wine_thread();

    EnterCriticalSection(&This->tf.csReceive);
    IMediaSample_GetPointer(sample, &data);

    IMediaSample_AddRef(sample);
    bufsize = IMediaSample_GetActualDataLength(sample);
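    /* Hand the sample's memory to GStreamer without copying it; the reference
     * taken above keeps the sample alive until release_sample_wrapper runs. */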
    buf = gst_buffer_new_wrapped_full(0, data, bufsize, 0, bufsize, sample, release_sample_wrapper);
    if (!buf) {
        IMediaSample_Release(sample);
        LeaveCriticalSection(&This->tf.csReceive);
        return S_OK;
    }

    IMediaSample_AddRef(sample);
    gst_mini_object_set_qdata(GST_MINI_OBJECT(buf), g_quark_from_static_string(media_quark_string), sample, release_sample_wrapper);

    /* DirectShow REFERENCE_TIME values are in 100 ns units; GStreamer timestamps are in nanoseconds. */
    buf->duration = buf->pts = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->pts = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.csReceive);
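    /* Push downstream without holding the receive lock. */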
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_FLUSHING)
        return VFW_E_WRONG_STATE;
    return S_OK;
}
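
(Note: release_sample_wrapper is not part of this listing. A minimal sketch of such a GDestroyNotify callback, under the assumption that its only job is to drop the reference taken on the sample before wrapping, might look like this:)

static void release_sample_wrapper(gpointer data)
{
    /* Hypothetical sketch: drop the reference added in Gstreamer_transform_ProcessData
     * once GStreamer has finished with the wrapped memory. */
    IMediaSample_Release((IMediaSample *)data);
}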
Example #2
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    int ret;
    TRACE("Reading %p\n", sample);

    EnterCriticalSection(&This->tf.filter.csFilter);
    IMediaSample_GetPointer(sample, &data);
    buf = gst_app_buffer_new(data, IMediaSample_GetActualDataLength(sample), release_sample, sample);
    if (!buf) {
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return S_OK;
    }
    gst_buffer_set_caps(buf, gst_pad_get_caps_reffed(This->my_src));
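    /* The reference taken below is dropped by release_sample once GStreamer frees the buffer. */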
    IMediaSample_AddRef(sample);
    buf->duration = buf->timestamp = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->timestamp = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_PREROLL);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.filter.csFilter);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_ERROR)
        return E_FAIL;
    if (ret == GST_FLOW_WRONG_STATE)
        return VFW_E_WRONG_STATE;
    if (ret == GST_FLOW_RESEND)
        return S_FALSE;
    return S_OK;
}
Example #3
static HRESULT WINAPI AVIDec_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    AVIDecImpl* This = (AVIDecImpl *)tf;
    AM_MEDIA_TYPE amt;
    HRESULT hr;
    DWORD res;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream;
    LPBYTE pbDstStream;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    LONGLONG tStart, tStop;
    DWORD flags = 0;

    EnterCriticalSection(&This->tf.csReceive);
    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr)) {
        ERR("Unable to retrieve media type\n");
        goto error;
    }

    /* Update input size to match sample size */
    This->pBihIn->biSizeImage = cbSrcStream;

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
    if (FAILED(hr)) {
        ERR("Unable to get delivery buffer (%x)\n", hr);
        goto error;
    }

    hr = IMediaSample_SetActualDataLength(pOutSample, 0);
    assert(hr == S_OK);

    hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
    if (FAILED(hr)) {
        ERR("Unable to get pointer to buffer (%x)\n", hr);
        goto error;
    }
    cbDstStream = IMediaSample_GetSize(pOutSample);
    if (cbDstStream < This->pBihOut->biSizeImage) {
        ERR("Sample size is too small %d < %d\n", cbDstStream, This->pBihOut->biSizeImage);
        hr = E_FAIL;
        goto error;
    }

    if (IMediaSample_IsPreroll(pSample) == S_OK)
        flags |= ICDECOMPRESS_PREROLL;
    if (IMediaSample_IsSyncPoint(pSample) != S_OK)
        flags |= ICDECOMPRESS_NOTKEYFRAME;
    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (hr == S_OK && AVIDec_DropSample(This, tStart))
        flags |= ICDECOMPRESS_HURRYUP;

    res = ICDecompress(This->hvid, flags, This->pBihIn, pbSrcStream, This->pBihOut, pbDstStream);
    if (res != ICERR_OK)
        ERR("Error occurred during the decompression (%x)\n", res);

    /* Drop the sample if it is intended to be dropped */
    if (flags & ICDECOMPRESS_HURRYUP) {
        hr = S_OK;
        goto error;
    }

    IMediaSample_SetActualDataLength(pOutSample, This->pBihOut->biSizeImage);

    IMediaSample_SetPreroll(pOutSample, (IMediaSample_IsPreroll(pSample) == S_OK));
    IMediaSample_SetDiscontinuity(pOutSample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));
    IMediaSample_SetSyncPoint(pOutSample, (IMediaSample_IsSyncPoint(pSample) == S_OK));

    if (hr == S_OK)
        IMediaSample_SetTime(pOutSample, &tStart, &tStop);
    else if (hr == VFW_S_NO_STOP_TIME)
        IMediaSample_SetTime(pOutSample, &tStart, NULL);
    else
        IMediaSample_SetTime(pOutSample, NULL, NULL);

    if (IMediaSample_GetMediaTime(pSample, &tStart, &tStop) == S_OK)
        IMediaSample_SetMediaTime(pOutSample, &tStart, &tStop);
    else
        IMediaSample_SetMediaTime(pOutSample, NULL, NULL);

    LeaveCriticalSection(&This->tf.csReceive);
    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
    EnterCriticalSection(&This->tf.csReceive);
    if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        ERR("Error sending sample (%x)\n", hr);

error:
    if (pOutSample)
        IMediaSample_Release(pOutSample);

    LeaveCriticalSection(&This->tf.csReceive);
    return hr;
}
Example #4
HRESULT WINAPI BaseRendererImpl_Receive(BaseRenderer *This, IMediaSample * pSample)
{
    HRESULT hr = S_OK;
    REFERENCE_TIME start, stop;
    AM_MEDIA_TYPE *pmt;

    TRACE("(%p)->%p\n", This, pSample);

    if (This->pInputPin->end_of_stream || This->pInputPin->flushing)
        return S_FALSE;

    if (This->filter.state == State_Stopped)
        return VFW_E_WRONG_STATE;

    if (IMediaSample_GetMediaType(pSample, &pmt) == S_OK)
    {
        if (FAILED(This->pFuncsTable->pfnCheckMediaType(This, pmt)))
        {
            return VFW_E_TYPE_NOT_ACCEPTED;
        }
    }

    /* Hold on to the sample as the pending sample until it has been rendered. */
    This->pMediaSample = pSample;
    IMediaSample_AddRef(pSample);

    if (This->pFuncsTable->pfnPrepareReceive)
        hr = This->pFuncsTable->pfnPrepareReceive(This, pSample);
    if (FAILED(hr))
    {
        if (hr == VFW_E_SAMPLE_REJECTED)
            return S_OK;
        else
            return hr;
    }

    if (This->pFuncsTable->pfnPrepareRender)
        This->pFuncsTable->pfnPrepareRender(This);

    EnterCriticalSection(&This->csRenderLock);
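    /* While paused, receipt of a sample completes the pause state transition. */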
    if ( This->filter.state == State_Paused )
    {
        if (This->pFuncsTable->pfnOnReceiveFirstSample)
            This->pFuncsTable->pfnOnReceiveFirstSample(This, pSample);

        SetEvent(This->evComplete);
    }

    /* Wait for the render time */
    if (SUCCEEDED(IMediaSample_GetMediaTime(pSample, &start, &stop)))
    {
        hr = S_FALSE;
        RendererPosPassThru_RegisterMediaTime(This->pPosition, start);
        if (This->pFuncsTable->pfnShouldDrawSampleNow)
            hr = This->pFuncsTable->pfnShouldDrawSampleNow(This, pSample, &start, &stop);

        if (hr == S_OK)
            ;/* Do not wait: drop through */
        else if (hr == S_FALSE)
        {
            if (This->pFuncsTable->pfnOnWaitStart)
                This->pFuncsTable->pfnOnWaitStart(This);

            hr = QualityControlRender_WaitFor(This->qcimpl, pSample, This->RenderEvent);

            if (This->pFuncsTable->pfnOnWaitEnd)
                This->pFuncsTable->pfnOnWaitEnd(This);
        }
        else
        {
            LeaveCriticalSection(&This->csRenderLock);
            /* Drop Sample */
            return S_OK;
        }
    }

    if (SUCCEEDED(hr))
    {
        QualityControlRender_BeginRender(This->qcimpl);
        hr = This->pFuncsTable->pfnDoRenderSample(This, pSample);
        QualityControlRender_EndRender(This->qcimpl);
    }

    QualityControlRender_DoQOS(This->qcimpl);

    BaseRendererImpl_ClearPendingSample(This);
    LeaveCriticalSection(&This->csRenderLock);

    return hr;
}
Example #5
static HRESULT WINAPI ACMWrapper_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    ACMWrapperImpl* This = impl_from_TransformFilter(tf);
    AM_MEDIA_TYPE amt;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream, cbSrcStream;
    LPBYTE pbDstStream;
    LPBYTE pbSrcStream = NULL;
    ACMSTREAMHEADER ash;
    BOOL unprepare_header = FALSE, preroll;
    MMRESULT res;
    HRESULT hr;
    LONGLONG tStart = -1, tStop = -1, tMed;
    LONGLONG mtStart = -1, mtStop = -1, mtMed;

    EnterCriticalSection(&This->tf.csReceive);
    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        LeaveCriticalSection(&This->tf.csReceive);
        return hr;
    }

    preroll = (IMediaSample_IsPreroll(pSample) == S_OK);

    IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (IMediaSample_GetMediaTime(pSample, &mtStart, &mtStop) != S_OK)
        mtStart = mtStop = -1;
    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    /* Prevent discontinuities when codecs 'absorb' data but do not give anything back in return */
    if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
    {
        This->lasttime_real = tStart;
        This->lasttime_sent = tStart;
    }
    else if (This->lasttime_real == tStart)
        tStart = This->lasttime_sent;
    else
        WARN("Discontinuity\n");

    tMed = tStart;
    mtMed = mtStart;

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        LeaveCriticalSection(&This->tf.csReceive);
        return hr;
    }

    ash.pbSrc = pbSrcStream;
    ash.cbSrcLength = cbSrcStream;

    /* Convert the input in chunks; a single input sample may produce several output samples. */
    while(hr == S_OK && ash.cbSrcLength)
    {
        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
        if (FAILED(hr))
        {
            ERR("Unable to get delivery buffer (%x)\n", hr);
            LeaveCriticalSection(&This->tf.csReceive);
            return hr;
        }
        IMediaSample_SetPreroll(pOutSample, preroll);

        hr = IMediaSample_SetActualDataLength(pOutSample, 0);
        assert(hr == S_OK);

        hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
        if (FAILED(hr)) {
            ERR("Unable to get pointer to buffer (%x)\n", hr);
            goto error;
        }
        cbDstStream = IMediaSample_GetSize(pOutSample);

        ash.cbStruct = sizeof(ash);
        ash.fdwStatus = 0;
        ash.dwUser = 0;
        ash.pbDst = pbDstStream;
        ash.cbDstLength = cbDstStream;

        if ((res = acmStreamPrepareHeader(This->has, &ash, 0))) {
            ERR("Cannot prepare header %d\n", res);
            goto error;
        }
        unprepare_header = TRUE;

        if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
        {
            res = acmStreamConvert(This->has, &ash, ACM_STREAMCONVERTF_START);
            IMediaSample_SetDiscontinuity(pOutSample, TRUE);
            /* One sample could be converted to multiple packets */
            IMediaSample_SetDiscontinuity(pSample, FALSE);
        }
        else
        {
            res = acmStreamConvert(This->has, &ash, 0);
            IMediaSample_SetDiscontinuity(pOutSample, FALSE);
        }

        if (res)
        {
            if(res != MMSYSERR_MOREDATA)
                ERR("Cannot convert data header %d\n", res);
            goto error;
        }

        TRACE("used in %u/%u, used out %u/%u\n", ash.cbSrcLengthUsed, ash.cbSrcLength, ash.cbDstLengthUsed, ash.cbDstLength);

        hr = IMediaSample_SetActualDataLength(pOutSample, ash.cbDstLengthUsed);
        assert(hr == S_OK);

        /* Bug in acm codecs? It apparently uses the input, but doesn't necessarily output immediately */
        if (!ash.cbSrcLengthUsed)
        {
            WARN("Sample was skipped? Outputted: %u\n", ash.cbDstLengthUsed);
            ash.cbSrcLength = 0;
            goto error;
        }

        TRACE("Sample start time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));
        if (ash.cbSrcLengthUsed == cbSrcStream)
        {
            IMediaSample_SetTime(pOutSample, &tStart, &tStop);
            tStart = tMed = tStop;
        }
        else if (tStop != tStart)
        {
            tMed = tStop - tStart;
            tMed = tStart + tMed * ash.cbSrcLengthUsed / cbSrcStream;
            IMediaSample_SetTime(pOutSample, &tStart, &tMed);
            tStart = tMed;
        }
        else
        {
            ERR("No valid timestamp found\n");
            IMediaSample_SetTime(pOutSample, NULL, NULL);
        }

        if (mtStart < 0) {
            IMediaSample_SetMediaTime(pOutSample, NULL, NULL);
        } else if (ash.cbSrcLengthUsed == cbSrcStream) {
            IMediaSample_SetMediaTime(pOutSample, &mtStart, &mtStop);
            mtStart = mtMed = mtStop;
        } else if (mtStop >= mtStart) {
            mtMed = mtStop - mtStart;
            mtMed = mtStart + mtMed * ash.cbSrcLengthUsed / cbSrcStream;
            IMediaSample_SetMediaTime(pOutSample, &mtStart, &mtMed);
            mtStart = mtMed;
        } else {
            IMediaSample_SetMediaTime(pOutSample, NULL, NULL);
        }

        TRACE("Sample stop time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));

        LeaveCriticalSection(&This->tf.csReceive);
        hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
        EnterCriticalSection(&This->tf.csReceive);

        if (hr != S_OK && hr != VFW_E_NOT_CONNECTED) {
            if (FAILED(hr))
                ERR("Error sending sample (%x)\n", hr);
            goto error;
        }

error:
        if (unprepare_header && (res = acmStreamUnprepareHeader(This->has, &ash, 0)))
            ERR("Cannot unprepare header %d\n", res);
        unprepare_header = FALSE;
        ash.pbSrc += ash.cbSrcLengthUsed;
        ash.cbSrcLength -= ash.cbSrcLengthUsed;

        IMediaSample_Release(pOutSample);
        pOutSample = NULL;

    }

    This->lasttime_real = tStop;
    This->lasttime_sent = tMed;

    LeaveCriticalSection(&This->tf.csReceive);
    return hr;
}
Example #6
static HRESULT WINAPI AVICompressorIn_Receive(BaseInputPin *base, IMediaSample *pSample)
{
    AVICompressor *This = impl_from_BasePin(&base->pin);
    VIDEOINFOHEADER *src_videoinfo;
    REFERENCE_TIME start, stop;
    IMediaSample *out_sample;
    AM_MEDIA_TYPE *mt;
    IMediaSample2 *sample2;
    DWORD comp_flags = 0;
    BOOL is_preroll;
    BOOL sync_point;
    BYTE *ptr, *buf;
    DWORD res;
    HRESULT hres;

    TRACE("(%p)->(%p)\n", base, pSample);

    if(!This->hic) {
        FIXME("Driver not loaded\n");
        return E_UNEXPECTED;
    }

    hres = IMediaSample_QueryInterface(pSample, &IID_IMediaSample2, (void**)&sample2);
    if(SUCCEEDED(hres)) {
        FIXME("Use IMediaSample2\n");
        IMediaSample2_Release(sample2);
    }

    is_preroll = IMediaSample_IsPreroll(pSample) == S_OK;
    sync_point = IMediaSample_IsSyncPoint(pSample) == S_OK;

    hres = IMediaSample_GetTime(pSample, &start, &stop);
    if(FAILED(hres)) {
        WARN("GetTime failed: %08x\n", hres);
        return hres;
    }

    hres = IMediaSample_GetMediaType(pSample, &mt);
    if(FAILED(hres))
        return hres;

    hres = IMediaSample_GetPointer(pSample, &ptr);
    if(FAILED(hres)) {
        WARN("GetPointer failed: %08x\n", hres);
        return hres;
    }

    hres = BaseOutputPinImpl_GetDeliveryBuffer(This->out, &out_sample, &start, &stop, 0);
    if(FAILED(hres))
        return hres;

    hres = IMediaSample_GetPointer(out_sample, &buf);
    if(FAILED(hres))
        return hres;

    if((This->driver_flags & VIDCF_TEMPORAL) && !(This->driver_flags & VIDCF_FASTTEMPORALC))
        FIXME("Unsupported temporal compression\n");

    src_videoinfo = (VIDEOINFOHEADER*)This->in->pin.mtCurrent.pbFormat;
    This->videoinfo->bmiHeader.biSizeImage = This->max_frame_size;
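    /* Compress one frame; ask for a key frame when the input sample is a sync point. */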
    res = ICCompress(This->hic, sync_point ? ICCOMPRESS_KEYFRAME : 0, &This->videoinfo->bmiHeader, buf,
            &src_videoinfo->bmiHeader, ptr, 0, &comp_flags, This->frame_cnt, 0, 0, NULL, NULL);
    if(res != ICERR_OK) {
        WARN("ICCompress failed: %d\n", res);
        IMediaSample_Release(out_sample);
        return E_FAIL;
    }

    IMediaSample_SetActualDataLength(out_sample, This->videoinfo->bmiHeader.biSizeImage);
    IMediaSample_SetPreroll(out_sample, is_preroll);
    IMediaSample_SetSyncPoint(out_sample, (comp_flags&AVIIF_KEYFRAME) != 0);
    IMediaSample_SetDiscontinuity(out_sample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));

    if (IMediaSample_GetMediaTime(pSample, &start, &stop) == S_OK)
        IMediaSample_SetMediaTime(out_sample, &start, &stop);
    else
        IMediaSample_SetMediaTime(out_sample, NULL, NULL);

    hres = BaseOutputPinImpl_Deliver(This->out, out_sample);
    if(FAILED(hres))
        WARN("Deliver failed: %08x\n", hres);

    IMediaSample_Release(out_sample);
    This->frame_cnt++;
    return hres;
}