Example #1
static HRESULT WINAPI Gstreamer_YUV_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Video) ||
        (!IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo) &&
         !IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo2)))
        return S_FALSE;
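    /* FOURCC-based subtypes reuse the MEDIATYPE_Video GUID with Data1 replaced
     * by the FOURCC, so everything after Data1 must match the base GUID. */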
    if (memcmp(&amt->subtype.Data2, &MEDIATYPE_Video.Data2, sizeof(GUID) - sizeof(amt->subtype.Data1)))
        return S_FALSE;
    switch (amt->subtype.Data1) {
        case mmioFOURCC('I','4','2','0'):
        case mmioFOURCC('Y','V','1','2'):
        case mmioFOURCC('N','V','1','2'):
        case mmioFOURCC('N','V','2','1'):
        case mmioFOURCC('Y','U','Y','2'):
        case mmioFOURCC('Y','V','Y','U'):
            return S_OK;
        default:
            WARN("Unhandled fourcc %s\n", debugstr_an((char*)&amt->subtype.Data1, 4));
            return S_FALSE;
    }
}
Example #2
static HRESULT WINAPI AVICompressorIn_CheckMediaType(BasePin *base, const AM_MEDIA_TYPE *pmt)
{
    AVICompressor *This = impl_from_BasePin(base);
    VIDEOINFOHEADER *videoinfo;
    HRESULT hres;
    DWORD res;

    TRACE("(%p)->(AM_MEDIA_TYPE(%p))\n", base, pmt);
    dump_AM_MEDIA_TYPE(pmt);

    if(!IsEqualGUID(&pmt->majortype, &MEDIATYPE_Video))
        return S_FALSE;

    if(!IsEqualGUID(&pmt->formattype, &FORMAT_VideoInfo)) {
        FIXME("formattype %s unsupported\n", debugstr_guid(&pmt->formattype));
        return S_FALSE;
    }

    hres = ensure_driver(This);
    if(hres != S_OK)
        return hres;

    videoinfo = (VIDEOINFOHEADER*)pmt->pbFormat;
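    /* Ask the installed VfW codec whether it can compress this input format
     * (a NULL output format means "any output"). */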
    res = ICCompressQuery(This->hic, &videoinfo->bmiHeader, NULL);
    return res == ICERR_OK ? S_OK : S_FALSE;
}
Example #3
static HRESULT WINAPI Parser_OutputPin_QueryAccept(IPin *iface, const AM_MEDIA_TYPE * pmt)
{
    Parser_OutputPin *This = unsafe_impl_Parser_OutputPin_from_IPin(iface);

    TRACE("()\n");
    dump_AM_MEDIA_TYPE(pmt);

    /* Shallow, byte-wise comparison of the AM_MEDIA_TYPE itself; the attached
     * format block is not compared. */
    return !memcmp(This->pmt, pmt, sizeof(AM_MEDIA_TYPE)) ? S_OK : S_FALSE;
}
Example #4
static HRESULT WINAPI Gstreamer_AudioConvert_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if (!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) ||
        !IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_PCM) ||
        !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;
    return S_OK;
}
Example #5
static HRESULT TransformFilter_Input_QueryAccept(LPVOID iface, const AM_MEDIA_TYPE * pmt)
{
    TransformFilterImpl* This = (TransformFilterImpl *)((IPinImpl *)iface)->pinInfo.pFilter;
    TRACE("%p\n", iface);
    dump_AM_MEDIA_TYPE(pmt);

    if (This->pFuncsTable->pfnQueryConnect)
        return This->pFuncsTable->pfnQueryConnect(This, pmt);
    /* Assume OK if there's no query method (the connection will fail if
       needed) */
    return S_OK;
}
Example #6
static HRESULT WINAPI DSoundRender_CompleteConnect(BaseRenderer * iface, IPin * pReceivePin)
{
    DSoundRenderImpl *This = impl_from_BaseRenderer(iface);
    const AM_MEDIA_TYPE * pmt = &This->renderer.pInputPin->pin.mtCurrent;
    HRESULT hr = S_OK;
    WAVEFORMATEX *format;
    DSBUFFERDESC buf_desc;

    TRACE("(%p)->(%p)\n", This, pReceivePin);
    dump_AM_MEDIA_TYPE(pmt);

    TRACE("MajorType %s\n", debugstr_guid(&pmt->majortype));
    TRACE("SubType %s\n", debugstr_guid(&pmt->subtype));
    TRACE("Format %s\n", debugstr_guid(&pmt->formattype));
    TRACE("Size %d\n", pmt->cbFormat);

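    /* The accepted media type uses FORMAT_WaveFormatEx, so pbFormat holds the WAVEFORMATEX. */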
    format = (WAVEFORMATEX*)pmt->pbFormat;

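    /* nAvgBytesPerSec bytes hold roughly one second of audio. */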
    This->buf_size = format->nAvgBytesPerSec;

    memset(&buf_desc,0,sizeof(DSBUFFERDESC));
    buf_desc.dwSize = sizeof(DSBUFFERDESC);
    buf_desc.dwFlags = DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPAN |
                       DSBCAPS_CTRLFREQUENCY | DSBCAPS_GLOBALFOCUS |
                       DSBCAPS_GETCURRENTPOSITION2;
    buf_desc.dwBufferBytes = This->buf_size;
    buf_desc.lpwfxFormat = format;
    hr = IDirectSound_CreateSoundBuffer(This->dsound, &buf_desc, &This->dsbuffer, NULL);
    This->writepos = This->buf_size;
    if (FAILED(hr))
        ERR("Can't create sound buffer (%x)\n", hr);

    if (SUCCEEDED(hr))
    {
        hr = IDirectSoundBuffer_SetVolume(This->dsbuffer, This->volume);
        if (FAILED(hr))
            ERR("Can't set volume to %d (%x)\n", This->volume, hr);

        hr = IDirectSoundBuffer_SetPan(This->dsbuffer, This->pan);
        if (FAILED(hr))
            ERR("Can't set pan to %d (%x)\n", This->pan, hr);
        hr = S_OK;
    }

    if (FAILED(hr) && hr != VFW_E_ALREADY_CONNECTED)
    {
        if (This->dsbuffer)
            IDirectSoundBuffer_Release(This->dsbuffer);
        This->dsbuffer = NULL;
    }

    return hr;
}
Example #7
static HRESULT WINAPI Gstreamer_Mp3_QueryConnect(TransformFilter *iface, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    TRACE("%p %p\n", This, amt);
    dump_AM_MEDIA_TYPE(amt);

    if ((!IsEqualGUID(&amt->majortype, &MEDIATYPE_Audio) &&
         !IsEqualGUID(&amt->majortype, &MEDIATYPE_Stream)) ||
        (!IsEqualGUID(&amt->subtype, &MEDIASUBTYPE_MPEG1AudioPayload) &&
         !IsEqualGUID(&amt->subtype, &WMMEDIASUBTYPE_MP3)) ||
        !IsEqualGUID(&amt->formattype, &FORMAT_WaveFormatEx))
        return S_FALSE;

    return S_OK;
}
Example #8
HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{
    VIDEOINFOHEADER *vi;

    mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (!mT[0])
        return E_OUTOFMEMORY;
    vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
    if (!vi)
    {
        CoTaskMemFree(mT[0]);
        mT[0] = NULL;
        return E_OUTOFMEMORY;
    }
    mT[0]->majortype = MEDIATYPE_Video;
    mT[0]->subtype = MEDIASUBTYPE_RGB24;
    mT[0]->formattype = FORMAT_VideoInfo;
    mT[0]->bFixedSizeSamples = TRUE;
    mT[0]->bTemporalCompression = FALSE;
    mT[0]->pUnk = NULL;
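    /* One uncompressed frame: width * height * bytes per pixel. */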
    mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
    TRACE("Output format: %dx%d - %d bits = %u KB\n", capBox->outputwidth,
          capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);
    vi->rcSource.left = 0; vi->rcSource.top = 0;
    vi->rcTarget.left = 0; vi->rcTarget.top = 0;
    vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
    vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
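    /* Raw data rate: frame size times frame rate (note dwBitRate is documented
     * in bits per second). */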
    vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
    vi->dwBitErrorRate = 0;
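    /* Frame duration in 100-nanosecond units. */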
    vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
    vi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    vi->bmiHeader.biWidth = capBox->outputwidth;
    vi->bmiHeader.biHeight = capBox->outputheight;
    vi->bmiHeader.biPlanes = 1;
    vi->bmiHeader.biBitCount = 24;
    vi->bmiHeader.biCompression = BI_RGB;
    vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
    vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
    vi->bmiHeader.biXPelsPerMeter = 100;
    vi->bmiHeader.biYPelsPerMeter = 100;
    mT[0]->pbFormat = (void *)vi;
    dump_AM_MEDIA_TYPE(mT[0]);
    return S_OK;
}
Example #9
static HRESULT WINAPI AVICompressorIn_ReceiveConnection(IPin *iface,
        IPin *pConnector, const AM_MEDIA_TYPE *pmt)
{
    AVICompressor *This = impl_from_IPin(iface);
    HRESULT hres;

    TRACE("(%p)->(%p AM_MEDIA_TYPE(%p))\n", This, pConnector, pmt);
    dump_AM_MEDIA_TYPE(pmt);

    hres = BaseInputPinImpl_ReceiveConnection(iface, pConnector, pmt);
    if(FAILED(hres))
        return hres;

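    /* Cache the negotiated video format; undo the base-class connection if that fails. */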
    hres = fill_format_info(This, (VIDEOINFOHEADER*)pmt->pbFormat);
    if(FAILED(hres))
        BasePinImpl_Disconnect(iface);
    return hres;
}
Example #10
static HRESULT WINAPI
AMStreamConfig_SetFormat(IAMStreamConfig *iface, AM_MEDIA_TYPE *pmt)
{
    HRESULT hr;
    VfwCapture *This = impl_from_IAMStreamConfig(iface);
    BasePin *pin;

    TRACE("(%p): %p->%p\n", iface, pmt, pmt ? pmt->pbFormat : NULL);

    if (This->filter.state != State_Stopped)
    {
        TRACE("Returning not stopped error\n");
        return VFW_E_NOT_STOPPED;
    }

    if (!pmt)
    {
        TRACE("pmt is NULL\n");
        return E_POINTER;
    }

    dump_AM_MEDIA_TYPE(pmt);

    pin = (BasePin *)This->pOutputPin;
    if (pin->pConnectedTo != NULL)
    {
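        /* The output pin is already connected, so make sure the downstream pin
         * would accept the new format before applying it to the driver. */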
        hr = IPin_QueryAccept(pin->pConnectedTo, pmt);
        TRACE("Would accept: %d\n", hr);
        if (hr == S_FALSE)
            return VFW_E_INVALIDMEDIATYPE;
    }

    hr = qcap_driver_set_format(This->driver_info, pmt);
    if (SUCCEEDED(hr) && This->filter.filterInfo.pGraph && pin->pConnectedTo )
    {
        hr = IFilterGraph_Reconnect(This->filter.filterInfo.pGraph, This->pOutputPin);
        if (SUCCEEDED(hr))
            TRACE("Reconnection completed, with new media format..\n");
    }
    TRACE("Returning: %d\n", hr);
    return hr;
}
Example #11
static HRESULT WINAPI DSoundRender_InputPin_ReceiveConnection(IPin * iface, IPin * pReceivePin, const AM_MEDIA_TYPE * pmt)
{
    BaseInputPin *This = (BaseInputPin *)iface;
    PIN_DIRECTION pindirReceive;
    DSoundRenderImpl *DSImpl;
    HRESULT hr = S_OK;

    TRACE("(%p)->(%p, %p)\n", This, pReceivePin, pmt);
    dump_AM_MEDIA_TYPE(pmt);

    EnterCriticalSection(This->pin.pCritSec);
    {
        DSImpl = (DSoundRenderImpl*)This->pin.pinInfo.pFilter;
        DSImpl->rtLastStop = -1;

        if (This->pin.pConnectedTo)
            hr = VFW_E_ALREADY_CONNECTED;

        if (SUCCEEDED(hr) && This->pin.pFuncsTable->pfnCheckMediaType((BasePin*)This, pmt) != S_OK)
            hr = VFW_E_TYPE_NOT_ACCEPTED;

        if (SUCCEEDED(hr))
        {
            IPin_QueryDirection(pReceivePin, &pindirReceive);

            if (pindirReceive != PINDIR_OUTPUT)
            {
                ERR("Can't connect from non-output pin\n");
                hr = VFW_E_INVALID_DIRECTION;
            }
        }

        if (SUCCEEDED(hr))
        {
            WAVEFORMATEX *format;
            DSBUFFERDESC buf_desc;

            TRACE("MajorType %s\n", debugstr_guid(&pmt->majortype));
            TRACE("SubType %s\n", debugstr_guid(&pmt->subtype));
            TRACE("Format %s\n", debugstr_guid(&pmt->formattype));
            TRACE("Size %d\n", pmt->cbFormat);

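            /* pfnCheckMediaType above accepted this type, so pbFormat is treated as a WAVEFORMATEX. */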
            format = (WAVEFORMATEX*)pmt->pbFormat;

            DSImpl->buf_size = format->nAvgBytesPerSec;

            memset(&buf_desc,0,sizeof(DSBUFFERDESC));
            buf_desc.dwSize = sizeof(DSBUFFERDESC);
            buf_desc.dwFlags = DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPAN |
                               DSBCAPS_CTRLFREQUENCY |
                               DSBCAPS_GETCURRENTPOSITION2;
            buf_desc.dwBufferBytes = DSImpl->buf_size;
            buf_desc.lpwfxFormat = format;
            hr = IDirectSound_CreateSoundBuffer(DSImpl->dsound, &buf_desc, &DSImpl->dsbuffer, NULL);
            if (FAILED(hr))
                ERR("Can't create sound buffer (%x)\n", hr);
        }

        if (SUCCEEDED(hr))
        {
            hr = IDirectSoundBuffer_SetVolume(DSImpl->dsbuffer, DSImpl->volume);
            if (FAILED(hr))
                ERR("Can't set volume to %d (%x)\n", DSImpl->volume, hr);

            hr = IDirectSoundBuffer_SetPan(DSImpl->dsbuffer, DSImpl->pan);
            if (FAILED(hr))
                ERR("Can't set pan to %d (%x)\n", DSImpl->pan, hr);

            DSImpl->write_pos = 0;
            hr = S_OK;
        }

        if (SUCCEEDED(hr))
        {
            CopyMediaType(&This->pin.mtCurrent, pmt);
            This->pin.pConnectedTo = pReceivePin;
            IPin_AddRef(pReceivePin);
        }
        else if (hr != VFW_E_ALREADY_CONNECTED)
        {
            if (DSImpl->dsbuffer)
                IDirectSoundBuffer_Release(DSImpl->dsbuffer);
            DSImpl->dsbuffer = NULL;
        }
    }
    LeaveCriticalSection(This->pin.pCritSec);

    return hr;
}