Example 1
static HRESULT ACMWrapper_ConnectInput(InputPin *pin, const AM_MEDIA_TYPE * pmt)
{
    ACMWrapperImpl* This = (ACMWrapperImpl *)pin->pin.pinInfo.pFilter;
    MMRESULT res;

    TRACE("(%p)->(%p)\n", This, pmt);

    /* Check root (GUID w/o FOURCC) */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Audio)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Audio)+4, sizeof(GUID)-4)) &&
        (IsEqualIID(&pmt->formattype, &FORMAT_WaveFormatEx)))
    {
        HACMSTREAM drv;
        AM_MEDIA_TYPE* outpmt = &This->tf.pmt;
        FreeMediaType(outpmt);

        This->pWfIn = (LPWAVEFORMATEX)pmt->pbFormat;

        /* HACK */
        /* TRACE("ALIGN = %d\n", pACMWrapper->pWfIn->nBlockAlign); */
        /* pACMWrapper->pWfIn->nBlockAlign = 1; */

        /* Set output audio data to PCM */
        CopyMediaType(outpmt, pmt);
        outpmt->subtype.Data1 = WAVE_FORMAT_PCM;
        This->pWfOut = (WAVEFORMATEX*)outpmt->pbFormat;
        This->pWfOut->wFormatTag = WAVE_FORMAT_PCM;
        This->pWfOut->wBitsPerSample = 16;
        This->pWfOut->nBlockAlign = This->pWfOut->wBitsPerSample * This->pWfOut->nChannels / 8;
        This->pWfOut->cbSize = 0;
        This->pWfOut->nAvgBytesPerSec = This->pWfOut->nChannels * This->pWfOut->nSamplesPerSec
                                        * (This->pWfOut->wBitsPerSample/8);

        if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
        {
            This->has = drv;

            /* Update buffer size of media samples in output */
            ((OutputPin*)This->tf.ppPins[1])->allocProps.cbBuffer = This->pWfOut->nAvgBytesPerSec / 2;
            TRACE("Connection accepted\n");
            return S_OK;
        }
        else
            FIXME("acmStreamOpen returned %d\n", res);
        FreeMediaType(outpmt);
        TRACE("Unable to find a suitable ACM decompressor\n");
    }

    TRACE("Connection refused\n");
    return VFW_E_TYPE_NOT_ACCEPTED;
}
Example 2
//
// CheckInputType
//
// Check the input type is OK - return an error otherwise
//
HRESULT CAudioTimeScale::CheckInputType(const CMediaType *mtIn)
{
    // check this is an audio format that we can support
    if (*mtIn->FormatType() != FORMAT_WaveFormatEx) {
        return E_INVALIDARG;
    }

    // Can we transform this type
    if (CanPerformTransform(mtIn)) {
        CopyMediaType(&m_mt, mtIn);
        return NOERROR;
    }
    return E_FAIL;
}
Example 3
STDMETHODIMP CapturePin::QueryAccept(const AM_MEDIA_TYPE *pmt)
{
    if(pmt->majortype != expectedMajorType)
        return S_FALSE;
    if(!IsValidMediaType(pmt))
        return S_FALSE;

    if(connectedPin)
    {
        FreeMediaType(connectedMediaType);
        CopyMediaType(&connectedMediaType, pmt);
    }

    return S_OK;
}
Example 4
HRESULT CSampleSender::SetAcceptedMediaType( const CMediaType * pmt )
{
    CAutoLock lock( &m_Lock );

    if( !pmt )
    {
        m_mtAccept = CMediaType( );
        return NOERROR;        
    }

    HRESULT hr;
    hr = CopyMediaType( &m_mtAccept, pmt );

    return hr;
}
Example 5
/**
 * \brief IMediaSample::SetMediaType (specifies media type for sample)
 *
 * \param[in] This pointer to CMediaSample object
 * \param[in] pMediaType pointer to AM_MEDIA_TYPE specifies new media type
 *
 * \return S_OK success
 * \return E_OUTOFMEMORY insufficient memory
 *
 */
static HRESULT STDCALL CMediaSample_SetMediaType(IMediaSample * This,
						 AM_MEDIA_TYPE *pMediaType)
{
    AM_MEDIA_TYPE* t;
    Debug printf("CMediaSample_SetMediaType(%p) called\n", This);
    if (!pMediaType)
        return E_INVALIDARG;
    t = &((CMediaSample*)This)->media_type;
    if (((CMediaSample*)This)->type_valid)
        FreeMediaType(t);
    CopyMediaType(t, pMediaType);
    ((CMediaSample*)This)->type_valid = 1;

    return S_OK;
}
Example 6
AM_MEDIA_TYPE * WINAPI CreateMediaType(AM_MEDIA_TYPE const * pSrc)
{
    AM_MEDIA_TYPE * pDest;

    pDest = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (!pDest)
        return NULL;

    if (FAILED(CopyMediaType(pDest, pSrc)))
    {
        CoTaskMemFree(pDest);
        return NULL;
    }

    return pDest;
}
Example 7
STDMETHODIMP CapturePin::ReceiveConnection(IPin *pConnector, const AM_MEDIA_TYPE *pmt)
{
    if(filter->state != State_Stopped)
        return VFW_E_NOT_STOPPED;
    if(!pConnector || !pmt)
        return E_POINTER;
    if(connectedPin)
        return VFW_E_ALREADY_CONNECTED;

    if(QueryAccept(pmt) != S_OK)
        return VFW_E_TYPE_NOT_ACCEPTED;

    connectedPin = pConnector;
    connectedPin->AddRef();

    FreeMediaType(connectedMediaType);
    return CopyMediaType(&connectedMediaType, pmt);
}
Example 8
HRESULT CBufferFilter::GetMediaType(int iPosition, CMediaType* pMediaType)
{
	if (m_pInput->IsConnected() == FALSE) {
		return E_UNEXPECTED;
	}

	// TODO: offer all input types from upstream and allow reconnection at least in stopped state
	if (iPosition < 0) {
		return E_INVALIDARG;
	}
	if (iPosition > 0) {
		return VFW_S_NO_MORE_ITEMS;
	}

	CopyMediaType(pMediaType, &m_pInput->CurrentMediaType());

	return S_OK;
}
Example 9
AM_MEDIA_TYPE * WINAPI CreateMediaType(AM_MEDIA_TYPE const *pSrc)
{
    ASSERT(pSrc);

    // Allocate a block of memory for the media type

    AM_MEDIA_TYPE *pMediaType =
        (AM_MEDIA_TYPE *)CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));

    if (pMediaType == NULL) {
        return NULL;
    }
    // Copy the variable length format block

    CopyMediaType(pMediaType,pSrc);

    return pMediaType;
}
Example 10
static HRESULT WINAPI StdMediaSample2_GetMediaType(IMediaSample2 * iface, AM_MEDIA_TYPE ** ppMediaType)
{
    StdMediaSample2 *This = (StdMediaSample2 *)iface;

    TRACE("(%p)\n", ppMediaType);

    if (!This->props.pMediaType) {
        /* Make sure we return a NULL pointer (required by native Quartz dll) */
        if (ppMediaType)
            *ppMediaType = NULL;
        return S_FALSE;
    }

    if (!(*ppMediaType = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE))))
        return E_OUTOFMEMORY;

    return CopyMediaType(*ppMediaType, This->props.pMediaType);
}
Example 11
static HRESULT WINAPI VfwPin_GetMediaType(BasePin *pin, int iPosition, AM_MEDIA_TYPE *pmt)
{
    VfwPinImpl *This = impl_from_BasePin(pin);
    AM_MEDIA_TYPE *vfw_pmt;
    HRESULT hr;

    if (iPosition < 0)
        return E_INVALIDARG;
    if (iPosition > 0)
        return VFW_S_NO_MORE_ITEMS;

    hr = qcap_driver_get_format(This->parent->driver_info, &vfw_pmt);
    if (SUCCEEDED(hr)) {
        CopyMediaType(pmt, vfw_pmt);
        DeleteMediaType(vfw_pmt);
    }
    return hr;
}
Example 12
static HRESULT WINAPI StdMediaSample2_SetMediaType(IMediaSample2 * iface, AM_MEDIA_TYPE * pMediaType)
{
    StdMediaSample2 *This = impl_from_IMediaSample2(iface);

    TRACE("(%p)->(%p)\n", iface, pMediaType);

    if (This->props.pMediaType)
    {
        FreeMediaType(This->props.pMediaType);
        This->props.pMediaType = NULL;
    }
    if (!pMediaType)
        return S_FALSE;
    if (!(This->props.pMediaType = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE))))
        return E_OUTOFMEMORY;

    return CopyMediaType(This->props.pMediaType, pMediaType);
}
Example 13
/* Return the media type of the connection */
STDMETHODIMP
CBasePin::ConnectionMediaType(AM_MEDIA_TYPE *pmt)
{
	CheckPointer(pmt,E_POINTER);
	ValidateReadWritePtr(pmt,sizeof(AM_MEDIA_TYPE));
	CComAutoLock cObjectLock(m_pLock);

	/*  Copy constructor of m_mt allocates the memory */
	if (IsConnected()) {
		CopyMediaType( pmt, &m_mt );
		return S_OK;
	} else {
		((CMediaType *)pmt)->InitMediaType();
		return VFW_E_NOT_CONNECTED;
	}
}
Example 14
/**
 * \brief CInputPin constructor
 *
 * \param[in]  amt media type for pin
 *
 * \return pointer to CInputPin if success
 * \return NULL if an error occurred
 *
 */
CInputPin* CInputPinCreate(CBaseFilter* p, const AM_MEDIA_TYPE* amt)
{
    CInputPin* This = (CInputPin*) malloc(sizeof(CInputPin));

    if (!This)
        return NULL;

    This->refcount = 1;
    This->parent = p;
    CopyMediaType(&(This->type),amt);

    This->vt= (IPin_vt*) malloc(sizeof(IPin_vt));

    if (!This->vt)
    {
        free(This);
        return NULL;
    }

    This->vt->QueryInterface = CInputPin_QueryInterface;
    This->vt->AddRef = CInputPin_AddRef;
    This->vt->Release = CInputPin_Release;
    This->vt->Connect = CInputPin_Connect;
    This->vt->ReceiveConnection = CInputPin_ReceiveConnection;
    This->vt->Disconnect = CInputPin_Disconnect;
    This->vt->ConnectedTo = CInputPin_ConnectedTo;
    This->vt->ConnectionMediaType = CInputPin_ConnectionMediaType;
    This->vt->QueryPinInfo = CInputPin_QueryPinInfo;
    This->vt->QueryDirection = CInputPin_QueryDirection;
    This->vt->QueryId = CInputPin_QueryId;
    This->vt->QueryAccept = CInputPin_QueryAccept;
    This->vt->EnumMediaTypes = CInputPin_EnumMediaTypes;
    This->vt->QueryInternalConnections = CInputPin_QueryInternalConnections;
    This->vt->EndOfStream = CInputPin_EndOfStream;
    This->vt->BeginFlush = CInputPin_BeginFlush;
    This->vt->EndFlush = CInputPin_EndFlush;
    This->vt->NewSegment = CInputPin_NewSegment;

    This->interfaces[0]=IID_IUnknown;

    return This;
}
Example 15
void CorrectMediaType(AM_MEDIA_TYPE* pmt)
{
	if(!pmt) return;

	CMediaType mt(*pmt);

	if(mt.formattype == FORMAT_VideoInfo)
	{
		VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)mt.pbFormat;

		for(int i = 0; i < VIHSIZE; i++)
		{
			if(mt.subtype == *vihs[i].subtype
			&& vih->bmiHeader.biCompression == vihs[i].vih.bmiHeader.biCompression)
			{
				mt.AllocFormatBuffer(vihs[i].size);
				memcpy(mt.pbFormat, &vihs[i], vihs[i].size);
				memcpy(mt.pbFormat, pmt->pbFormat, sizeof(VIDEOINFOHEADER));
				break;
			}
		}
	}
	else if(mt.formattype == FORMAT_VideoInfo2)
	{
		VIDEOINFOHEADER2* vih2 = (VIDEOINFOHEADER2*)mt.pbFormat;

		for(int i = 0; i < VIHSIZE; i++)
		{
			if(mt.subtype == *vih2s[i].subtype
			&& vih2->bmiHeader.biCompression == vih2s[i].vih.bmiHeader.biCompression)
			{
				mt.AllocFormatBuffer(vih2s[i].size);
				memcpy(mt.pbFormat, &vih2s[i], vih2s[i].size);
				memcpy(mt.pbFormat, pmt->pbFormat, sizeof(VIDEOINFOHEADER2));
				break;
			}
		}
	}

	CopyMediaType(pmt, &mt);
}
Example 16
AM_MEDIA_TYPE * WINAPI CreateMediaType(AM_MEDIA_TYPE const *pSrc)
{
    _ASSERT(pSrc);

    // Allocate a block of memory for the media type

    AM_MEDIA_TYPE *pMediaType =
        (AM_MEDIA_TYPE *)CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));

    if (pMediaType == NULL) {
        return NULL;
    }
    // Copy the variable length format block

    HRESULT hr = CopyMediaType(pMediaType,pSrc);
    if (FAILED(hr)) {
        CoTaskMemFree((PVOID)pMediaType);
        return NULL;
    }

    return pMediaType;
}
Example 17
HRESULT WINAPI EnumMediaTypes_Construct(BasePin *basePin, BasePin_GetMediaType enumFunc, BasePin_GetMediaTypeVersion versionFunc, IEnumMediaTypes ** ppEnum)
{
    ULONG i;
    IEnumMediaTypesImpl * pEnumMediaTypes = CoTaskMemAlloc(sizeof(IEnumMediaTypesImpl));
    AM_MEDIA_TYPE amt;

    if (!pEnumMediaTypes)
    {
        *ppEnum = NULL;
        return E_OUTOFMEMORY;
    }
    pEnumMediaTypes->IEnumMediaTypes_iface.lpVtbl = &IEnumMediaTypesImpl_Vtbl;
    pEnumMediaTypes->refCount = 1;
    pEnumMediaTypes->uIndex = 0;
    pEnumMediaTypes->enumMediaFunction = enumFunc;
    pEnumMediaTypes->mediaVersionFunction = versionFunc;
    IPin_AddRef(&basePin->IPin_iface);
    pEnumMediaTypes->basePin = basePin;

    i = 0;
    while (enumFunc(basePin, i, &amt) == S_OK) i++;

    pEnumMediaTypes->enumMediaDetails.cMediaTypes = i;
    pEnumMediaTypes->enumMediaDetails.pMediaTypes = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE) * i);
    if (i && !pEnumMediaTypes->enumMediaDetails.pMediaTypes)
    {
        /* Bail out cleanly if the media-type table could not be allocated */
        IPin_Release(&basePin->IPin_iface);
        CoTaskMemFree(pEnumMediaTypes);
        *ppEnum = NULL;
        return E_OUTOFMEMORY;
    }
    for (i = 0; i < pEnumMediaTypes->enumMediaDetails.cMediaTypes; i++)
    {
        enumFunc(basePin,i,&amt);
        if (FAILED(CopyMediaType(&pEnumMediaTypes->enumMediaDetails.pMediaTypes[i], &amt)))
        {
            while (i--)
                FreeMediaType(&pEnumMediaTypes->enumMediaDetails.pMediaTypes[i]);
            CoTaskMemFree(pEnumMediaTypes->enumMediaDetails.pMediaTypes);
            IPin_Release(&basePin->IPin_iface);
            CoTaskMemFree(pEnumMediaTypes);
            *ppEnum = NULL;
            return E_OUTOFMEMORY;
        }
    }
    *ppEnum = &pEnumMediaTypes->IEnumMediaTypes_iface;
    pEnumMediaTypes->currentVersion = versionFunc(basePin);
    return S_OK;
}
Example 18
HRESULT CDeCSSFilter::GetMediaType(int iPosition, CMediaType* pmt)
{
    if (m_pInput->IsConnected() == FALSE) {
        return E_UNEXPECTED;
    }

    if (iPosition < 0) {
        return E_INVALIDARG;
    }
    if (iPosition > 1) {
        return VFW_S_NO_MORE_ITEMS;
    }

    CopyMediaType(pmt, &m_pInput->CurrentMediaType());
    if (iPosition == 0) {
        pmt->majortype = MEDIATYPE_MPEG2_PACK;
    }
    if (iPosition == 1) {
        pmt->majortype = MEDIATYPE_MPEG2_PES;
    }

    return S_OK;
}
Example 19
STDMETHODIMP SampleCapturePin::ReceiveConnection( IPin * pConnector,
                                            const AM_MEDIA_TYPE *pmt )
{
    if( State_Stopped != p_filter->state )
    {
        ATLTRACE(  "SampleCapturePin::ReceiveConnection [not stopped]" );
        return VFW_E_NOT_STOPPED;
    }

    if( !pConnector || !pmt )
    {
        ATLTRACE(  "SampleCapturePin::ReceiveConnection [null pointer]" );
        return E_POINTER;
    }

    if( p_connected_pin )
    {
        ATLTRACE(  "SampleCapturePin::ReceiveConnection [already connected]");
        return VFW_E_ALREADY_CONNECTED;
    }

    if( S_OK != QueryAccept(pmt) )
    {
        ATLTRACE(  "SampleCapturePin::ReceiveConnection "
                 "[media type not accepted]" );
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    ATLTRACE(  "SampleCapturePin::ReceiveConnection [OK]" );

    p_connected_pin = pConnector;
    p_connected_pin->AddRef();

    FreeMediaType( cx_media_type );
    return CopyMediaType( &cx_media_type, pmt );
}
Example 20
//
// CheckMediaType
//
// We only accept fixed-size PCM audio samples.
// Returns E_INVALIDARG if the media type is not acceptable
//
HRESULT CPushPinAudio::CheckMediaType(const CMediaType *pmt)
{
    CheckPointer(pmt,E_POINTER);

    if((*(pmt->Type()) != MEDIATYPE_Audio) ||   // we only output audio
        !(pmt->IsFixedSize()))                  // in fixed size samples
    {                                                  
        return E_INVALIDARG;
    }

    // Check for the subtypes we support
    const GUID *SubType = pmt->Subtype();
    if (SubType == NULL)
        return E_INVALIDARG;

    if(*SubType != MEDIASUBTYPE_PCM)
    {
        return E_INVALIDARG;
    }


    //if (pmt->FormatLength() != sizeof(WAVEFORMATEX)) // no compressed types allowed
    //{
    //    return E_INVALIDARG;
    //}
    //WAVEFORMATEX *pFormatIn = (WAVEFORMATEX *)pmt->Format();
    //if (pFormatIn->wFormatTag != WAVE_FORMAT_PCM)
    //{
    //    return E_INVALIDARG;
    //}

    CopyMediaType(&m_mt, pmt);

    return S_OK;  // This format is acceptable.

} // CheckMediaType
Example 21
void CStreamSwitcherAllocator::NotifyMediaType(const CMediaType& mt)
{
    CopyMediaType(&m_mt, &mt);
    m_fMediaTypeChanged = true;
}
Example 22
static HRESULT WINAPI AVIDec_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE * pmt)
{
    AVIDecImpl* This = (AVIDecImpl*)tf;
    HRESULT hr = VFW_E_TYPE_NOT_ACCEPTED;

    TRACE("(%p)->(%p)\n", This, pmt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    /* Check root (GUID w/o FOURCC) */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Video)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Video)+4, sizeof(GUID)-4)))
    {
        VIDEOINFOHEADER *format1 = (VIDEOINFOHEADER *)pmt->pbFormat;
        VIDEOINFOHEADER2 *format2 = (VIDEOINFOHEADER2 *)pmt->pbFormat;
        BITMAPINFOHEADER *bmi;

        if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo))
            bmi = &format1->bmiHeader;
        else if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo2))
            bmi = &format2->bmiHeader;
        else
            goto failed;
        TRACE("Fourcc: %s\n", debugstr_an((const char *)&pmt->subtype.Data1, 4));

        This->hvid = ICLocate(pmt->majortype.Data1, pmt->subtype.Data1, bmi, NULL, ICMODE_DECOMPRESS);
        if (This->hvid)
        {
            AM_MEDIA_TYPE* outpmt = &This->tf.pmt;
            const CLSID* outsubtype;
            DWORD bih_size;
            DWORD output_depth = bmi->biBitCount;
            DWORD result;
            FreeMediaType(outpmt);

            switch(bmi->biBitCount)
            {
                case 32: outsubtype = &MEDIASUBTYPE_RGB32; break;
                case 24: outsubtype = &MEDIASUBTYPE_RGB24; break;
                case 16: outsubtype = &MEDIASUBTYPE_RGB565; break;
                case 8:  outsubtype = &MEDIASUBTYPE_RGB8; break;
                default:
                    WARN("Non standard input depth %d, forced output depth to 32\n", bmi->biBitCount);
                    outsubtype = &MEDIASUBTYPE_RGB32;
                    output_depth = 32;
                    break;
            }

            /* Copy the bitmap header from the media type, once for the input and once for the output */
            bih_size = bmi->biSize + bmi->biClrUsed * 4;
            This->pBihIn = CoTaskMemAlloc(bih_size);
            if (!This->pBihIn)
            {
                hr = E_OUTOFMEMORY;
                goto failed;
            }
            This->pBihOut = CoTaskMemAlloc(bih_size);
            if (!This->pBihOut)
            {
                hr = E_OUTOFMEMORY;
                goto failed;
            }
            memcpy(This->pBihIn, bmi, bih_size);
            memcpy(This->pBihOut, bmi, bih_size);

            /* Update output format as non compressed bitmap */
            This->pBihOut->biCompression = 0;
            This->pBihOut->biBitCount = output_depth;
            This->pBihOut->biSizeImage = This->pBihOut->biWidth * This->pBihOut->biHeight * This->pBihOut->biBitCount / 8;
            TRACE("Size: %u\n", This->pBihIn->biSize);
            result = ICDecompressQuery(This->hvid, This->pBihIn, This->pBihOut);
            if (result != ICERR_OK)
            {
                ERR("Unable to find a suitable output format (%d)\n", result);
                goto failed;
            }

            /* Update output media type */
            CopyMediaType(outpmt, pmt);
            outpmt->subtype = *outsubtype;

            if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo))
                memcpy(&(((VIDEOINFOHEADER *)outpmt->pbFormat)->bmiHeader), This->pBihOut, This->pBihOut->biSize);
            else if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo2))
                memcpy(&(((VIDEOINFOHEADER2 *)outpmt->pbFormat)->bmiHeader), This->pBihOut, This->pBihOut->biSize);
            else
                assert(0);

            TRACE("Connection accepted\n");
            return S_OK;
        }
        TRACE("Unable to find a suitable VFW decompressor\n");
    }

failed:

    TRACE("Connection refused\n");
    return hr;
}
Example 23
CMediaType::CMediaType(const CMediaType& rt)
{
    CopyMediaType(this, &rt);
}
Example 24
CMediaType::CMediaType(const AM_MEDIA_TYPE& rt)
{
    CopyMediaType(this, &rt);
}
Example 25
static HRESULT WINAPI QTInPin_ReceiveConnection(IPin *iface, IPin *pReceivePin, const AM_MEDIA_TYPE *pmt)
{
    HRESULT hr = S_OK;
    ALLOCATOR_PROPERTIES props;
    QTInPin *This = impl_from_IPin(iface);

    TRACE("(%p/%p)->(%p, %p)\n", This, iface, pReceivePin, pmt);

    EnterCriticalSection(This->pin.pCritSec);
    This->pReader = NULL;

    if (This->pin.pConnectedTo)
        hr = VFW_E_ALREADY_CONNECTED;
    else if (IPin_QueryAccept(iface, pmt) != S_OK)
        hr = VFW_E_TYPE_NOT_ACCEPTED;
    else
    {
        PIN_DIRECTION pindirReceive;
        IPin_QueryDirection(pReceivePin, &pindirReceive);
        if (pindirReceive != PINDIR_OUTPUT)
            hr = VFW_E_INVALID_DIRECTION;
    }

    if (FAILED(hr))
    {
        LeaveCriticalSection(This->pin.pCritSec);
        return hr;
    }

    hr = IPin_QueryInterface(pReceivePin, &IID_IAsyncReader, (LPVOID *)&This->pReader);
    if (FAILED(hr))
    {
        LeaveCriticalSection(This->pin.pCritSec);
        TRACE("Input source is not an AsyncReader\n");
        return hr;
    }

    LeaveCriticalSection(This->pin.pCritSec);
    EnterCriticalSection(&impl_from_IBaseFilter(This->pin.pinInfo.pFilter)->filter.csFilter);
    hr = QT_Process_Movie(impl_from_IBaseFilter(This->pin.pinInfo.pFilter));
    if (FAILED(hr))
    {
        LeaveCriticalSection(&impl_from_IBaseFilter(This->pin.pinInfo.pFilter)->filter.csFilter);
        TRACE("Unable to process movie\n");
        return hr;
    }

    This->pAlloc = NULL;
    props.cBuffers = 8;
    props.cbAlign = 1;
    props.cbBuffer = impl_from_IBaseFilter(This->pin.pinInfo.pFilter)->outputSize + props.cbAlign;
    props.cbPrefix = 0;

    hr = IAsyncReader_RequestAllocator(This->pReader, NULL, &props, &This->pAlloc);
    if (SUCCEEDED(hr))
    {
        CopyMediaType(&This->pin.mtCurrent, pmt);
        This->pin.pConnectedTo = pReceivePin;
        IPin_AddRef(pReceivePin);
        hr = IMemAllocator_Commit(This->pAlloc);
    }
    else
    {
        QT_RemoveOutputPins(impl_from_IBaseFilter(This->pin.pinInfo.pFilter));
        if (This->pReader)
            IAsyncReader_Release(This->pReader);
        This->pReader = NULL;
        if (This->pAlloc)
            IMemAllocator_Release(This->pAlloc);
        This->pAlloc = NULL;
    }
    TRACE("Size: %i\n", props.cbBuffer);
    LeaveCriticalSection(&impl_from_IBaseFilter(This->pin.pinInfo.pFilter)->filter.csFilter);

    return hr;
}
Example 26
static HRESULT WINAPI DSoundRender_Receive(BaseInputPin *pin, IMediaSample * pSample)
{
    DSoundRenderImpl *This = (DSoundRenderImpl*)pin->pin.pinInfo.pFilter;
    LPBYTE pbSrcStream = NULL;
    LONG cbSrcStream = 0;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;
    AM_MEDIA_TYPE *amt;

    TRACE("%p %p\n", pin, pSample);

    /* Slightly incorrect, Pause completes when a frame is received so we should signal
     * pause completion here, but for sound playing a single frame doesn't make sense
     */

    EnterCriticalSection(&This->filter.csFilter);

    if (This->pInputPin->end_of_stream || This->pInputPin->flushing)
    {
        LeaveCriticalSection(&This->filter.csFilter);
        return S_FALSE;
    }

    if (This->filter.state == State_Stopped)
    {
        LeaveCriticalSection(&This->filter.csFilter);
        return VFW_E_WRONG_STATE;
    }

    if (IMediaSample_GetMediaType(pSample, &amt) == S_OK)
    {
        AM_MEDIA_TYPE *orig = &This->pInputPin->pin.mtCurrent;
        WAVEFORMATEX *origfmt = (WAVEFORMATEX *)orig->pbFormat;
        WAVEFORMATEX *newfmt = (WAVEFORMATEX *)amt->pbFormat;

        if (origfmt->wFormatTag == newfmt->wFormatTag &&
            origfmt->nChannels == newfmt->nChannels &&
            origfmt->nBlockAlign == newfmt->nBlockAlign &&
            origfmt->wBitsPerSample == newfmt->wBitsPerSample &&
            origfmt->cbSize ==  newfmt->cbSize)
        {
            if (origfmt->nSamplesPerSec != newfmt->nSamplesPerSec)
            {
                hr = IDirectSoundBuffer_SetFrequency(This->dsbuffer,
                                                     newfmt->nSamplesPerSec);
                if (FAILED(hr))
                {
                    LeaveCriticalSection(&This->filter.csFilter);
                    return VFW_E_TYPE_NOT_ACCEPTED;
                }
                FreeMediaType(orig);
                CopyMediaType(orig, amt);
                IMediaSample_SetMediaType(pSample, NULL);
            }
        }
        else
        {
            LeaveCriticalSection(&This->filter.csFilter);
            return VFW_E_TYPE_NOT_ACCEPTED;
        }
    }

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        LeaveCriticalSection(&This->filter.csFilter);
        return hr;
    }

    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (FAILED(hr))
        ERR("Cannot get sample time (%x)\n", hr);
    else
        MediaSeekingPassThru_RegisterMediaTime(This->seekthru_unk, tStart);

    if (This->rtLastStop != tStart && (IMediaSample_IsDiscontinuity(pSample) == S_FALSE))
        WARN("Unexpected discontinuity: Last: %u.%03u, tStart: %u.%03u\n",
            (DWORD)(This->rtLastStop / 10000000), (DWORD)((This->rtLastStop / 10000)%1000),
            (DWORD)(tStart / 10000000), (DWORD)((tStart / 10000)%1000));
    This->rtLastStop = tStop;

    if (IMediaSample_IsPreroll(pSample) == S_OK)
    {
        TRACE("Preroll!\n");
        LeaveCriticalSection(&This->filter.csFilter);
        return S_OK;
    }

    if (This->filter.state == State_Paused)
    {
        SetEvent(This->state_change);
        LeaveCriticalSection(&This->filter.csFilter);
        WaitForSingleObject(This->blocked, INFINITE);
        EnterCriticalSection(&This->filter.csFilter);
        if (This->filter.state == State_Stopped)
        {
            LeaveCriticalSection(&This->filter.csFilter);
            return VFW_E_WRONG_STATE;
        }

        if (This->filter.state == State_Paused)
        {
            /* Assuming we return because of flushing */
            TRACE("Flushing\n");
            LeaveCriticalSection(&This->filter.csFilter);
            return S_OK;
        }
        SetEvent(This->state_change);
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);
    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

#if 0 /* For debugging purpose */
    {
        int i;
        for(i = 0; i < cbSrcStream; i++)
        {
            if ((i != 0) && !(i % 16))
                TRACE("\n");
            TRACE("%02x ", pbSrcStream[i]);
        }
        TRACE("\n");
    }
#endif

    hr = DSoundRender_SendSampleData(This, pbSrcStream, cbSrcStream);
    SetEvent(This->state_change);
    LeaveCriticalSection(&This->filter.csFilter);
    return hr;
}
Example 27
static HRESULT WINAPI DSoundRender_InputPin_ReceiveConnection(IPin * iface, IPin * pReceivePin, const AM_MEDIA_TYPE * pmt)
{
    BaseInputPin *This = (BaseInputPin *)iface;
    PIN_DIRECTION pindirReceive;
    DSoundRenderImpl *DSImpl;
    HRESULT hr = S_OK;

    TRACE("(%p)->(%p, %p)\n", This, pReceivePin, pmt);
    dump_AM_MEDIA_TYPE(pmt);

    EnterCriticalSection(This->pin.pCritSec);
    {
        DSImpl = (DSoundRenderImpl*)This->pin.pinInfo.pFilter;
        DSImpl->rtLastStop = -1;

        if (This->pin.pConnectedTo)
            hr = VFW_E_ALREADY_CONNECTED;

        if (SUCCEEDED(hr) && This->pin.pFuncsTable->pfnCheckMediaType((BasePin*)This, pmt) != S_OK)
            hr = VFW_E_TYPE_NOT_ACCEPTED;

        if (SUCCEEDED(hr))
        {
            IPin_QueryDirection(pReceivePin, &pindirReceive);

            if (pindirReceive != PINDIR_OUTPUT)
            {
                ERR("Can't connect from non-output pin\n");
                hr = VFW_E_INVALID_DIRECTION;
            }
        }

        if (SUCCEEDED(hr))
        {
            WAVEFORMATEX *format;
            DSBUFFERDESC buf_desc;

            TRACE("MajorType %s\n", debugstr_guid(&pmt->majortype));
            TRACE("SubType %s\n", debugstr_guid(&pmt->subtype));
            TRACE("Format %s\n", debugstr_guid(&pmt->formattype));
            TRACE("Size %d\n", pmt->cbFormat);

            format = (WAVEFORMATEX*)pmt->pbFormat;

            DSImpl->buf_size = format->nAvgBytesPerSec;

            memset(&buf_desc,0,sizeof(DSBUFFERDESC));
            buf_desc.dwSize = sizeof(DSBUFFERDESC);
            buf_desc.dwFlags = DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPAN |
                               DSBCAPS_CTRLFREQUENCY |
                               DSBCAPS_GETCURRENTPOSITION2;
            buf_desc.dwBufferBytes = DSImpl->buf_size;
            buf_desc.lpwfxFormat = format;
            hr = IDirectSound_CreateSoundBuffer(DSImpl->dsound, &buf_desc, &DSImpl->dsbuffer, NULL);
            if (FAILED(hr))
                ERR("Can't create sound buffer (%x)\n", hr);
        }

        if (SUCCEEDED(hr))
        {
            hr = IDirectSoundBuffer_SetVolume(DSImpl->dsbuffer, DSImpl->volume);
            if (FAILED(hr))
                ERR("Can't set volume to %d (%x)\n", DSImpl->volume, hr);

            hr = IDirectSoundBuffer_SetPan(DSImpl->dsbuffer, DSImpl->pan);
            if (FAILED(hr))
                ERR("Can't set pan to %d (%x)\n", DSImpl->pan, hr);

            DSImpl->write_pos = 0;
            hr = S_OK;
        }

        if (SUCCEEDED(hr))
        {
            CopyMediaType(&This->pin.mtCurrent, pmt);
            This->pin.pConnectedTo = pReceivePin;
            IPin_AddRef(pReceivePin);
        }
        else if (hr != VFW_E_ALREADY_CONNECTED)
        {
            if (DSImpl->dsbuffer)
                IDirectSoundBuffer_Release(DSImpl->dsbuffer);
            DSImpl->dsbuffer = NULL;
        }
    }
    LeaveCriticalSection(This->pin.pCritSec);

    return hr;
}
Example 28
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir,  const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                   gst_video_format_to_string(
                                     gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGR",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                   NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}
Example 29
static HRESULT WINAPI Gstreamer_Mp3_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *wfx, *wfxin;
    HRESULT hr;
    int layer;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_Mp3_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    wfxin = (WAVEFORMATEX*)amt->pbFormat;
    switch (wfxin->wFormatTag) {
        case WAVE_FORMAT_MPEGLAYER3:
            layer = 3;
            break;
        case WAVE_FORMAT_MPEG: {
            MPEG1WAVEFORMAT *mpgformat = (MPEG1WAVEFORMAT*)wfxin;
            layer = mpgformat->fwHeadLayer;
            break;
        }
        default:
            FIXME("Unhandled tag %x\n", wfxin->wFormatTag);
            return E_FAIL;
    }

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    outpmt->subtype = MEDIASUBTYPE_PCM;
    outpmt->formattype = FORMAT_WaveFormatEx;
    outpmt->cbFormat = sizeof(*wfx);
    CoTaskMemFree(outpmt->pbFormat);
    wfx = CoTaskMemAlloc(outpmt->cbFormat);
    outpmt->pbFormat = (BYTE*)wfx;
    wfx->wFormatTag = WAVE_FORMAT_PCM;
    wfx->wBitsPerSample = 16;
    wfx->nSamplesPerSec = wfxin->nSamplesPerSec;
    wfx->nChannels = wfxin->nChannels;
    wfx->nBlockAlign = wfx->wBitsPerSample * wfx->nChannels / 8;
    wfx->cbSize = 0;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;

    capsin = gst_caps_new_simple("audio/mpeg",
                                 "mpegversion", G_TYPE_INT, 1,
                                 "layer", G_TYPE_INT, layer,
                                 "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                 "channels", G_TYPE_INT, wfx->nChannels,
                                 NULL);
    capsout = gst_caps_new_simple("audio/x-raw",
                                  "format", G_TYPE_STRING, "S16LE",
                                  "rate", G_TYPE_INT, wfx->nSamplesPerSec,
                                  "channels", G_TYPE_INT, wfx->nChannels,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = wfx->nAvgBytesPerSec / 4;

    return hr;
}
Example 30
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir,  const AM_MEDIA_TYPE *amt) {
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    DWORD width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if ((LONG)vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if ((LONG)vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
    }
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw-yuv",
                                 "format", GST_TYPE_FOURCC, amt->subtype.Data1,
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    capsout = gst_caps_new_simple("video/x-raw-rgb",
                                  "endianness", G_TYPE_INT, 4321,
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  "bpp", G_TYPE_INT, 24,
                                  "depth", G_TYPE_INT, 24,
                                  "red_mask", G_TYPE_INT, 0xff,
                                  "green_mask", G_TYPE_INT, 0xff00,
                                  "blue_mask", G_TYPE_INT, 0xff0000,
                                   NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * height * 4;
    return hr;
}
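Note: every call site above relies on the deep-copy semantics of CopyMediaType and the matching cleanup in FreeMediaType. For reference only, here is a minimal C sketch of the two helpers, modelled loosely on the DirectShow base classes and the Wine implementation; it is not the exact code of any project quoted above, and some projects (e.g. the base classes) take an AM_MEDIA_TYPE& reference rather than a pointer for FreeMediaType.
#define COBJMACROS
#include <dshow.h>   /* AM_MEDIA_TYPE, CoTaskMemAlloc/CoTaskMemFree */
#include <string.h>  /* memcpy */

/* Sketch only: shallow-copy the fixed fields, deep-copy the variable-length
 * format block, and keep a reference on the optional aggregate IUnknown. */
HRESULT CopyMediaType(AM_MEDIA_TYPE *pDest, const AM_MEDIA_TYPE *pSrc)
{
    *pDest = *pSrc;
    if (pSrc->cbFormat && pSrc->pbFormat)
    {
        pDest->pbFormat = (BYTE *)CoTaskMemAlloc(pSrc->cbFormat);
        if (!pDest->pbFormat)
        {
            pDest->cbFormat = 0;
            return E_OUTOFMEMORY;
        }
        memcpy(pDest->pbFormat, pSrc->pbFormat, pSrc->cbFormat);
    }
    if (pDest->pUnk)
        IUnknown_AddRef(pDest->pUnk);
    return S_OK;
}

/* Sketch only: release the members but leave the AM_MEDIA_TYPE structure
 * itself alone, so the caller can reuse it (CopyMediaType again) or free it. */
void FreeMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt->pbFormat)
    {
        CoTaskMemFree(pmt->pbFormat);
        pmt->pbFormat = NULL;
        pmt->cbFormat = 0;
    }
    if (pmt->pUnk)
    {
        IUnknown_Release(pmt->pUnk);
        pmt->pUnk = NULL;
    }
}
This pairing explains the recurring FreeMediaType-then-CopyMediaType pattern in the examples: the old format block and pUnk reference must be released before the destination AM_MEDIA_TYPE is overwritten, otherwise they leak.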