/*
 * Gstreamer_YUV_SetMediaType: accept a YUV input media type on the sink pin
 * and configure the transform to output top-down RGB24.
 *
 * Builds the GStreamer 0.10 caps pair (video/x-raw-yuv in, video/x-raw-rgb
 * out) from the negotiated width/height/frame rate and connects the input.
 * Returns S_OK for the output direction (nothing to do), E_FAIL when the
 * type is not acceptable, otherwise the result of connecting the input.
 *
 * Fixes vs. previous revision: width/height are LONG (biWidth/biHeight are
 * signed; height < 0 means top-down), biSizeImage is refreshed after the
 * header is rewritten to 24bpp BI_RGB, and the buffer size uses abs(height).
 */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        /* The converter emits top-down RGB, so force a negative height. */
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        /* The copied YUV image size is stale now that the format is 24bpp RGB. */
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30; /* assume 30fps when the source does not say */

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    capsin = gst_caps_new_simple("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, amt->subtype.Data1,
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            NULL);
    capsout = gst_caps_new_simple("video/x-raw-rgb",
            "endianness", G_TYPE_INT, 4321,
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            "bpp", G_TYPE_INT, 24,
            "depth", G_TYPE_INT, 24,
            "red_mask", G_TYPE_INT, 0xff,
            "green_mask", G_TYPE_INT, 0xff00,
            "blue_mask", G_TYPE_INT, 0xff0000,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* Over-allocate to 4 bytes/pixel; abs() guards against top-down input. */
    This->cbBuffer = width * abs(height) * 4;
    return hr;
}
/*
 * Gstreamer_Mp3_SetMediaType: accept an MPEG audio input type on the sink
 * pin and configure the transform to decode to 16-bit little-endian PCM.
 *
 * Determines the MPEG layer from the wave format tag, replaces the output
 * media type's format block with a plain WAVEFORMATEX describing the PCM
 * output, and builds the GStreamer 0.10 caps pair (audio/mpeg in,
 * audio/x-raw-int out).  Returns S_OK for the output direction,
 * VFW_E_TYPE_NOT_ACCEPTED / E_FAIL for unusable types, E_OUTOFMEMORY if the
 * format block cannot be allocated, otherwise the connect result.
 */
static HRESULT WINAPI Gstreamer_Mp3_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *wfx, *wfxin;
    HRESULT hr;
    int layer;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_Mp3_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return VFW_E_TYPE_NOT_ACCEPTED;

    wfxin = (WAVEFORMATEX*)amt->pbFormat;

    /* The decoder caps need the MPEG layer, which depends on the tag. */
    switch (wfxin->wFormatTag) {
        case WAVE_FORMAT_MPEGLAYER3:
            layer = 3;
            break;
        case WAVE_FORMAT_MPEG: {
            MPEG1WAVEFORMAT *mpgformat = (MPEG1WAVEFORMAT*)wfxin;
            layer = mpgformat->fwHeadLayer;
            break;
        }
        default:
            FIXME("Unhandled tag %x\n", wfxin->wFormatTag);
            return E_FAIL;
    }

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    outpmt->subtype = MEDIASUBTYPE_PCM;
    outpmt->formattype = FORMAT_WaveFormatEx;
    outpmt->cbFormat = sizeof(*wfx);
    CoTaskMemFree(outpmt->pbFormat);
    wfx = CoTaskMemAlloc(outpmt->cbFormat);
    if (!wfx) {
        /* Keep the media type self-consistent: no format block on failure. */
        outpmt->pbFormat = NULL;
        outpmt->cbFormat = 0;
        return E_OUTOFMEMORY;
    }
    outpmt->pbFormat = (BYTE*)wfx;

    wfx->wFormatTag = WAVE_FORMAT_PCM;
    wfx->wBitsPerSample = 16;
    wfx->nSamplesPerSec = wfxin->nSamplesPerSec;
    wfx->nChannels = wfxin->nChannels;
    wfx->nBlockAlign = wfx->wBitsPerSample * wfx->nChannels / 8;
    wfx->cbSize = 0;
    wfx->nAvgBytesPerSec = wfx->nSamplesPerSec * wfx->nBlockAlign;

    capsin = gst_caps_new_simple("audio/mpeg",
            "mpegversion", G_TYPE_INT, 1,
            "layer", G_TYPE_INT, layer,
            "rate", G_TYPE_INT, wfx->nSamplesPerSec,
            "channels", G_TYPE_INT, wfx->nChannels,
            NULL);
    capsout = gst_caps_new_simple("audio/x-raw-int",
            "endianness", G_TYPE_INT, 1234,
            "signed", G_TYPE_BOOLEAN, 1,
            "width", G_TYPE_INT, 16,
            "depth", G_TYPE_INT, 16,
            "rate", G_TYPE_INT, wfx->nSamplesPerSec,
            "channels", G_TYPE_INT, wfx->nChannels,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* Quarter-second output buffer. */
    This->cbBuffer = wfx->nAvgBytesPerSec / 4;
    return hr;
}
/*
 * Gstreamer_AudioConvert_SetMediaType: accept a raw audio input type and
 * configure the transform to convert it to 2-channel 16-bit PCM.
 *
 * Detects float vs. integer input (WAVE_FORMAT_IEEE_FLOAT directly, or the
 * IEEE-float subtype inside WAVE_FORMAT_EXTENSIBLE), maps it to a GStreamer
 * 1.0 audio format string for the input caps, and replaces the output
 * format block with a WAVEFORMATEXTENSIBLE describing S16LE stereo PCM.
 * Returns S_OK for the output direction, E_FAIL for unusable types,
 * E_OUTOFMEMORY when the format block cannot be allocated, otherwise the
 * result of connecting the input.
 */
static HRESULT WINAPI Gstreamer_AudioConvert_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *inwfe;
    WAVEFORMATEX *outwfe;
    WAVEFORMATEXTENSIBLE *outwfx;
    GstAudioFormat format;
    HRESULT hr;
    BOOL inisfloat = FALSE;
    int indepth;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_AudioConvert_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    /* Shallow copy; the format block is replaced with our own below. */
    *outpmt = *amt;
    outpmt->pUnk = NULL;
    outpmt->cbFormat = sizeof(WAVEFORMATEXTENSIBLE);
    outpmt->pbFormat = CoTaskMemAlloc(outpmt->cbFormat);
    if (!outpmt->pbFormat) {
        /* Keep the media type self-consistent: no format block on failure. */
        outpmt->cbFormat = 0;
        return E_OUTOFMEMORY;
    }

    inwfe = (WAVEFORMATEX*)amt->pbFormat;
    indepth = inwfe->wBitsPerSample;
    if (inwfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        WAVEFORMATEXTENSIBLE *inwfx = (WAVEFORMATEXTENSIBLE*)inwfe;
        inisfloat = IsEqualGUID(&inwfx->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT);
        if (inwfx->Samples.wValidBitsPerSample)
            indepth = inwfx->Samples.wValidBitsPerSample;
    } else if (inwfe->wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
        inisfloat = TRUE;

    if (inisfloat)
        format = inwfe->wBitsPerSample == 64 ? GST_AUDIO_FORMAT_F64LE : GST_AUDIO_FORMAT_F32LE;
    else
        /* 8-bit PCM is unsigned, everything wider is signed. */
        format = gst_audio_format_build_integer(inwfe->wBitsPerSample != 8, G_LITTLE_ENDIAN,
                inwfe->wBitsPerSample, indepth);

    capsin = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, gst_audio_format_to_string(format),
            "channels", G_TYPE_INT, inwfe->nChannels,
            "rate", G_TYPE_INT, inwfe->nSamplesPerSec,
            NULL);

    outwfe = (WAVEFORMATEX*)outpmt->pbFormat;
    outwfx = (WAVEFORMATEXTENSIBLE*)outwfe;
    outwfe->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    outwfe->nChannels = 2;
    outwfe->nSamplesPerSec = inwfe->nSamplesPerSec;
    outwfe->wBitsPerSample = 16;
    outwfe->nBlockAlign = outwfe->nChannels * outwfe->wBitsPerSample / 8;
    outwfe->nAvgBytesPerSec = outwfe->nBlockAlign * outwfe->nSamplesPerSec;
    outwfe->cbSize = sizeof(*outwfx) - sizeof(*outwfe);
    outwfx->Samples.wValidBitsPerSample = outwfe->wBitsPerSample;
    outwfx->dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
    outwfx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    capsout = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, "S16LE",
            "channels", G_TYPE_INT, outwfe->nChannels,
            "rate", G_TYPE_INT, outwfe->nSamplesPerSec,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = inwfe->nAvgBytesPerSec;
    return hr;
}
/*
 * Gstreamer_YUV2ARGB_SetMediaType: accept a YUV input type on the sink pin
 * and configure the transform to output 32bpp ARGB (GStreamer "BGRA" memory
 * order on little endian).
 *
 * Rewrites the output header to 32bpp BI_RGB and builds the GStreamer 1.0
 * caps pair from the negotiated width/height/frame rate.  Returns S_OK for
 * the output direction, E_FAIL for unusable types, otherwise the connect
 * result.
 *
 * Fix vs. previous revision: biSizeImage used 3 bytes/pixel although the
 * output is 32bpp — it must be width * |height| * 4; the scratch buffer
 * size likewise uses abs(height) so a top-down (negative height) input
 * cannot produce a negative size.
 */
static HRESULT WINAPI Gstreamer_YUV2ARGB_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        /* 32bpp output: 4 bytes per pixel. */
        vih->bmiHeader.biSizeImage = width * abs(height) * 4;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 32;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 4;
    }
    if (!avgtime)
        avgtime = 10000000 / 30; /* assume 30fps when the source does not say */

    outpmt->subtype = MEDIASUBTYPE_ARGB32;

    capsin = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING,
            gst_video_format_to_string(
                gst_video_format_from_fourcc(amt->subtype.Data1)),
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            NULL);
    capsout = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING, "BGRA",
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = width * abs(height) * 4;
    return hr;
}
/*
 * Gstreamer_AudioConvert_SetMediaType (GStreamer 0.10 variant): accept a raw
 * audio input type and configure the transform to convert it to 2-channel
 * 16-bit PCM.
 *
 * Detects float vs. integer input and builds the matching 0.10 caps pair
 * (audio/x-raw-float or audio/x-raw-int in, audio/x-raw-int out), replacing
 * the output format block with a WAVEFORMATEXTENSIBLE describing stereo
 * S16LE PCM.  Returns S_OK for the output direction, E_FAIL for unusable
 * types, E_OUTOFMEMORY when the format block cannot be allocated, otherwise
 * the connect result.
 *
 * Fixes vs. previous revision: the CoTaskMemAlloc result is checked, and a
 * plain WAVE_FORMAT_IEEE_FLOAT tag is recognised as float input (matching
 * the WAVE_FORMAT_EXTENSIBLE / KSDATAFORMAT_SUBTYPE_IEEE_FLOAT path), so
 * such input is no longer described with integer caps.
 */
static HRESULT WINAPI Gstreamer_AudioConvert_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    WAVEFORMATEX *inwfe;
    WAVEFORMATEX *outwfe;
    WAVEFORMATEXTENSIBLE *outwfx;
    HRESULT hr;
    BOOL inisfloat = FALSE;
    int indepth;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_AudioConvert_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    /* Shallow copy; the format block is replaced with our own below. */
    *outpmt = *amt;
    outpmt->pUnk = NULL;
    outpmt->cbFormat = sizeof(WAVEFORMATEXTENSIBLE);
    outpmt->pbFormat = CoTaskMemAlloc(outpmt->cbFormat);
    if (!outpmt->pbFormat) {
        /* Keep the media type self-consistent: no format block on failure. */
        outpmt->cbFormat = 0;
        return E_OUTOFMEMORY;
    }

    inwfe = (WAVEFORMATEX*)amt->pbFormat;
    indepth = inwfe->wBitsPerSample;
    if (inwfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
        WAVEFORMATEXTENSIBLE *inwfx = (WAVEFORMATEXTENSIBLE*)inwfe;
        inisfloat = IsEqualGUID(&inwfx->SubFormat, &KSDATAFORMAT_SUBTYPE_IEEE_FLOAT);
        if (inwfx->Samples.wValidBitsPerSample)
            indepth = inwfx->Samples.wValidBitsPerSample;
    } else if (inwfe->wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
        inisfloat = TRUE;

    capsin = gst_caps_new_simple(inisfloat ? "audio/x-raw-float" : "audio/x-raw-int",
            "endianness", G_TYPE_INT, 1234,
            "width", G_TYPE_INT, inwfe->wBitsPerSample,
            "depth", G_TYPE_INT, indepth,
            "channels", G_TYPE_INT, inwfe->nChannels,
            "rate", G_TYPE_INT, inwfe->nSamplesPerSec,
            NULL);

    outwfe = (WAVEFORMATEX*)outpmt->pbFormat;
    outwfx = (WAVEFORMATEXTENSIBLE*)outwfe;
    outwfe->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    outwfe->nChannels = 2;
    outwfe->nSamplesPerSec = inwfe->nSamplesPerSec;
    outwfe->wBitsPerSample = 16;
    outwfe->nBlockAlign = outwfe->nChannels * outwfe->wBitsPerSample / 8;
    outwfe->nAvgBytesPerSec = outwfe->nBlockAlign * outwfe->nSamplesPerSec;
    outwfe->cbSize = sizeof(*outwfx) - sizeof(*outwfe);
    outwfx->Samples.wValidBitsPerSample = outwfe->wBitsPerSample;
    outwfx->dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
    outwfx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

    capsout = gst_caps_new_simple("audio/x-raw-int",
            "endianness", G_TYPE_INT, 1234,
            "width", G_TYPE_INT, outwfe->wBitsPerSample,
            "depth", G_TYPE_INT, outwfx->Samples.wValidBitsPerSample,
            "channels", G_TYPE_INT, outwfe->nChannels,
            "rate", G_TYPE_INT, outwfe->nSamplesPerSec,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    This->cbBuffer = inwfe->nAvgBytesPerSec;
    return hr;
}
/*
 * Gstreamer_YUV_SetMediaType (videoflip variant): accept a YUV input media
 * type on the sink pin and configure the transform to output top-down RGB24
 * (GStreamer 0.10 caps).  Additionally tries to create a "videoflip" element
 * (method 5 = vertical flip) and captures its sink/src pads in
 * This->their_sink2 / This->their_src2 for later linking; if either pad (or
 * the element) cannot be obtained, the pads/element are released and the
 * flip stage is skipped (filter2 = 0).  Returns S_OK for the output
 * direction, E_FAIL for unusable types, otherwise the connect result.
 * NOTE(review): the GST_ITERATOR_OK cases below intentionally fall through
 * to the done-setting cases so each iterator loop stops after the first pad.
 */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt) { GstTfImpl *This = (GstTfImpl*)tf; GstCaps *capsin, *capsout; AM_MEDIA_TYPE *outpmt = &This->tf.pmt; HRESULT hr; int avgtime; LONG width, height; if (dir != PINDIR_INPUT) return S_OK; if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat) return E_FAIL; FreeMediaType(outpmt); CopyMediaType(outpmt, amt); if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) { VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat; avgtime = vih->AvgTimePerFrame; width = vih->bmiHeader.biWidth; height = vih->bmiHeader.biHeight; if (vih->bmiHeader.biHeight > 0) vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight; vih->bmiHeader.biBitCount = 24; vih->bmiHeader.biCompression = BI_RGB; vih->bmiHeader.biSizeImage = width * abs(height) * 3; } else { VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat; avgtime = vih->AvgTimePerFrame; width = vih->bmiHeader.biWidth; height = vih->bmiHeader.biHeight; if (vih->bmiHeader.biHeight > 0) vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight; vih->bmiHeader.biBitCount = 24; vih->bmiHeader.biCompression = BI_RGB; vih->bmiHeader.biSizeImage = width * abs(height) * 3; } if (!avgtime) avgtime = 10000000 / 30; /* assume 30fps if unspecified */ outpmt->subtype = MEDIASUBTYPE_RGB24; capsin = gst_caps_new_simple("video/x-raw-yuv", "format", GST_TYPE_FOURCC, amt->subtype.Data1, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, 10000000, (int)avgtime, NULL); capsout = gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, 24, "depth", G_TYPE_INT, 24, "endianness", G_TYPE_INT, 4321, "red_mask", G_TYPE_INT, 0xff, "green_mask", G_TYPE_INT, 0xff00, "blue_mask", G_TYPE_INT, 0xff0000, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, 10000000, (int)avgtime, NULL); /* Optional vertical-flip stage; best effort, skipped on any failure. */ This->filter2 = gst_element_factory_make("videoflip", NULL); if (This->filter2) { GstIterator *it; int done = 
0; g_object_set(This->filter2, "method", 5, NULL); it = gst_element_iterate_sink_pads(This->filter2); while (!done) { gpointer item; switch (gst_iterator_next(it, &item)) { case GST_ITERATOR_RESYNC: gst_iterator_resync (it); break; case GST_ITERATOR_OK: This->their_sink2 = item; /* fall through: first pad is enough */ case GST_ITERATOR_ERROR: case GST_ITERATOR_DONE: done = 1; break; } } gst_iterator_free(it); done = 0; it = gst_element_iterate_src_pads(This->filter2); while (!done) { gpointer item; switch (gst_iterator_next(it, &item)) { case GST_ITERATOR_RESYNC: gst_iterator_resync (it); break; case GST_ITERATOR_OK: This->their_src2 = item; /* fall through: first pad is enough */ case GST_ITERATOR_ERROR: case GST_ITERATOR_DONE: done = 1; break; } } gst_iterator_free(it); /* Need both pads; otherwise release everything and drop the stage. */ if (!This->their_src2 || !This->their_sink2) { if (This->their_src2) gst_object_unref(This->their_src2); if (This->their_sink2) gst_object_unref(This->their_sink2); gst_object_unref(This->filter2); This->filter2 = 0; } } hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout); gst_caps_unref(capsin); gst_caps_unref(capsout); This->cbBuffer = width * height * 4; return hr; }