/* Accept a YUV video input type and reconfigure the transform to emit RGB24.
 *
 * Called by the base TransformFilter when a media type is set on a pin.
 * Only the input pin drives negotiation (S_OK no-op otherwise).  The output
 * media type is a copy of the input with the bitmap header rewritten to a
 * top-down 24-bit BI_RGB format; GStreamer 0.10 caps are then built for the
 * YUV source and RGB sink sides and handed to the transform connector.
 *
 * Returns S_OK on success, E_FAIL if the type is not acceptable, or the
 * result of Gstreamer_transform_ConnectInput.
 */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        /* Force a top-down DIB (negative biHeight) so the RGB rows come out
         * in the order GStreamer produces them. */
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        /* Fix: biSizeImage previously kept the YUV input's value; the RGB24
         * output frame is width * |height| * 3 bytes (as the other
         * SetMediaType variants in this file already compute). */
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30; /* default to 30 fps when unspecified */

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    /* Input caps: the DirectShow subtype GUID's Data1 is the YUV fourcc. */
    capsin = gst_caps_new_simple("video/x-raw-yuv",
                                 "format", GST_TYPE_FOURCC, amt->subtype.Data1,
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    /* Output caps: 24-bit BGR, matching MEDIASUBTYPE_RGB24 byte order. */
    capsout = gst_caps_new_simple("video/x-raw-rgb",
                                  "endianness", G_TYPE_INT, 4321,
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  "bpp", G_TYPE_INT, 24,
                                  "depth", G_TYPE_INT, 24,
                                  "red_mask", G_TYPE_INT, 0xff,
                                  "green_mask", G_TYPE_INT, 0xff00,
                                  "blue_mask", G_TYPE_INT, 0xff0000,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* Slack buffer size (4 bytes/pixel) for the downstream allocator. */
    This->cbBuffer = width * height * 4;
    return hr;
}
/* Accept a YUV input type and reconfigure the transform to output RGB24.
 *
 * Invoked by the base TransformFilter whenever a pin's media type is set;
 * only the input pin triggers work.  The stored output type becomes a copy
 * of the input with its bitmap header rewritten to top-down 24-bit BI_RGB,
 * and GStreamer 1.0 caps for both sides are passed to the input connector.
 *
 * Returns S_OK, E_FAIL for an unacceptable type, or the connector's result.
 */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    GstCaps *capsin, *capsout;
    BITMAPINFOHEADER *bmi;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    if (dir != PINDIR_INPUT)
        return S_OK;
    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    /* Both header layouts carry the same bitmap header; grab a pointer to it
     * once so the rewrite below is shared between the two format types. */
    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        bmi = &vih->bmiHeader;
    } else {
        VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih2->AvgTimePerFrame;
        bmi = &vih2->bmiHeader;
    }

    width = bmi->biWidth;
    height = bmi->biHeight;
    /* Top-down DIB (negative height) so rows match GStreamer's output order. */
    if (bmi->biHeight > 0)
        bmi->biHeight = -bmi->biHeight;
    bmi->biBitCount = 24;
    bmi->biCompression = BI_RGB;
    bmi->biSizeImage = width * abs(height) * 3;

    if (!avgtime)
        avgtime = 10000000 / 30; /* assume 30 fps when unspecified */

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    /* The subtype GUID's Data1 holds the fourcc of the YUV layout. */
    capsin = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING,
                gst_video_format_to_string(gst_video_format_from_fourcc(amt->subtype.Data1)),
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            NULL);
    /* "BGR" matches MEDIASUBTYPE_RGB24's in-memory byte order. */
    capsout = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING, "BGR",
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
            NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* Slack allocation (4 bytes/pixel) for the downstream buffer. */
    This->cbBuffer = width * height * 4;
    return hr;
}
/* Accept a YUV input type, reconfigure the output to RGB24, and (on this
 * GStreamer 0.10 path) try to insert a "videoflip" element whose pads are
 * cached in filter2/their_sink2/their_src2 for later linking.
 *
 * Only the input pin drives negotiation (S_OK no-op otherwise).  Returns
 * E_FAIL for an unacceptable type, else the result of
 * Gstreamer_transform_ConnectInput.  If videoflip setup fails, the filter
 * proceeds without it (filter2 reset to 0) rather than failing the call.
 */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    int avgtime;
    LONG width, height;

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        /* Force a top-down DIB (negative biHeight). */
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        /* RGB24 frame size: width * |height| * 3 bytes. */
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    if (!avgtime)
        avgtime = 10000000 / 30; /* default to 30 fps when unspecified */

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    /* Input caps: the subtype GUID's Data1 carries the YUV fourcc. */
    capsin = gst_caps_new_simple("video/x-raw-yuv",
                                 "format", GST_TYPE_FOURCC, amt->subtype.Data1,
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, (int)avgtime,
                                 NULL);
    /* Output caps: 24-bit RGB matching MEDIASUBTYPE_RGB24. */
    capsout = gst_caps_new_simple("video/x-raw-rgb",
                                  "bpp", G_TYPE_INT, 24,
                                  "depth", G_TYPE_INT, 24,
                                  "endianness", G_TYPE_INT, 4321,
                                  "red_mask", G_TYPE_INT, 0xff,
                                  "green_mask", G_TYPE_INT, 0xff00,
                                  "blue_mask", G_TYPE_INT, 0xff0000,
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, (int)avgtime,
                                  NULL);

    /* Optional extra stage: a videoflip element whose first sink/src pads
     * are iterated out and cached.  Best effort only. */
    This->filter2 = gst_element_factory_make("videoflip", NULL);
    if (This->filter2) {
        GstIterator *it;
        int done = 0;

        /* "method" 5 — presumably the vertical-flip mode of videoflip;
         * TODO(review): confirm against the element's property enum. */
        g_object_set(This->filter2, "method", 5, NULL);

        /* Grab the first sink pad (0.10 iterators hand back gpointer items
         * with a reference we keep in their_sink2). */
        it = gst_element_iterate_sink_pads(This->filter2);
        while (!done) {
            gpointer item;

            switch (gst_iterator_next(it, &item)) {
            case GST_ITERATOR_RESYNC:
                gst_iterator_resync (it);
                break;
            case GST_ITERATOR_OK:
                This->their_sink2 = item;
                /* fall through — first pad is enough, stop iterating */
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
                done = 1;
                break;
            }
        }
        gst_iterator_free(it);

        /* Same dance for the first src pad. */
        done = 0;
        it = gst_element_iterate_src_pads(This->filter2);
        while (!done) {
            gpointer item;

            switch (gst_iterator_next(it, &item)) {
            case GST_ITERATOR_RESYNC:
                gst_iterator_resync (it);
                break;
            case GST_ITERATOR_OK:
                This->their_src2 = item;
                /* fall through — first pad is enough, stop iterating */
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
                done = 1;
                break;
            }
        }
        gst_iterator_free(it);

        /* If either pad is missing, drop the refs and give up on the
         * flip stage — the transform still works without it. */
        if (!This->their_src2 || !This->their_sink2) {
            if (This->their_src2)
                gst_object_unref(This->their_src2);
            if (This->their_sink2)
                gst_object_unref(This->their_sink2);
            gst_object_unref(This->filter2);
            This->filter2 = 0;
        }
    }

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* Slack buffer size (4 bytes/pixel) for the downstream allocator. */
    This->cbBuffer = width * height * 4;
    return hr;
}