static void test_AviCo(void) { IPersistPropertyBag *persist_bag; IPin *pin, *in_pin, *out_pin; IEnumPins *enum_pins; IBaseFilter *avico; PIN_INFO pin_info; HRESULT hres; static const WCHAR inputW[] = {'I','n','p','u','t',0}; static const WCHAR outputW[] = {'O','u','t','p','u','t',0}; hres = CoCreateInstance(&CLSID_AVICo, NULL, CLSCTX_INPROC_SERVER, &IID_IBaseFilter, (void**)&avico); if(hres == REGDB_E_CLASSNOTREG) { win_skip("CLSID_AVICo not restered\n"); return; } ok(hres == S_OK, "Could not create CLSID_AVICo class: %08x\n", hres); hres = IBaseFilter_QueryInterface(avico, &IID_IPin, (void**)&pin); ok(hres == E_NOINTERFACE, "QueryInterface(IID_IPin) returned: %08x\n", hres); hres = IBaseFilter_QueryInterface(avico, &IID_IPersistPropertyBag, (void**)&persist_bag); ok(hres == S_OK, "QueryInterface(IID_IPersistPropertyBag) returned: %08x\n", hres); SET_EXPECT(Read_FccHandler); hres = IPersistPropertyBag_Load(persist_bag, &PropertyBag, NULL); ok(hres == S_OK, "Load failed: %08x\n", hres); CHECK_CALLED(Read_FccHandler); IPersistPropertyBag_Release(persist_bag); hres = IBaseFilter_EnumPins(avico, &enum_pins); ok(hres == S_OK, "EnumPins failed: %08x\n", hres); hres = IEnumPins_Next(enum_pins, 1, &in_pin, NULL); ok(hres == S_OK, "Next failed: %08x\n", hres); hres = IPin_QueryPinInfo(in_pin, &pin_info); ok(hres == S_OK, "QueryPinInfo failed: %08x\n", hres); ok(pin_info.pFilter == avico, "pin_info.pFilter != avico\n"); ok(pin_info.dir == PINDIR_INPUT, "pin_info.dir = %d\n", pin_info.dir); ok(!lstrcmpW(pin_info.achName, inputW), "pin_info.achName = %s\n", wine_dbgstr_w(pin_info.achName)); hres = IEnumPins_Next(enum_pins, 1, &out_pin, NULL); ok(hres == S_OK, "Next failed: %08x\n", hres); hres = IPin_QueryPinInfo(out_pin, &pin_info); ok(hres == S_OK, "QueryPinInfo failed: %08x\n", hres); ok(pin_info.pFilter == avico, "pin_info.pFilter != avico\n"); ok(pin_info.dir == PINDIR_OUTPUT, "pin_info.dir = %d\n", pin_info.dir); ok(!lstrcmpW(pin_info.achName, outputW), "pin_info.achName 
= %s\n", wine_dbgstr_w(pin_info.achName)); IEnumPins_Release(enum_pins); IPin_Release(in_pin); IPin_Release(out_pin); IBaseFilter_Release(avico); }
/* IMediaDet::get_Filename — return (as a BSTR) the file currently loaded
 * into the source filter, or a NULL BSTR when no source is set. */
static HRESULT WINAPI MediaDet_get_Filename(IMediaDet* iface, BSTR *pVal)
{
    MediaDetImpl *This = impl_from_IMediaDet(iface);
    IFileSourceFilter *file_source;
    LPOLESTR filename;
    HRESULT hr;

    TRACE("(%p)\n", This);

    if (!pVal)
        return E_POINTER;

    *pVal = NULL;
    /* MSDN says it should return E_FAIL if no file is open, but tests show otherwise. */
    if (!This->source)
        return S_OK;

    hr = IBaseFilter_QueryInterface(This->source, &IID_IFileSourceFilter, (void **) &file_source);
    if (FAILED(hr))
        return hr;

    hr = IFileSourceFilter_GetCurFile(file_source, &filename, NULL);
    IFileSourceFilter_Release(file_source);
    if (FAILED(hr))
        return hr;

    /* Copy into a BSTR for the caller; the COM string is ours to free. */
    *pVal = SysAllocString(filename);
    CoTaskMemFree(filename);

    return *pVal ? S_OK : E_OUTOFMEMORY;
}
/* IPin::QueryInterface for the QuickTime splitter output pin.
 * IUnknown/IPin resolve to the pin itself, IMediaSeeking is delegated to the
 * owning filter, and IQualityControl resolves to the embedded interface. */
static HRESULT WINAPI QTOutPin_QueryInterface(IPin *iface, REFIID riid, void **ppv)
{
    QTOutPin *This = impl_QTOutPin_from_IPin(iface);

    TRACE("(%s, %p)\n", debugstr_guid(riid), ppv);

    *ppv = NULL;

    if (IsEqualIID(riid, &IID_IUnknown) || IsEqualIID(riid, &IID_IPin))
        *ppv = iface;
    else if (IsEqualIID(riid, &IID_IMediaSeeking))
        /* Seeking is implemented by the owning filter; forward the query. */
        return IBaseFilter_QueryInterface(This->pin.pin.pinInfo.pFilter, &IID_IMediaSeeking, ppv);
    else if (IsEqualIID(riid, &IID_IQualityControl))
        *ppv = &This->IQualityControl_iface;

    if (*ppv)
    {
        IUnknown_AddRef((IUnknown *)*ppv);
        return S_OK;
    }

    FIXME("No interface for %s!\n", debugstr_guid(riid));
    return E_NOINTERFACE;
}
/**
 * Pops up a user dialog allowing them to adjust properties for the given filter, if possible.
 *
 * Fix: the two av_log() messages were missing their trailing newline, unlike
 * every other log message in this file.
 */
void dshow_show_filter_properties(IBaseFilter *device_filter, AVFormatContext *avctx) {
    ISpecifyPropertyPages *property_pages = NULL;
    IUnknown *device_filter_iunknown = NULL;
    HRESULT hr;
    FILTER_INFO filter_info = {0}; /* a warning on this line is false positive GCC bug 53119 AFAICT */
    CAUUID ca_guid = {0};

    hr  = IBaseFilter_QueryInterface(device_filter, &IID_ISpecifyPropertyPages, (void **)&property_pages);
    if (hr != S_OK) {
        av_log(avctx, AV_LOG_WARNING, "requested filter does not have a property page to show\n");
        goto end;
    }
    hr = IBaseFilter_QueryFilterInfo(device_filter, &filter_info);
    if (hr != S_OK) {
        goto fail;
    }
    hr = IBaseFilter_QueryInterface(device_filter, &IID_IUnknown, (void **)&device_filter_iunknown);
    if (hr != S_OK) {
        goto fail;
    }
    hr = ISpecifyPropertyPages_GetPages(property_pages, &ca_guid);
    if (hr != S_OK) {
        goto fail;
    }
    /* Modal dialog; returns once the user closes the property sheet. */
    hr = OleCreatePropertyFrame(NULL, 0, 0, filter_info.achName, 1, &device_filter_iunknown, ca_guid.cElems,
        ca_guid.pElems, 0, 0, NULL);
    if (hr != S_OK) {
        goto fail;
    }
    goto end;
fail:
    av_log(avctx, AV_LOG_ERROR, "Failure showing property pages for filter\n");
end:
    if (property_pages)
        ISpecifyPropertyPages_Release(property_pages);
    if (device_filter_iunknown)
        IUnknown_Release(device_filter_iunknown);
    if (filter_info.pGraph)
        IFilterGraph_Release(filter_info.pGraph);
    if (ca_guid.pElems)
        CoTaskMemFree(ca_guid.pElems);
}
/* IAMMultiMediaStream::GetFilter — hand out the media-stream filter, or a
 * NULL interface pointer (with S_OK) when none has been created yet. */
static HRESULT WINAPI IAMMultiMediaStreamImpl_GetFilter(IAMMultiMediaStream* iface, IMediaStreamFilter** ppFilter)
{
    IAMMultiMediaStreamImpl *This = impl_from_IAMMultiMediaStream(iface);

    TRACE("(%p/%p)->(%p)\n", This, iface, ppFilter);

    if (!ppFilter)
        return E_POINTER;

    *ppFilter = NULL;

    /* No filter yet is not an error; the caller just gets NULL. */
    if (!This->media_stream_filter)
        return S_OK;

    return IBaseFilter_QueryInterface(This->media_stream_filter, &IID_IMediaStreamFilter, (LPVOID*)ppFilter);
}
/* ICaptureGraphBuilder2::FindInterface — workaround stub.
 * Looks for the specified interface on the filter, upstream and
 * downstream from the filter, and, optionally, only on the output
 * pin of the given category.  This stub only queries the filter itself. */
static HRESULT WINAPI fnCaptureGraphBuilder2_FindInterface(ICaptureGraphBuilder2 * iface,
        const GUID *pCategory, const GUID *pType, IBaseFilter *pf, REFIID riid, void **ppint)
{
    CaptureGraphImpl *This = impl_from_ICaptureGraphBuilder2(iface);

    FIXME("(%p/%p)->(%s, %s, %p, %s, %p) - workaround stub!\n", This, iface,
          debugstr_guid(pCategory), debugstr_guid(pType), pf, debugstr_guid(riid), ppint);

    return IBaseFilter_QueryInterface(pf, riid, ppint);
}
/**
 * Open a DirectShow capture device (or load a saved capture filter from file),
 * add it and a private grabber filter to the graph, and connect them through
 * CaptureGraphBuilder2 (inserting crossbar filters if needed).
 *
 * Fix: the HRESULT of ICaptureGraphBuilder2_SetFiltergraph() was discarded;
 * the following `if (r != S_OK)` re-tested the stale value left over from
 * CoCreateInstance, so a SetFiltergraph failure was silently ignored.
 *
 * Returns 0 on success, a negative AVERROR code on failure.
 */
static int
dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
                  enum dshowDeviceType devtype, enum dshowSourceFilterType sourcetype)
{
    struct dshow_ctx *ctx = avctx->priv_data;
    IBaseFilter *device_filter = NULL;
    IGraphBuilder *graph = ctx->graph;
    IPin *device_pin = NULL;
    libAVPin *capture_pin = NULL;
    libAVFilter *capture_filter = NULL;
    ICaptureGraphBuilder2 *graph_builder2 = NULL;
    int ret = AVERROR(EIO);
    int r;
    IStream *ifile_stream = NULL;
    IStream *ofile_stream = NULL;
    IPersistStream *pers_stream = NULL;

    const wchar_t *filter_name[2] = { L"Audio capture filter", L"Video capture filter" };

    /* Either deserialize a previously saved capture filter, or enumerate
     * real devices and pick one. */
    if ( ((ctx->audio_filter_load_file) && (strlen(ctx->audio_filter_load_file)>0) && (sourcetype == AudioSourceDevice)) ||
         ((ctx->video_filter_load_file) && (strlen(ctx->video_filter_load_file)>0) && (sourcetype == VideoSourceDevice)) ) {
        HRESULT hr;
        char *filename = NULL;

        if (sourcetype == AudioSourceDevice)
            filename = ctx->audio_filter_load_file;
        else
            filename = ctx->video_filter_load_file;

        hr = SHCreateStreamOnFile ((LPCSTR) filename, STGM_READ, &ifile_stream);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not open capture filter description file.\n");
            goto error;
        }

        hr = OleLoadFromStream(ifile_stream, &IID_IBaseFilter, (void **) &device_filter);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not load capture filter from file.\n");
            goto error;
        }

        if (sourcetype == AudioSourceDevice)
            av_log(avctx, AV_LOG_INFO, "Audio-");
        else
            av_log(avctx, AV_LOG_INFO, "Video-");
        av_log(avctx, AV_LOG_INFO, "Capture filter loaded successfully from file \"%s\".\n", filename);
    } else {
        if ((r = dshow_cycle_devices(avctx, devenum, devtype, sourcetype, &device_filter)) < 0) {
            ret = r;
            goto error;
        }
    }

    ctx->device_filter [devtype] = device_filter;

    r = IGraphBuilder_AddFilter(graph, device_filter, NULL);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not add device filter to graph.\n");
        goto error;
    }

    if ((r = dshow_cycle_pins(avctx, devtype, sourcetype, device_filter, &device_pin)) < 0) {
        ret = r;
        goto error;
    }
    ctx->device_pin[devtype] = device_pin;

    capture_filter = libAVFilter_Create(avctx, callback, devtype);
    if (!capture_filter) {
        av_log(avctx, AV_LOG_ERROR, "Could not create grabber filter.\n");
        goto error;
    }
    ctx->capture_filter[devtype] = capture_filter;

    /* Optionally serialize the configured device filter to a file. */
    if ( ((ctx->audio_filter_save_file) && (strlen(ctx->audio_filter_save_file)>0) && (sourcetype == AudioSourceDevice)) ||
         ((ctx->video_filter_save_file) && (strlen(ctx->video_filter_save_file)>0) && (sourcetype == VideoSourceDevice)) ) {
        HRESULT hr;
        char *filename = NULL;

        if (sourcetype == AudioSourceDevice)
            filename = ctx->audio_filter_save_file;
        else
            filename = ctx->video_filter_save_file;

        hr = SHCreateStreamOnFile ((LPCSTR) filename, STGM_CREATE | STGM_READWRITE, &ofile_stream);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not create capture filter description file.\n");
            goto error;
        }

        hr = IBaseFilter_QueryInterface(device_filter, &IID_IPersistStream, (void **) &pers_stream);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Query for IPersistStream failed.\n");
            goto error;
        }

        hr = OleSaveToStream(pers_stream, ofile_stream);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not save capture filter \n");
            goto error;
        }

        hr = IStream_Commit(ofile_stream, STGC_DEFAULT);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not commit capture filter data to file.\n");
            goto error;
        }

        if (sourcetype == AudioSourceDevice)
            av_log(avctx, AV_LOG_INFO, "Audio-");
        else
            av_log(avctx, AV_LOG_INFO, "Video-");
        av_log(avctx, AV_LOG_INFO, "Capture filter saved successfully to file \"%s\".\n", filename);
    }

    r = IGraphBuilder_AddFilter(graph, (IBaseFilter *) capture_filter, filter_name[devtype]);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not add capture filter to graph\n");
        goto error;
    }

    libAVPin_AddRef(capture_filter->pin);
    capture_pin = capture_filter->pin;
    ctx->capture_pin[devtype] = capture_pin;

    r = CoCreateInstance(&CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
                         &IID_ICaptureGraphBuilder2, (void **) &graph_builder2);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not create CaptureGraphBuilder2\n");
        goto error;
    }
    /* Fixed: capture the result instead of re-checking the stale `r`. */
    r = ICaptureGraphBuilder2_SetFiltergraph(graph_builder2, graph);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not set graph for CaptureGraphBuilder2\n");
        goto error;
    }

    r = ICaptureGraphBuilder2_RenderStream(graph_builder2, NULL, NULL,
                                           (IUnknown *) device_pin,
                                           NULL /* no intermediate filter */,
                                           (IBaseFilter *) capture_filter);
    /* connect pins, optionally insert intermediate filters like crossbar if necessary */
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not RenderStream to connect pins\n");
        goto error;
    }

    r = dshow_try_setup_crossbar_options(graph_builder2, device_filter, devtype, avctx);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not setup CrossBar\n");
        goto error;
    }

    ret = 0;

error:
    if (graph_builder2 != NULL)
        ICaptureGraphBuilder2_Release(graph_builder2);
    if (pers_stream)
        IPersistStream_Release(pers_stream);
    if (ifile_stream)
        IStream_Release(ifile_stream);
    if (ofile_stream)
        IStream_Release(ofile_stream);
    return ret;
}
/* Sink-pad setcaps handler: builds the DirectShow input media type from the
 * negotiated caps (including any codec_data), pushes it into the fakesrc
 * filter, wires fakesrc -> decoder -> fakesink inside the DirectShow graph,
 * negotiates the GStreamer output caps and starts the graph. */
static gboolean
gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  GstBuffer *extradata = NULL;
  const GValue *v = NULL;
  gint size = 0;
  GstCaps *caps_out;
  AM_MEDIA_TYPE output_mediatype, input_mediatype;
  VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
  IPin *output_pin = NULL, *input_pin = NULL;
  IGstDshowInterface *gstdshowinterface = NULL;
  const GValue *fps;

  /* read data */
  if (!gst_structure_get_int (s, "width", &vdec->width) ||
      !gst_structure_get_int (s, "height", &vdec->height)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video width or height from caps"), (NULL));
    goto end;
  }
  fps = gst_structure_get_value (s, "framerate");
  if (!fps) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video framerate from caps"), (NULL));
    goto end;
  }
  vdec->fps_n = gst_value_get_fraction_numerator (fps);
  vdec->fps_d = gst_value_get_fraction_denominator (fps);

  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* define the input type format */
  memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  input_mediatype.majortype = klass->entry->input_majortype;
  input_mediatype.subtype = klass->entry->input_subtype;
  input_mediatype.bFixedSizeSamples = FALSE;
  input_mediatype.bTemporalCompression = TRUE;

  if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
    /* MPEG-1 stores the sequence header inside MPEG1VIDEOINFO's
     * bSequenceHeader array (which already reserves 1 byte, hence -1). */
    size = sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) - 1 : 0);
    input_vheader = g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;
      memcpy (mpeg_info->bSequenceHeader,
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_MPEGVideo;
  } else {
    size = sizeof (VIDEOINFOHEADER) + (extradata ? GST_BUFFER_SIZE (extradata) : 0);
    input_vheader = g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      /* Codec data is appended after our header */
      memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_VideoInfo;
  }
  input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
  input_vheader->rcSource.right = vdec->width;
  input_vheader->rcSource.bottom = vdec->height;
  input_vheader->rcTarget = input_vheader->rcSource;
  input_vheader->bmiHeader.biWidth = vdec->width;
  input_vheader->bmiHeader.biHeight = vdec->height;
  input_vheader->bmiHeader.biPlanes = 1;
  input_vheader->bmiHeader.biBitCount = 16;
  input_vheader->bmiHeader.biCompression = klass->entry->format;
  input_vheader->bmiHeader.biSizeImage =
      (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);

  input_mediatype.cbFormat = size;
  input_mediatype.pbFormat = (BYTE *) input_vheader;
  input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;

  hres = IBaseFilter_QueryInterface (vdec->srcfilter, &IID_IGstDshowInterface,
      (void **) &gstdshowinterface);
  if (hres != S_OK || !gstdshowinterface) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get IGstDshowInterface interface from dshow fakesrc filter (error=%d)",
            hres), (NULL));
    goto end;
  }

  /* save a reference to IGstDshowInterface to use it processing functions */
  if (!vdec->gstdshowsrcfilter) {
    vdec->gstdshowsrcfilter = gstdshowinterface;
    IBaseFilter_AddRef (vdec->gstdshowsrcfilter);
  }

  IGstDshowInterface_gst_set_media_type (gstdshowinterface, &input_mediatype);
  IGstDshowInterface_Release (gstdshowinterface);
  gstdshowinterface = NULL;

  /* set the sample size for fakesrc filter to the output buffer size */
  IGstDshowInterface_gst_set_sample_size (vdec->gstdshowsrcfilter,
      input_mediatype.lSampleSize);

  /* connect our fake src to decoder */
  gst_dshow_get_pin_from_filter (vdec->srcfilter, PINDIR_OUTPUT, &output_pin);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT, &input_pin);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres = IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin, NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%d)", hres), (NULL));
    goto end;
  }

  IPin_Release (input_pin);
  IPin_Release (output_pin);
  input_pin = NULL;
  output_pin = NULL;

  /* get decoder output video format */
  if (!gst_dshowvideodec_get_filter_output_format (vdec,
          &klass->entry->output_subtype, &output_vheader, &size)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get decoder output video format"), (NULL));
    goto end;
  }

  memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  output_mediatype.majortype = klass->entry->output_majortype;
  output_mediatype.subtype = klass->entry->output_subtype;
  output_mediatype.bFixedSizeSamples = TRUE;
  output_mediatype.bTemporalCompression = FALSE;
  output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
  output_mediatype.formattype = FORMAT_VideoInfo;
  output_mediatype.cbFormat = size;
  output_mediatype.pbFormat = (char *) output_vheader;

  hres = IBaseFilter_QueryInterface (vdec->sinkfilter, &IID_IGstDshowInterface,
      (void **) &gstdshowinterface);
  if (hres != S_OK || !gstdshowinterface) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get IGstDshowInterface interface from dshow fakesink filter (error=%d)",
            hres), (NULL));
    goto end;
  }

  /* decoded frames come back through gst_dshowvideodec_push_buffer */
  IGstDshowInterface_gst_set_media_type (gstdshowinterface, &output_mediatype);
  IGstDshowInterface_gst_set_buffer_callback (gstdshowinterface,
      gst_dshowvideodec_push_buffer, (byte *) vdec);
  IGstDshowInterface_Release (gstdshowinterface);
  gstdshowinterface = NULL;

  /* connect decoder to our fake sink */
  gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT, &output_pin);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }
  gst_dshow_get_pin_from_filter (vdec->sinkfilter, PINDIR_INPUT, &input_pin);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres = IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin,
      &output_mediatype);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%d)", hres), (NULL));
    goto end;
  }

  /* negotiate output */
  caps_out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (caps_out,
      "width", G_TYPE_INT, vdec->width,
      "height", G_TYPE_INT, vdec->height,
      "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
  if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
    gst_caps_unref (caps_out);
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (caps_out);

  hres = IMediaFilter_Run (vdec->mediafilter, -1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%d)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
end:
  gst_object_unref (vdec);
  if (input_vheader)
    g_free (input_vheader);
  if (gstdshowinterface)
    IGstDshowInterface_Release (gstdshowinterface);
  if (input_pin)
    IPin_Release (input_pin);
  if (output_pin)
    IPin_Release (output_pin);

  return ret;
}
/* Find and connect a splitter able to handle the media type exposed by
 * This->source: enumerate candidate filters via IFilterMapper2 and, for each
 * one, add it to the graph and try to connect the source's first pin to the
 * splitter's first pin.  On success This->splitter holds the connected
 * splitter; on failure it is left NULL. */
static HRESULT GetSplitter(MediaDetImpl *This)
{
    IFileSourceFilter *file;
    LPOLESTR name;
    AM_MEDIA_TYPE mt;
    GUID type[2];
    IFilterMapper2 *map;
    IEnumMoniker *filters;
    IMoniker *mon;
    VARIANT var;
    GUID clsid;
    IBaseFilter *splitter;
    IEnumPins *pins;
    IPin *source_pin, *splitter_pin;
    HRESULT hr;

    hr = CoCreateInstance(&CLSID_FilterMapper2, NULL, CLSCTX_INPROC_SERVER,
                          &IID_IFilterMapper2, (void **) &map);
    if (FAILED(hr))
        return hr;

    hr = IBaseFilter_QueryInterface(This->source, &IID_IFileSourceFilter,
                                    (void **) &file);
    if (FAILED(hr))
    {
        IFilterMapper2_Release(map);
        return hr;
    }

    /* Only the media type is needed here; the file name is discarded. */
    hr = IFileSourceFilter_GetCurFile(file, &name, &mt);
    IFileSourceFilter_Release(file);
    CoTaskMemFree(name);
    if (FAILED(hr))
    {
        IFilterMapper2_Release(map);
        return hr;
    }
    type[0] = mt.majortype;
    type[1] = mt.subtype;
    CoTaskMemFree(mt.pbFormat);

    /* One major/subtype pair, any merit above MERIT_UNLIKELY. */
    hr = IFilterMapper2_EnumMatchingFilters(map, &filters, 0, TRUE,
                                            MERIT_UNLIKELY, FALSE, 1, type,
                                            NULL, NULL, FALSE, TRUE,
                                            0, NULL, NULL, NULL);
    IFilterMapper2_Release(map);
    if (FAILED(hr))
        return hr;

    /* Returned when the enumeration yields no usable candidate at all. */
    hr = E_NOINTERFACE;
    while (IEnumMoniker_Next(filters, 1, &mon, NULL) == S_OK)
    {
        hr = GetFilterInfo(mon, &clsid, &var);
        IMoniker_Release(mon);
        if (FAILED(hr))
            continue;

        hr = CoCreateInstance(&clsid, NULL, CLSCTX_INPROC_SERVER,
                              &IID_IBaseFilter, (void **) &splitter);
        if (FAILED(hr))
        {
            VariantClear(&var);
            continue;
        }

        hr = IGraphBuilder_AddFilter(This->graph, splitter,
                                     V_UNION(&var, bstrVal));
        VariantClear(&var);
        This->splitter = splitter;
        if (FAILED(hr))
            goto retry;

        /* Connect the source's first pin to this splitter's first pin. */
        hr = IBaseFilter_EnumPins(This->source, &pins);
        if (FAILED(hr))
            goto retry;
        IEnumPins_Next(pins, 1, &source_pin, NULL);
        IEnumPins_Release(pins);

        hr = IBaseFilter_EnumPins(splitter, &pins);
        if (FAILED(hr))
        {
            IPin_Release(source_pin);
            goto retry;
        }
        IEnumPins_Next(pins, 1, &splitter_pin, NULL);
        IEnumPins_Release(pins);

        hr = IPin_Connect(source_pin, splitter_pin, NULL);
        IPin_Release(source_pin);
        IPin_Release(splitter_pin);
        if (SUCCEEDED(hr))
            break;

retry:
        /* This candidate did not work out; drop it and try the next one. */
        IBaseFilter_Release(splitter);
        This->splitter = NULL;
    }

    IEnumMoniker_Release(filters);
    if (FAILED(hr))
        return hr;
    return S_OK;
}
/* GstAudioSrc::prepare vmethod: locate the negotiated caps in the cached
 * caps list, push the matching media type and buffer callback into the
 * fakesink filter, connect the capture pin to it, and size the ring buffer.
 *
 * Fix: srcinterface was released on the success path but never set to NULL,
 * so a later `goto error` (missing input pin or failed ConnectDirect)
 * released the same interface a second time. */
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
{
  HRESULT hres;
  IGstDshowInterface *srcinterface = NULL;
  IPin *input_pin = NULL;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);

  /* search the negociated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (spec->caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (spec->caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /*get the corresponding media type and build the dshow graph */
      GstCapturePinMediaType *pin_mediatype = NULL;
      GList *type = g_list_nth (src->pins_mediatypes, res);

      if (type) {
        pin_mediatype = (GstCapturePinMediaType *) type->data;

        hres = IBaseFilter_QueryInterface (src->dshow_fakesink,
            &IID_IGstDshowInterface, (void **) &srcinterface);
        if (hres != S_OK || !srcinterface) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get IGstDshowInterface interface from our dshow fakesink filter (error=%d)",
              hres);
          goto error;
        }

        IGstDshowInterface_gst_set_media_type (srcinterface,
            pin_mediatype->mediatype);
        IGstDshowInterface_gst_set_buffer_callback (srcinterface,
            (byte *) gst_dshowaudiosrc_push_buffer, (byte *) src);

        /* Done with the interface; clear the pointer so the error path
         * cannot release it a second time (was a double-release bug). */
        IGstDshowInterface_Release (srcinterface);
        srcinterface = NULL;

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get input pin from our directshow fakesink filter");
          goto error;
        }

        hres = IFilterGraph_ConnectDirect (src->filter_graph,
            pin_mediatype->capture_pin, input_pin, NULL);
        IPin_Release (input_pin);

        if (hres != S_OK) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't connect capture filter with fakesink filter (error=%d)",
              hres);
          goto error;
        }

        spec->segsize = spec->rate * spec->channels;
        spec->segtotal = 1;
      }
    }
  }

  return TRUE;

error:
  if (srcinterface) {
    IGstDshowInterface_Release (srcinterface);
  }

  return FALSE;
}
/* IMediaSeeking::QueryInterface — forward to the owning filter so that
 * QueryInterface behaves symmetrically across all of its interfaces. */
static HRESULT WINAPI QT_Seeking_QueryInterface(IMediaSeeking * iface, REFIID riid, LPVOID * ppv)
{
    QTSplitter *splitter = impl_from_IMediaSeeking(iface);

    return IBaseFilter_QueryInterface(&splitter->filter.IBaseFilter_iface, riid, ppv);
}
/* IQualityControl::QueryInterface — delegate every query to the owner
 * filter stored in `self`, keeping the COM identity unified. */
HRESULT WINAPI QualityControlImpl_QueryInterface(IQualityControl *iface, REFIID riid, void **ppv)
{
    QualityControlImpl *qc = (QualityControlImpl*)iface;

    return IBaseFilter_QueryInterface(qc->self, riid, ppv);
}
/* IAMFilterMiscFlags::QueryInterface — forwarded to the renderer's base
 * filter, which owns the object's full interface map. */
static HRESULT WINAPI AMFilterMiscFlags_QueryInterface(IAMFilterMiscFlags *iface, REFIID riid, void **ppv)
{
    DSoundRenderImpl *render = impl_from_IAMFilterMiscFlags(iface);

    return IBaseFilter_QueryInterface(&render->renderer.filter.IBaseFilter_iface, riid, ppv);
}
/* This test doesn't use the quartz filtergraph because it makes it impossible
 * to be certain that a thread is really one owned by the avi splitter.
 * A lot of the decoder filters will also have their own thread, and Windows'
 * filtergraph has a separate thread for start/stop/seeking requests.
 * By avoiding the filtergraph altogether and connecting streams directly to
 * the null renderer I am sure that this is not the case here.
 */
static void test_threads(void)
{
    IFileSourceFilter *pfile = NULL;
    IBaseFilter *preader = NULL, *pavi = NULL;
    IEnumPins *enumpins = NULL;
    IPin *filepin = NULL, *avipin = NULL;
    HRESULT hr;
    int baselevel, curlevel, expected;
    HANDLE file = NULL;
    PIN_DIRECTION dir = PINDIR_OUTPUT;
    char buffer[13];
    DWORD readbytes;
    FILTER_STATE state;

    /* We need another way of counting threads on NT4. Skip these tests (for now) */
    if (!pCreateToolhelp32Snapshot || !pThread32First || !pThread32Next)
    {
        win_skip("Needed thread functions are not available (NT4)\n");
        return;
    }

    /* Before doing anything (the thread count at the start differs per OS) */
    baselevel = count_threads();

    file = CreateFileW(wfile, GENERIC_READ, FILE_SHARE_READ|FILE_SHARE_WRITE,
        NULL, OPEN_EXISTING, 0, NULL);
    if (file == INVALID_HANDLE_VALUE)
    {
        skip("Could not read test file \"%s\", skipping test\n", afile);
        return;
    }

    /* Sanity-check the RIFF/AVI magic before going any further. */
    memset(buffer, 0, 13);
    readbytes = 12;
    ReadFile(file, buffer, readbytes, &readbytes, NULL);
    CloseHandle(file);
    if (strncmp(buffer, "RIFF", 4) || strcmp(buffer + 8, "AVI "))
    {
        skip("%s is not an avi riff file, not doing the avi splitter test\n",
            afile);
        return;
    }

    /* The splitter itself must not expose IFileSourceFilter. */
    hr = IUnknown_QueryInterface(pAviSplitter, &IID_IFileSourceFilter,
        (void **)&pfile);
    ok(hr == E_NOINTERFACE,
        "Avi splitter returns unexpected error: %08x\n", hr);
    if (pfile)
        IFileSourceFilter_Release(pfile);
    pfile = NULL;

    hr = CoCreateInstance(&CLSID_AsyncReader, NULL, CLSCTX_INPROC_SERVER,
        &IID_IBaseFilter, (LPVOID*)&preader);
    ok(hr == S_OK, "Could not create asynchronous reader: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    hr = IBaseFilter_QueryInterface(preader, &IID_IFileSourceFilter,
        (void**)&pfile);
    ok(hr == S_OK, "Could not get IFileSourceFilter: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    hr = IUnknown_QueryInterface(pAviSplitter, &IID_IBaseFilter,
        (void**)&pavi);
    ok(hr == S_OK, "Could not get base filter: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    hr = IFileSourceFilter_Load(pfile, wfile, NULL);
    if (hr != S_OK)
    {
        trace("Could not load file\n");
        goto fail;
    }

    hr = IBaseFilter_EnumPins(preader, &enumpins);
    ok(hr == S_OK, "No enumpins: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    hr = IEnumPins_Next(enumpins, 1, &filepin, NULL);
    ok(hr == S_OK, "No pin: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    IEnumPins_Release(enumpins);
    enumpins = NULL;

    hr = IBaseFilter_EnumPins(pavi, &enumpins);
    ok(hr == S_OK, "No enumpins: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    hr = IEnumPins_Next(enumpins, 1, &avipin, NULL);
    ok(hr == S_OK, "No pin: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    /* Merely instantiating the filters must not spawn any threads... */
    curlevel = count_threads();
    ok(curlevel == baselevel,
        "The thread count should be %d not %d\n", baselevel, curlevel);

    hr = IPin_Connect(filepin, avipin, NULL);
    ok(hr == S_OK, "Could not connect: %08x\n", hr);
    if (hr != S_OK)
        goto fail;

    /* ...but connecting reader to splitter adds exactly one thread. */
    expected = 1 + baselevel;
    curlevel = count_threads();
    ok(curlevel == expected,
        "The thread count should be %d not %d\n", expected, curlevel);

    IPin_Release(avipin);
    avipin = NULL;

    IEnumPins_Reset(enumpins);

    /* Windows puts the pins in the order: Outputpins - Inputpin,
     * wine does the reverse, just don't test it for now
     * Hate to admit it, but windows way makes more sense */
    while (IEnumPins_Next(enumpins, 1, &avipin, NULL) == S_OK)
    {
        IPin_QueryDirection(avipin, &dir);
        if (dir == PINDIR_OUTPUT)
        {
            /* Well, connect it to a null renderer! */
            IBaseFilter *pnull = NULL;
            IEnumPins *nullenum = NULL;
            IPin *nullpin = NULL;

            hr = CoCreateInstance(&CLSID_NullRenderer, NULL,
                CLSCTX_INPROC_SERVER, &IID_IBaseFilter, (LPVOID*)&pnull);
            ok(hr == S_OK, "Could not create null renderer: %08x\n", hr);
            if (hr != S_OK)
                break;

            IBaseFilter_EnumPins(pnull, &nullenum);
            IEnumPins_Next(nullenum, 1, &nullpin, NULL);
            IEnumPins_Release(nullenum);
            IPin_QueryDirection(nullpin, &dir);

            hr = IPin_Connect(avipin, nullpin, NULL);
            ok(hr == S_OK, "Failed to connect output pin: %08x\n", hr);
            IPin_Release(nullpin);
            if (hr != S_OK)
            {
                IBaseFilter_Release(pnull);
                break;
            }
            IBaseFilter_Run(pnull, 0);
            /* each connected+running output stream accounts for one thread */
            ++expected;
        }

        IPin_Release(avipin);
        avipin = NULL;
    }

    if (avipin)
        IPin_Release(avipin);
    avipin = NULL;

    if (hr != S_OK)
        goto fail2;
    /* At this point there is a minimalistic connected avi splitter that can
     * be used for all sorts of source filter tests. However that still needs
     * to be written at a later time.
     *
     * Interesting tests:
     * - Can you disconnect an output pin while running?
     *   Expecting: Yes
     * - Can you disconnect the pullpin while running?
     *   Expecting: No
     * - Is the reference count incremented during playback or when connected?
     *   Does this happen once for every output pin? Or is there something else
     *   going on.
     *   Expecting: You tell me */
    IBaseFilter_Run(preader, 0);
    IBaseFilter_Run(pavi, 0);
    IBaseFilter_GetState(pavi, INFINITE, &state);

    curlevel = count_threads();
    ok(curlevel == expected,
        "The thread count should be %d not %d\n", expected, curlevel);

    IBaseFilter_Pause(pavi);
    IBaseFilter_Pause(preader);
    IBaseFilter_Stop(pavi);
    IBaseFilter_Stop(preader);
    IBaseFilter_GetState(pavi, INFINITE, &state);
    IBaseFilter_GetState(preader, INFINITE, &state);

fail2:
    /* Disconnect and release everything that was wired up above. */
    IEnumPins_Reset(enumpins);
    while (IEnumPins_Next(enumpins, 1, &avipin, NULL) == S_OK)
    {
        IPin *to = NULL;

        IPin_QueryDirection(avipin, &dir);
        IPin_ConnectedTo(avipin, &to);
        if (to)
        {
            IPin_Release(to);

            if (dir == PINDIR_OUTPUT)
            {
                PIN_INFO info;
                IPin_QueryPinInfo(to, &info);

                /* Release twice: Once normal, second from the
                 * previous while loop */
                IBaseFilter_Stop(info.pFilter);
                IPin_Disconnect(to);
                IPin_Disconnect(avipin);
                IBaseFilter_Release(info.pFilter);
                IBaseFilter_Release(info.pFilter);
            }
            else
            {
                IPin_Disconnect(to);
                IPin_Disconnect(avipin);
            }
        }
        IPin_Release(avipin);
        avipin = NULL;
    }

fail:
    if (hr != S_OK)
        skip("Prerequisites not matched, skipping remainder of test\n");
    if (enumpins)
        IEnumPins_Release(enumpins);
    if (avipin)
        IPin_Release(avipin);
    if (filepin)
    {
        IPin *to = NULL;

        IPin_ConnectedTo(filepin, &to);
        if (to)
        {
            IPin_Disconnect(filepin);
            IPin_Disconnect(to);
        }
        IPin_Release(filepin);
    }
    if (preader)
        IBaseFilter_Release(preader);
    if (pavi)
        IBaseFilter_Release(pavi);
    if (pfile)
        IFileSourceFilter_Release(pfile);

    /* All splitter threads should be gone again after teardown. */
    curlevel = count_threads();
    todo_wine
    ok(curlevel == baselevel,
        "The thread count should be %d not %d\n", baselevel, curlevel);
}
/* IPersistPropertyBag::QueryInterface — routed through the compressor's
 * base filter so all interfaces share one COM identity. */
static HRESULT WINAPI AVICompressorPropertyBag_QueryInterface(IPersistPropertyBag *iface, REFIID riid, void **ppv)
{
    AVICompressor *compressor = impl_from_IPersistPropertyBag(iface);

    return IBaseFilter_QueryInterface(&compressor->filter.IBaseFilter_iface, riid, ppv);
}
/* For each entry in the table below, write the given magic bytes to a temp
 * file, load it through the async reader's IFileSourceFilter, and verify
 * GetCurFile's path/pointer semantics and the detected media subtype. */
static void test_filesourcefilter(void)
{
    static const WCHAR prefix[] = {'w','i','n',0};
    /* label, raw file contents, byte count, expected MEDIASUBTYPE (or NULL) */
    static const struct
    {
        const char *label;
        const char *data;
        DWORD size;
        const GUID *subtype;
    }
    tests[] =
    {
        {
            "AVI",
            "\x52\x49\x46\x46xxxx\x41\x56\x49\x20",
            12,
            &MEDIASUBTYPE_Avi,
        },
        {
            "MPEG1 System",
            "\x00\x00\x01\xBA\x21\x00\x01\x00\x01\x80\x00\x01\x00\x00\x01\xBB",
            16,
            &MEDIASUBTYPE_MPEG1System,
        },
        {
            "MPEG1 Video",
            "\x00\x00\x01\xB3",
            4,
            &MEDIASUBTYPE_MPEG1Video,
        },
        {
            "MPEG1 Audio",
            "\xFF\xE0",
            2,
            &MEDIASUBTYPE_MPEG1Audio,
        },
        {
            "MPEG2 Program",
            "\x00\x00\x01\xBA\x40",
            5,
            &MEDIASUBTYPE_MPEG2_PROGRAM,
        },
        {
            "WAVE",
            "\x52\x49\x46\x46xxxx\x57\x41\x56\x45",
            12,
            &MEDIASUBTYPE_WAVE,
        },
        {
            "unknown format",
            "Hello World",
            11,
            NULL, /* FIXME: should be &MEDIASUBTYPE_NULL */
        },
    };
    WCHAR path[MAX_PATH], temp[MAX_PATH];
    IFileSourceFilter *filesource;
    DWORD ret, written;
    IBaseFilter *base;
    AM_MEDIA_TYPE mt;
    OLECHAR *olepath;
    BOOL success;
    HANDLE file;
    HRESULT hr;
    int i;

    ret = GetTempPathW(MAX_PATH, temp);
    ok(ret, "GetTempPathW failed with error %u\n", GetLastError());
    ret = GetTempFileNameW(temp, prefix, 0, path);
    ok(ret, "GetTempFileNameW failed with error %u\n", GetLastError());

    for (i = 0; i < ARRAY_SIZE(tests); i++)
    {
        trace("Running test for %s\n", tests[i].label);

        /* Write this entry's magic bytes to a fresh temp file. */
        file = CreateFileW(path, GENERIC_READ | GENERIC_WRITE, 0, NULL,
                           CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
        ok(file != INVALID_HANDLE_VALUE, "CreateFileW failed with error %u\n", GetLastError());
        success = WriteFile(file, tests[i].data, tests[i].size, &written, NULL);
        ok(success, "WriteFile failed with error %u\n", GetLastError());
        ok(written == tests[i].size, "could not write test data\n");
        CloseHandle(file);

        hr = CoCreateInstance(&CLSID_AsyncReader, NULL, CLSCTX_INPROC_SERVER,
                              &IID_IBaseFilter, (void **)&base);
        ok(hr == S_OK, "CoCreateInstance failed with %08x\n", hr);
        hr = IBaseFilter_QueryInterface(base, &IID_IFileSourceFilter, (void **)&filesource);
        ok(hr == S_OK, "IBaseFilter_QueryInterface failed with %08x\n", hr);

        /* Before Load: GetCurFile succeeds but hands back a NULL path. */
        olepath = (void *)0xdeadbeef;
        hr = IFileSourceFilter_GetCurFile(filesource, &olepath, NULL);
        ok(hr == S_OK, "expected S_OK, got %08x\n", hr);
        ok(olepath == NULL, "expected NULL, got %p\n", olepath);

        hr = IFileSourceFilter_Load(filesource, NULL, NULL);
        ok(hr == E_POINTER, "expected E_POINTER, got %08x\n", hr);

        hr = IFileSourceFilter_Load(filesource, path, NULL);
        ok(hr == S_OK, "IFileSourceFilter_Load failed with %08x\n", hr);

        hr = IFileSourceFilter_GetCurFile(filesource, NULL, &mt);
        ok(hr == E_POINTER, "expected E_POINTER, got %08x\n", hr);

        olepath = NULL;
        hr = IFileSourceFilter_GetCurFile(filesource, &olepath, NULL);
        ok(hr == S_OK, "expected S_OK, got %08x\n", hr);
        CoTaskMemFree(olepath);

        /* After Load: the path round-trips and, for known formats, the
         * media type matches the table entry. */
        olepath = NULL;
        memset(&mt, 0x11, sizeof(mt));
        hr = IFileSourceFilter_GetCurFile(filesource, &olepath, &mt);
        ok(hr == S_OK, "expected S_OK, got %08x\n", hr);
        ok(!lstrcmpW(olepath, path), "expected %s, got %s\n",
           wine_dbgstr_w(path), wine_dbgstr_w(olepath));
        if (tests[i].subtype)
        {
            ok(IsEqualGUID(&mt.majortype, &MEDIATYPE_Stream),
               "expected MEDIATYPE_Stream, got %s\n", wine_dbgstr_guid(&mt.majortype));
            ok(IsEqualGUID(&mt.subtype, tests[i].subtype),
               "expected %s, got %s\n", wine_dbgstr_guid(tests[i].subtype),
               wine_dbgstr_guid(&mt.subtype));
        }
        CoTaskMemFree(olepath);

        IFileSourceFilter_Release(filesource);
        IBaseFilter_Release(base);

        success = DeleteFileW(path);
        ok(success, "DeleteFileW failed with error %u\n", GetLastError());
    }
}