/* Create an IEnumPins enumerator over a base filter's pins.
 * The enumerator takes a reference on the filter and records the filter's
 * current pin version via receive_version(). */
HRESULT WINAPI EnumPins_Construct(BaseFilter *base, BaseFilter_GetPin receive_pin,
        BaseFilter_GetPinCount receive_pincount, BaseFilter_GetPinVersion receive_version,
        IEnumPins **ppEnum)
{
    IEnumPinsImpl *enum_pins;

    if (!ppEnum)
        return E_POINTER;

    enum_pins = CoTaskMemAlloc(sizeof(IEnumPinsImpl));
    if (!enum_pins)
    {
        *ppEnum = NULL;
        return E_OUTOFMEMORY;
    }

    enum_pins->IEnumPins_iface.lpVtbl = &IEnumPinsImpl_Vtbl;
    enum_pins->refCount = 1;
    enum_pins->uIndex = 0;
    enum_pins->receive_pin = receive_pin;
    enum_pins->receive_pincount = receive_pincount;
    enum_pins->receive_version = receive_version;
    enum_pins->base = base;

    /* The enumerator keeps the filter alive for its own lifetime. */
    IBaseFilter_AddRef(&base->IBaseFilter_iface);

    *ppEnum = &enum_pins->IEnumPins_iface;
    enum_pins->Version = receive_version(base);

    TRACE("Created new enumerator (%p)\n", *ppEnum);
    return S_OK;
}
/* IPin */

/* IPin::QueryPinInfo for a sample grabber pin: reports the owning filter,
 * the pin direction and the pin name. */
static HRESULT WINAPI SampleGrabber_IPin_QueryPinInfo(IPin *iface, PIN_INFO *info)
{
    SG_Pin *pin = impl_from_IPin(iface);

    TRACE("(%p)->(%p)\n", pin, info);

    if (!info)
        return E_POINTER;

    /* The caller receives a reference on the filter (AddRef below) and is
     * responsible for releasing it. */
    info->pFilter = &pin->sg->filter.IBaseFilter_iface;
    IBaseFilter_AddRef(info->pFilter);
    info->dir = pin->dir;
    lstrcpynW(info->achName, pin->name, MAX_PIN_NAME);
    return S_OK;
}
/* Build an IEnumPins object holding a snapshot of the given pin array.
 * Takes a reference on the filter; returns NULL on allocation failure.
 *
 * BUGFIX: the original returned &obj->pe even when CoTaskMemAlloc failed,
 * which evaluates a member access on a NULL pointer (undefined behavior).
 * Callers that checked the result for NULL only worked by accident of the
 * struct layout; now NULL is returned explicitly. */
static IEnumPins *pinsenum_create(IBaseFilter *filter, IPin **pins, ULONG pinCount)
{
    ULONG len = sizeof(PE_Impl) + (pinCount * sizeof(IPin *));
    PE_Impl *obj = CoTaskMemAlloc(len);
    ULONG i;

    if (!obj)
        return NULL;

    ZeroMemory(obj, len);
    obj->pe.lpVtbl = &IEnumPins_VTable;
    obj->refCount = 1;
    obj->filter = filter;
    obj->numPins = pinCount;
    obj->index = 0;
    for (i = 0; i < pinCount; i++)
        obj->pins[i] = pins[i];

    /* The enumerator keeps the filter alive. */
    IBaseFilter_AddRef(filter);
    return &obj->pe;
}
/*
 * Sink pad setcaps handler: (re)configures the internal DirectShow graph
 * for the negotiated input caps.
 *
 * Steps visible below: read width/height/framerate (and optional
 * codec_data) from the caps; build the input AM_MEDIA_TYPE (MPEG1VIDEOINFO
 * for MPEG-1, VIDEOINFOHEADER otherwise, with codec_data appended); push it
 * into the fakesrc filter through IGstDshowInterface; connect
 * fakesrc -> decoder; query the decoder's output format; configure the
 * fakesink filter and its buffer callback; connect decoder -> fakesink;
 * negotiate the source pad caps; finally run the graph.
 *
 * Returns TRUE on success, FALSE on any negotiation/connection failure
 * (all failure paths jump to "end" for cleanup).
 *
 * NOTE(review): hres (an HRESULT) is printed with "%d" in the error
 * messages — works where HRESULT is int-sized, but "%lx" would be safer;
 * confirm against the project's logging conventions.
 * NOTE(review): output_vheader, allocated by
 * gst_dshowvideodec_get_filter_output_format(), does not appear to be freed
 * on any path here — looks like a leak; TODO confirm who owns it.
 */
static gboolean gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps) { gboolean ret = FALSE; HRESULT hres; GstStructure *s = gst_caps_get_structure (caps, 0); GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad); GstDshowVideoDecClass *klass = (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec); GstBuffer *extradata = NULL; const GValue *v = NULL; gint size = 0; GstCaps *caps_out; AM_MEDIA_TYPE output_mediatype, input_mediatype; VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL; IPin *output_pin = NULL, *input_pin = NULL; IGstDshowInterface *gstdshowinterface = NULL; const GValue *fps; /* read data */ if (!gst_structure_get_int (s, "width", &vdec->width) || !gst_structure_get_int (s, "height", &vdec->height)) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("error getting video width or height from caps"), (NULL)); goto end; } fps = gst_structure_get_value (s, "framerate"); if (!fps) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("error getting video framerate from caps"), (NULL)); goto end; } vdec->fps_n = gst_value_get_fraction_numerator (fps); vdec->fps_d = gst_value_get_fraction_denominator (fps); if ((v = gst_structure_get_value (s, "codec_data"))) extradata = gst_value_get_buffer (v); /* define the input type format */ memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE)); input_mediatype.majortype = klass->entry->input_majortype; input_mediatype.subtype = klass->entry->input_subtype; input_mediatype.bFixedSizeSamples = FALSE; input_mediatype.bTemporalCompression = TRUE; if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) { size = sizeof (MPEG1VIDEOINFO) + (extradata ? 
/* (cont.) MPEG-1 path: codec_data is copied into bSequenceHeader; otherwise
 * codec_data is appended after a plain VIDEOINFOHEADER. */
GST_BUFFER_SIZE (extradata) - 1 : 0); input_vheader = g_malloc0 (size); input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER); if (extradata) { MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader; memcpy (mpeg_info->bSequenceHeader, GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata)); mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata); } input_mediatype.formattype = FORMAT_MPEGVideo; } else { size = sizeof (VIDEOINFOHEADER) + (extradata ? GST_BUFFER_SIZE (extradata) : 0); input_vheader = g_malloc0 (size); input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER); if (extradata) { /* Codec data is appended after our header */ memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER), GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata)); input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata); } input_mediatype.formattype = FORMAT_VideoInfo; } input_vheader->rcSource.top = input_vheader->rcSource.left = 0; input_vheader->rcSource.right = vdec->width; input_vheader->rcSource.bottom = vdec->height; input_vheader->rcTarget = input_vheader->rcSource; input_vheader->bmiHeader.biWidth = vdec->width; input_vheader->bmiHeader.biHeight = vdec->height; input_vheader->bmiHeader.biPlanes = 1; input_vheader->bmiHeader.biBitCount = 16; input_vheader->bmiHeader.biCompression = klass->entry->format; input_vheader->bmiHeader.biSizeImage = (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8); input_mediatype.cbFormat = size; input_mediatype.pbFormat = (BYTE *) input_vheader; input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage; hres = IBaseFilter_QueryInterface (vdec->srcfilter, &IID_IGstDshowInterface, (void **) &gstdshowinterface); if (hres != S_OK || !gstdshowinterface) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get IGstDshowInterface interface from dshow fakesrc filter (error=%d)", hres), (NULL)); goto end; } /* save a reference to IGstDshowInterface to use it processing functions */ if 
/* (cont.) first time through, cache the fakesrc's IGstDshowInterface on
 * vdec; then feed it the input media type and connect fakesrc -> decoder. */
(!vdec->gstdshowsrcfilter) { vdec->gstdshowsrcfilter = gstdshowinterface; IBaseFilter_AddRef (vdec->gstdshowsrcfilter); } IGstDshowInterface_gst_set_media_type (gstdshowinterface, &input_mediatype); IGstDshowInterface_Release (gstdshowinterface); gstdshowinterface = NULL; /* set the sample size for fakesrc filter to the output buffer size */ IGstDshowInterface_gst_set_sample_size (vdec->gstdshowsrcfilter, input_mediatype.lSampleSize); /* connect our fake src to decoder */ gst_dshow_get_pin_from_filter (vdec->srcfilter, PINDIR_OUTPUT, &output_pin); if (!output_pin) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get output pin from our directshow fakesrc filter"), (NULL)); goto end; } gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT, &input_pin); if (!input_pin) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get input pin from decoder filter"), (NULL)); goto end; } hres = IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin, NULL); if (hres != S_OK) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't connect fakesrc with decoder (error=%d)", hres), (NULL)); goto end; } IPin_Release (input_pin); IPin_Release (output_pin); input_pin = NULL; output_pin = NULL; /* get decoder output video format */ if (!gst_dshowvideodec_get_filter_output_format (vdec, &klass->entry->output_subtype, &output_vheader, &size)) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get decoder output video format"), (NULL)); goto end; } memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE)); output_mediatype.majortype = klass->entry->output_majortype; output_mediatype.subtype = klass->entry->output_subtype; output_mediatype.bFixedSizeSamples = TRUE; output_mediatype.bTemporalCompression = FALSE; output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage; output_mediatype.formattype = FORMAT_VideoInfo; output_mediatype.cbFormat = size; output_mediatype.pbFormat = (char *) output_vheader; hres = IBaseFilter_QueryInterface (vdec->sinkfilter, 
/* (cont.) configure the fakesink with the decoder's output type and the
 * push-buffer callback, then connect decoder -> fakesink, negotiate the
 * source pad caps, and start the graph. */
&IID_IGstDshowInterface, (void **) &gstdshowinterface); if (hres != S_OK || !gstdshowinterface) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get IGstDshowInterface interface from dshow fakesink filter (error=%d)", hres), (NULL)); goto end; } IGstDshowInterface_gst_set_media_type (gstdshowinterface, &output_mediatype); IGstDshowInterface_gst_set_buffer_callback (gstdshowinterface, gst_dshowvideodec_push_buffer, (byte *) vdec); IGstDshowInterface_Release (gstdshowinterface); gstdshowinterface = NULL; /* connect decoder to our fake sink */ gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT, &output_pin); if (!output_pin) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get output pin from our decoder filter"), (NULL)); goto end; } gst_dshow_get_pin_from_filter (vdec->sinkfilter, PINDIR_INPUT, &input_pin); if (!input_pin) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't get input pin from our directshow fakesink filter"), (NULL)); goto end; } hres = IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin, &output_mediatype); if (hres != S_OK) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't connect decoder with fakesink (error=%d)", hres), (NULL)); goto end; } /* negotiate output */ caps_out = gst_caps_from_string (klass->entry->srccaps); gst_caps_set_simple (caps_out, "width", G_TYPE_INT, vdec->width, "height", G_TYPE_INT, vdec->height, "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL); if (!gst_pad_set_caps (vdec->srcpad, caps_out)) { gst_caps_unref (caps_out); GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Failed to negotiate output"), (NULL)); goto end; } gst_caps_unref (caps_out); hres = IMediaFilter_Run (vdec->mediafilter, -1); if (hres != S_OK) { GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION, ("Can't run the directshow graph (error=%d)", hres), (NULL)); goto end; } ret = TRUE; end: gst_object_unref (vdec); if (input_vheader) g_free (input_vheader); if (gstdshowinterface) IGstDshowInterface_Release 
/* (cont.) cleanup shared by success and failure paths. */
(gstdshowinterface); if (input_pin) IPin_Release (input_pin); if (output_pin) IPin_Release (output_pin); return ret; }
/* IMediaSeeking::AddRef — the seeking interface shares the splitter
 * filter's reference count, so simply forward to it. */
static ULONG WINAPI QT_Seeking_AddRef(IMediaSeeking * iface)
{
    QTSplitter *splitter = impl_from_IMediaSeeking(iface);

    return IBaseFilter_AddRef(&splitter->filter.IBaseFilter_iface);
}
/* IQualityControl::AddRef — forward to the owning filter so both share a
 * single reference count. */
ULONG WINAPI QualityControlImpl_AddRef(IQualityControl *iface)
{
    /* NOTE(review): the direct cast assumes the IQualityControl vtable
     * pointer is the first member of QualityControlImpl — confirm against
     * the struct definition. */
    QualityControlImpl *impl = (QualityControlImpl*)iface;

    return IBaseFilter_AddRef(impl->self);
}
/* IAMFilterMiscFlags::AddRef — delegate to the sound renderer's base
 * filter reference count. */
static ULONG WINAPI AMFilterMiscFlags_AddRef(IAMFilterMiscFlags *iface)
{
    DSoundRenderImpl *render = impl_from_IAMFilterMiscFlags(iface);

    return IBaseFilter_AddRef(&render->renderer.filter.IBaseFilter_iface);
}
/* IQualityControl::AddRef — quality control has no refcount of its own;
 * forward to the filter that owns it. */
ULONG WINAPI QualityControlImpl_AddRef(IQualityControl *iface)
{
    QualityControlImpl *impl = impl_from_IQualityControl(iface);

    return IBaseFilter_AddRef(impl->self);
}
/*
 * Open a DirectShow capture device (or load a previously saved capture
 * filter from file), add it to the filter graph, and connect it to an
 * internal capture filter that delivers samples through "callback".
 *
 * devtype selects which context slot (audio/video) is filled; sourcetype
 * selects which user options (filter load/save file names) apply.
 * Returns 0 on success or a negative AVERROR code.
 *
 * BUGFIXES relative to the previous revision:
 *  - the HRESULT of ICaptureGraphBuilder2_SetFiltergraph() was discarded,
 *    so the following "if (r != S_OK)" tested the stale value left in r by
 *    CoCreateInstance; the result is now assigned to r;
 *  - device_filter_unique_name is NULL when the filter was loaded from
 *    file, so the reuse check now guards the strcmp() against NULL.
 */
static int dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
                             enum dshowDeviceType devtype, enum dshowSourceFilterType sourcetype)
{
    struct dshow_ctx *ctx = avctx->priv_data;
    IBaseFilter *device_filter = NULL;
    char *device_filter_unique_name = NULL;
    IGraphBuilder *graph = ctx->graph;
    IPin *device_pin = NULL;
    libAVPin *capture_pin = NULL;
    libAVFilter *capture_filter = NULL;
    ICaptureGraphBuilder2 *graph_builder2 = NULL;
    int ret = AVERROR(EIO);
    int r;
    IStream *ifile_stream = NULL;
    IStream *ofile_stream = NULL;
    IPersistStream *pers_stream = NULL;
    enum dshowDeviceType otherDevType = (devtype == VideoDevice) ? AudioDevice : VideoDevice;

    const wchar_t *filter_name[2] = { L"Audio capture filter", L"Video capture filter" };

    if ( ((ctx->audio_filter_load_file) && (strlen(ctx->audio_filter_load_file)>0) && (sourcetype == AudioSourceDevice)) ||
         ((ctx->video_filter_load_file) && (strlen(ctx->video_filter_load_file)>0) && (sourcetype == VideoSourceDevice)) ) {
        /* The user supplied a saved filter description: restore the capture
         * filter from that file instead of enumerating devices. */
        HRESULT hr;
        char *filename = NULL;

        if (sourcetype == AudioSourceDevice)
            filename = ctx->audio_filter_load_file;
        else
            filename = ctx->video_filter_load_file;

        hr = SHCreateStreamOnFile((LPCSTR) filename, STGM_READ, &ifile_stream);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not open capture filter description file.\n");
            goto error;
        }

        hr = OleLoadFromStream(ifile_stream, &IID_IBaseFilter, (void **) &device_filter);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not load capture filter from file.\n");
            goto error;
        }

        if (sourcetype == AudioSourceDevice)
            av_log(avctx, AV_LOG_INFO, "Audio-");
        else
            av_log(avctx, AV_LOG_INFO, "Video-");
        av_log(avctx, AV_LOG_INFO, "Capture filter loaded successfully from file \"%s\".\n", filename);
    } else {
        if ((r = dshow_cycle_devices(avctx, devenum, devtype, sourcetype,
                                     &device_filter, &device_filter_unique_name)) < 0) {
            ret = r;
            goto error;
        }
    }

    if (ctx->device_filter[otherDevType]) {
        /* Avoid adding two instances of the same device to the graph, one
         * for video and one for audio; a few devices don't support this
         * (could also do this check earlier to avoid double crossbars, etc.
         * but they seem OK). device_filter_unique_name is NULL when the
         * filter was loaded from file, so there is nothing to compare. */
        if (device_filter_unique_name &&
            strcmp(device_filter_unique_name, ctx->device_unique_name[otherDevType]) == 0) {
            av_log(avctx, AV_LOG_DEBUG, "reusing previous graph capture filter... %s\n",
                   device_filter_unique_name);
            IBaseFilter_Release(device_filter);
            device_filter = ctx->device_filter[otherDevType];
            IBaseFilter_AddRef(ctx->device_filter[otherDevType]);
        } else {
            av_log(avctx, AV_LOG_DEBUG, "not reusing previous graph capture filter %s != %s\n",
                   device_filter_unique_name, ctx->device_unique_name[otherDevType]);
        }
    }

    ctx->device_filter[devtype] = device_filter;
    ctx->device_unique_name[devtype] = device_filter_unique_name;

    r = IGraphBuilder_AddFilter(graph, device_filter, NULL);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not add device filter to graph.\n");
        goto error;
    }

    if ((r = dshow_cycle_pins(avctx, devtype, sourcetype, device_filter, &device_pin)) < 0) {
        ret = r;
        goto error;
    }
    ctx->device_pin[devtype] = device_pin;

    capture_filter = libAVFilter_Create(avctx, callback, devtype);
    if (!capture_filter) {
        av_log(avctx, AV_LOG_ERROR, "Could not create grabber filter.\n");
        goto error;
    }
    ctx->capture_filter[devtype] = capture_filter;

    if ( ((ctx->audio_filter_save_file) && (strlen(ctx->audio_filter_save_file)>0) && (sourcetype == AudioSourceDevice)) ||
         ((ctx->video_filter_save_file) && (strlen(ctx->video_filter_save_file)>0) && (sourcetype == VideoSourceDevice)) ) {
        /* Persist the configured capture filter so it can be reloaded later
         * via the corresponding *_filter_load_file option. */
        HRESULT hr;
        char *filename = NULL;

        if (sourcetype == AudioSourceDevice)
            filename = ctx->audio_filter_save_file;
        else
            filename = ctx->video_filter_save_file;

        hr = SHCreateStreamOnFile((LPCSTR) filename, STGM_CREATE | STGM_READWRITE, &ofile_stream);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not create capture filter description file.\n");
            goto error;
        }

        hr = IBaseFilter_QueryInterface(device_filter, &IID_IPersistStream, (void **) &pers_stream);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Query for IPersistStream failed.\n");
            goto error;
        }

        hr = OleSaveToStream(pers_stream, ofile_stream);
        if (hr != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not save capture filter \n");
            goto error;
        }

        hr = IStream_Commit(ofile_stream, STGC_DEFAULT);
        if (S_OK != hr) {
            av_log(avctx, AV_LOG_ERROR, "Could not commit capture filter data to file.\n");
            goto error;
        }

        if (sourcetype == AudioSourceDevice)
            av_log(avctx, AV_LOG_INFO, "Audio-");
        else
            av_log(avctx, AV_LOG_INFO, "Video-");
        av_log(avctx, AV_LOG_INFO, "Capture filter saved successfully to file \"%s\".\n", filename);
    }

    r = IGraphBuilder_AddFilter(graph, (IBaseFilter *) capture_filter, filter_name[devtype]);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not add capture filter to graph\n");
        goto error;
    }

    libAVPin_AddRef(capture_filter->pin);
    capture_pin = capture_filter->pin;
    ctx->capture_pin[devtype] = capture_pin;

    r = CoCreateInstance(&CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
                         &IID_ICaptureGraphBuilder2, (void **) &graph_builder2);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not create CaptureGraphBuilder2\n");
        goto error;
    }

    /* BUGFIX: this HRESULT was previously discarded, leaving r holding the
     * CoCreateInstance result, so the check below could never trigger. */
    r = ICaptureGraphBuilder2_SetFiltergraph(graph_builder2, graph);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not set graph for CaptureGraphBuilder2\n");
        goto error;
    }

    /* Connect pins, optionally inserting intermediate filters like a
     * crossbar if necessary. */
    r = ICaptureGraphBuilder2_RenderStream(graph_builder2, NULL, NULL,
                                           (IUnknown *) device_pin,
                                           NULL /* no intermediate filter */,
                                           (IBaseFilter *) capture_filter);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not RenderStream to connect pins\n");
        goto error;
    }

    r = dshow_try_setup_crossbar_options(graph_builder2, device_filter, devtype, avctx);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not setup CrossBar\n");
        goto error;
    }

    ret = 0;

error:
    if (graph_builder2 != NULL)
        ICaptureGraphBuilder2_Release(graph_builder2);
    if (pers_stream)
        IPersistStream_Release(pers_stream);
    if (ifile_stream)
        IStream_Release(ifile_stream);
    if (ofile_stream)
        IStream_Release(ofile_stream);

    return ret;
}
/* Output pin IPin::AddRef — the pin's lifetime is tied to the compressor
 * filter, so forward to the filter's reference count. */
static ULONG WINAPI AVICompressorOut_AddRef(IPin *iface)
{
    AVICompressor *compressor = impl_from_IPin(iface);

    return IBaseFilter_AddRef(&compressor->filter.IBaseFilter_iface);
}
/* IPersistPropertyBag::AddRef — delegate to the compressor filter's
 * reference count. */
static ULONG WINAPI AVICompressorPropertyBag_AddRef(IPersistPropertyBag *iface)
{
    AVICompressor *compressor = impl_from_IPersistPropertyBag(iface);

    return IBaseFilter_AddRef(&compressor->filter.IBaseFilter_iface);
}
/* IMediaSeeking::AddRef — the parser's seeking interface shares the
 * filter's reference count. */
static ULONG WINAPI Parser_Seeking_AddRef(IMediaSeeking * iface)
{
    ParserImpl *parser = impl_from_IMediaSeeking(iface);

    return IBaseFilter_AddRef(&parser->filter.IBaseFilter_iface);
}
/*
 * Decide how to handle a filter-graph event before it reaches the
 * application. Called with bKeep=TRUE when the event is queued (take any
 * extra references carried in the parameters) and bKeep=FALSE when it is
 * discarded (release them again).
 *
 * Returns NOERROR to pass the event on, E_FAIL to suppress it, or
 * E_INVALIDARG for an unrecognized system event code.
 */
static HRESULT FGEVENT_KeepEvent( CFilterGraph* This, BOOL bKeep, long lEventCode, LONG_PTR lParam1, LONG_PTR lParam2 )
{
    switch ( lEventCode )
    {
    case EC_COMPLETE:
        /* Suppress EC_COMPLETE until every rendered stream has finished. */
        This->m_cCompletes++;
        if ( This->m_cCompletes < This->m_cRenders )
            return E_FAIL; /* don't tell app */
        break;

    case EC_FULLSCREEN_LOST:
        /* lParam2 may carry an IBaseFilter pointer; keep its reference
         * count in step with the queued event's lifetime. */
        {
            IBaseFilter *pFilter = (IBaseFilter*)lParam2;
            if ( pFilter != NULL )
            {
                if ( bKeep )
                    IBaseFilter_AddRef( pFilter );
                else
                    IBaseFilter_Release( pFilter );
            }
        }
        break;

    /* Events that carry no extra references: nothing to do here. */
    case EC_USERABORT:
    case EC_ERRORABORT:
    case EC_TIME:
    /*case EC_REPAINT:*/
    case EC_STREAM_ERROR_STOPPED:
    case EC_STREAM_ERROR_STILLPLAYING:
    case EC_ERROR_STILLPLAYING:
    case EC_PALETTE_CHANGED:
    case EC_VIDEO_SIZE_CHANGED:
    case EC_QUALITY_CHANGE:
    /*case EC_SHUTTING_DOWN:*/
    case EC_CLOCK_CHANGED:
    case EC_OPENING_FILE:
    case EC_BUFFERING_DATA:
    /*case EC_ACTIVATE:*/
    /*case EC_NEED_RESTART:*/
    /*case EC_WINDOW_DESTROYED:*/
    /*case EC_DISPLAY_CHANGED:*/
    /*case EC_STARVATION:*/
    /*case EC_OLE_EVENT:*/
    /*case EC_NOTIFY_WINDOW:*/
    /*case EC_STREAM_CONTROL_STOPPED:*/
    /*case EC_STREAM_CONTROL_STARTED:*/
    /*case EC_END_OF_SEGMENT:*/
    /*case EC_SEGMENT_STARTED:*/
    case EC_LENGTH_CHANGED:
        break;

    default:
        if ( lEventCode < EC_USER )
        {
            FIXME( "unknown system event %08lx\n", lEventCode );
            return E_INVALIDARG;
        }
        TRACE( "user event %08lx\n", lEventCode );
        break;
    }

    return NOERROR;
}