/* gst_auto_convert_add_element:
 * Instantiates @factory, adds the new element to the autoconvert bin and
 * wires it up between a freshly-created pair of internal proxy pads
 * (internal_srcpad -> element sink, element src -> internal_sinkpad).
 *
 * Returns: (transfer full) the new element with an extra ref for the caller,
 * or NULL on failure (the element, if created, is removed from the bin).
 */
static GstElement *
gst_auto_convert_add_element (GstAutoConvert * autoconvert,
    GstElementFactory * factory)
{
  GstElement *element = NULL;
  GstPad *internal_sinkpad = NULL;
  GstPad *internal_srcpad = NULL;
  GstPad *sinkpad;
  GstPad *srcpad;
  GstPadLinkReturn padlinkret;

  GST_DEBUG_OBJECT (autoconvert, "Adding element %s to the autoconvert bin",
      gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));

  element = gst_element_factory_create (factory, NULL);
  if (!element)
    return NULL;

  if (!gst_bin_add (GST_BIN (autoconvert), element)) {
    GST_ERROR_OBJECT (autoconvert, "Could not add element %s to the bin",
        GST_OBJECT_NAME (element));
    gst_object_unref (element);
    return NULL;
  }

  srcpad = get_pad_by_direction (element, GST_PAD_SRC);
  if (!srcpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not find source in %s",
        GST_OBJECT_NAME (element));
    goto error;
  }

  sinkpad = get_pad_by_direction (element, GST_PAD_SINK);
  if (!sinkpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not find sink in %s",
        GST_OBJECT_NAME (element));
    goto error;
  }

  internal_sinkpad =
      gst_pad_new_from_static_template (&sink_internal_template,
      "sink_internal");
  internal_srcpad =
      gst_pad_new_from_static_template (&src_internal_template,
      "src_internal");

  if (!internal_sinkpad || !internal_srcpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not create internal pads");
    goto error;
  }

  /* Tie the internal pads' lifetime to the element: each weak ref releases
   * one pad ref when the element is finalized. */
  g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
      internal_sinkpad);
  g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
      internal_srcpad);

  gst_pad_set_active (internal_sinkpad, TRUE);
  gst_pad_set_active (internal_srcpad, TRUE);

  /* Let the pad callbacks below find the owning autoconvert instance. */
  g_object_set_qdata (G_OBJECT (internal_srcpad), parent_quark, autoconvert);
  g_object_set_qdata (G_OBJECT (internal_sinkpad), parent_quark, autoconvert);

  gst_pad_set_chain_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_chain));
  gst_pad_set_event_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_event));
  gst_pad_set_query_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_query));
  gst_pad_set_query_type_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_query_type));
  gst_pad_set_getcaps_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_getcaps));
  gst_pad_set_bufferalloc_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_buffer_alloc));
  gst_pad_set_fixatecaps_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_fixatecaps));

  gst_pad_set_event_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_event));
  gst_pad_set_query_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_query));
  gst_pad_set_query_type_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_query_type));

  padlinkret = gst_pad_link (internal_srcpad, sinkpad);
  if (GST_PAD_LINK_FAILED (padlinkret)) {
    /* Fixed typo: "Could not links pad" -> "Could not link pads". */
    GST_WARNING_OBJECT (autoconvert, "Could not link pads %s:%s to %s:%s"
        " for reason %d", GST_DEBUG_PAD_NAME (internal_srcpad),
        GST_DEBUG_PAD_NAME (sinkpad), padlinkret);
    goto error;
  }

  padlinkret = gst_pad_link (srcpad, internal_sinkpad);
  if (GST_PAD_LINK_FAILED (padlinkret)) {
    /* Bug fix: this warning previously printed internal_srcpad/sinkpad,
     * i.e. the pads of the *other* link, which made failures here
     * impossible to diagnose from the log. */
    GST_WARNING_OBJECT (autoconvert, "Could not link pads %s:%s to %s:%s"
        " for reason %d", GST_DEBUG_PAD_NAME (srcpad),
        GST_DEBUG_PAD_NAME (internal_sinkpad), padlinkret);
    goto error;
  }

  g_object_set_qdata (G_OBJECT (element),
      internal_srcpad_quark, internal_srcpad);
  g_object_set_qdata (G_OBJECT (element),
      internal_sinkpad_quark, internal_sinkpad);

  /* Iffy */
  gst_element_sync_state_with_parent (element);

  /* Increment the reference count we will return to the caller */
  gst_object_ref (element);

  /* NOTE(review): srcpad/sinkpad refs from get_pad_by_direction() appear to
   * be retained here — verify whether that helper returns a ref that should
   * be released before returning. */
  return element;

error:
  gst_bin_remove (GST_BIN (autoconvert), element);

  return NULL;
}
/*
 * create_post_tee_bin
 *
 * The following chain is created after the tee for each output from the
 * source:
 *
 *    +-------+   +---------------------+   +-------+
 * ---+ queue +---+ conversion elements +---+ queue +---
 *    +-------+   +---------------------+   +-------+
 *
 * Builds the per-output conversion bin inside @source_bin, gives it a "src"
 * and a "sink" ghost pad, and targets the caller-supplied @ghostpad (added
 * to @source_bin) at the new bin's "src" pad.
 *
 * Returns the new bin (owned by @source_bin), or NULL if it could not be
 * added to @source_bin.  Note: CREATE_ELEMENT_WITH_ID / LINK_ELEMENTS are
 * macros that presumably jump to 'done' or log on failure — TODO confirm
 * their failure semantics against their definitions.
 */
static GstElement *create_post_tee_bin(OwrMediaSource *media_source, GstElement *source_bin,
    GstCaps *caps, GstPad *ghostpad, guint source_id)
{
    OwrMediaType media_type;
    GstElement *post_tee_bin, *queue_pre, *queue_post, *capsfilter;
    GstPad *bin_pad, *queue_pre_pad, *srcpad;
    GSList *list = NULL;   /* elements to sync to parent state later */
    gchar *bin_name;

    bin_name = g_strdup_printf("source-post-tee-bin-%u", source_id);
    post_tee_bin = gst_bin_new(bin_name);
    if (!gst_bin_add(GST_BIN(source_bin), post_tee_bin)) {
        GST_ERROR("Failed to add %s to source bin", bin_name);
        g_free(bin_name);
        g_object_unref(post_tee_bin);
        post_tee_bin = NULL;
        goto done;
    }
    g_free(bin_name);
    gst_element_sync_state_with_parent(post_tee_bin);

    /* Elements common to both media types; capsfilter/queue_post are queued
     * in 'list' so they get state-synced after linking. */
    CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-post-tee-queue", source_id);
    CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id);
    list = g_slist_append(list, capsfilter);
    CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id);
    list = g_slist_append(list, queue_post);

    g_object_get(media_source, "media-type", &media_type, NULL);
    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
    {
        GstElement *audioresample, *audioconvert;

        g_object_set(capsfilter, "caps", caps, NULL);

        CREATE_ELEMENT_WITH_ID(audioresample, "audioresample", "source-audio-resample", source_id);
        list = g_slist_prepend(list, audioresample);
        CREATE_ELEMENT_WITH_ID(audioconvert, "audioconvert", "source-audio-convert", source_id);
        list = g_slist_prepend(list, audioconvert);
        list = g_slist_prepend(list, queue_pre);

        /* Chain: queue_pre -> audioconvert -> audioresample -> capsfilter -> queue_post
         * (linked back-to-front below). */
        gst_bin_add_many(GST_BIN(post_tee_bin),
            queue_pre, audioconvert, audioresample, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(audioresample, capsfilter);
        LINK_ELEMENTS(audioconvert, audioresample);
        LINK_ELEMENTS(queue_pre, audioconvert);
        break;
    }
    case OWR_MEDIA_TYPE_VIDEO:
    {
        GstElement *videorate, *videoscale, *videoconvert;
        GstCaps *source_caps;
        GstStructure *source_structure;
        gint fps_n = 0, fps_d = 1;

        /* The framerate is enforced by videorate (max-rate), not by the
         * capsfilter, so strip it from the filter caps.
         * NOTE(review): if caps carry no framerate, max-rate becomes
         * 0/1 == 0 — confirm that is intended. */
        source_caps = gst_caps_copy(caps);
        source_structure = gst_caps_get_structure(source_caps, 0);
        if (gst_structure_get_fraction(source_structure, "framerate", &fps_n, &fps_d))
            gst_structure_remove_field(source_structure, "framerate");
        g_object_set(capsfilter, "caps", source_caps, NULL);
        gst_caps_unref(source_caps);

        CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id);
        list = g_slist_prepend(list, videoconvert);
        CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id);
        list = g_slist_prepend(list, videoscale);
        CREATE_ELEMENT_WITH_ID(videorate, "videorate", "source-video-rate", source_id);
        g_object_set(videorate, "drop-only", TRUE, "max-rate", fps_n / fps_d, NULL);
        list = g_slist_prepend(list, videorate);
        list = g_slist_prepend(list, queue_pre);

        /* Chain: queue_pre -> videorate -> videoscale -> videoconvert ->
         * capsfilter -> queue_post (linked back-to-front below). */
        gst_bin_add_many(GST_BIN(post_tee_bin),
            queue_pre, videorate, videoscale, videoconvert, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(videoconvert, capsfilter);
        LINK_ELEMENTS(videoscale, videoconvert);
        LINK_ELEMENTS(videorate, videoscale);
        LINK_ELEMENTS(queue_pre, videorate);
        break;
    }
    case OWR_MEDIA_TYPE_UNKNOWN:
    default:
        g_assert_not_reached();
        goto done;
    }

    /* Expose queue_post's src as the bin's "src" ghost pad and point the
     * caller's outer ghostpad at it. */
    srcpad = gst_element_get_static_pad(queue_post, "src");
    g_assert(srcpad);
    bin_pad = gst_ghost_pad_new("src", srcpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(post_tee_bin, bin_pad);
    gst_object_unref(srcpad);
    gst_ghost_pad_set_target(GST_GHOST_PAD(ghostpad), bin_pad);
    gst_pad_set_active(ghostpad, TRUE);
    gst_element_add_pad(source_bin, ghostpad);

    /* Bring every conversion element up to the bin's state. */
    g_slist_foreach(list, sync_to_parent, NULL);

    /* Expose queue_pre's sink as the bin's "sink" ghost pad for the tee. */
    queue_pre_pad = gst_element_get_static_pad(queue_pre, "sink");
    g_assert(queue_pre_pad);
    bin_pad = gst_ghost_pad_new("sink", queue_pre_pad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(post_tee_bin, bin_pad);
    gst_object_unref(queue_pre_pad);

done:
    g_slist_free(list);
    list = NULL;
    return post_tee_bin;
}
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *
 * +--------+   +------------+   +-----+
 * | source +---+ capsfilter +---+ tee +---
 * +--------+   +------------+   +-----+
 *
 * Only one such chain is created per media source for the initial get_pad
 * call. Subsequent calls will just obtain another tee pad. After these initial
 * elements are created, they are linked together and synced up to the PLAYING
 * state.
 *
 * Once the initial chain is created, a block is placed on the new src pad of
 * the tee. The rest of the new chain (conversion elements, capsfilter, queues,
 * etc.) is created, linked and synced in the pad block callback.
 *
 * Returns a new ghost pad on the source bin for this request, or NULL on
 * failure.  CREATE_ELEMENT / LINK_ELEMENTS are macros that presumably jump
 * to 'done' or log on failure — TODO confirm against their definitions.
 */
static GstPad *owr_local_media_source_get_pad(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_bin, *post_tee_bin;
    GstElement *source = NULL, *capsfilter = NULL, *tee;
    GstPad *ghostpad = NULL;
    gchar *pad_name;
    OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
    OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
    OwrCodecType codec_type = OWR_CODEC_TYPE_NONE;
    guint source_id;

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

    /* only create the source bin for this media source once */
    if (_owr_media_source_get_element(media_source)) {
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
        source_bin = _owr_media_source_get_element(media_source);
        tee = priv->source_tee;
    } else {
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstStructure *source_structure;
        GstElement *fakesink;

        /* Build a human-readable bin name from the enum nicks, e.g.
         * "local-audio-capture-source-bin-3". */
        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_bin = gst_bin_new(bin_name);
        g_free(bin_name);
        bin_name = NULL;
        gst_bin_add(GST_BIN(_owr_get_pipeline()), source_bin);
        gst_element_sync_state_with_parent(GST_ELEMENT(source_bin));

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        /* Create the platform-appropriate capture/test source and, where
         * needed, a capsfilter constraining its output. */
        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
                /* Lower latency than the defaults for live capture. */
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#ifdef __APPLE__
                g_object_set(source, "device", priv->device_index, NULL);
#endif
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
            /* workaround for osxaudiosrc bug
             * https://bugzilla.gnome.org/show_bug.cgi?id=711764 */
            CREATE_ELEMENT(capsfilter, "capsfilter", "audio-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_set(source_structure, "format", G_TYPE_STRING, "S32LE",
                "rate", G_TYPE_INT, 44100, NULL);
            gst_structure_remove_field(source_structure, "channels");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);
#endif
            break;
        }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                /* Device selection is platform-specific. */
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                {
                    gchar *device = g_strdup_printf("/dev/video%u", priv->device_index);
                    g_object_set(source, "device", device, NULL);
                    g_free(device);
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "videotestsrc", "video-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* Constrain to the requested caps minus format/framerate, which
             * are handled further downstream. */
            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_remove_field(source_structure, "format");
            gst_structure_remove_field(source_structure, "framerate");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);
            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        /* Keep the tee always linked: the fakesink swallows data when no
         * consumer pad exists yet (async=FALSE so it doesn't stall preroll). */
        CREATE_ELEMENT(tee, "tee", "source-tee");
        CREATE_ELEMENT(fakesink, "fakesink", "source-tee-fakesink");
        g_object_set(fakesink, "async", FALSE, NULL);
        gst_bin_add_many(GST_BIN(source_bin), source, tee, fakesink, NULL);
        gst_element_sync_state_with_parent(fakesink);
        LINK_ELEMENTS(tee, fakesink);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");
    }

    codec_type = _owr_caps_to_codec_type(caps);
    source_id = g_atomic_int_add(&unique_pad_id, 1);

    /* The ghost pad starts targetless; create_post_tee_bin() targets it. */
    pad_name = g_strdup_printf("src_%u_%u", codec_type, source_id);
    ghostpad = gst_ghost_pad_new_no_target(pad_name, GST_PAD_SRC);
    g_free(pad_name);

    post_tee_bin = create_post_tee_bin(media_source, source_bin, caps, ghostpad, source_id);
    if (!post_tee_bin) {
        gst_object_unref(ghostpad);
        ghostpad = NULL;
        goto done;
    }

    if (!gst_element_link(tee, post_tee_bin)) {
        GST_ERROR("Failed to link source tee to source-post-tee-bin-%u", source_id);
        g_object_unref(post_tee_bin);
        ghostpad = NULL;
        goto done;
    }

    /* First call for this media source: finish wiring and publish the bin. */
    if (!_owr_media_source_get_element(media_source)) {
        /* the next code block inside the if is a workaround for avfvideosrc
         * not handling on-the-fly reconfiguration
         * on upstream reconfigure events, we drop the event in the probe */
        if (media_type == OWR_MEDIA_TYPE_VIDEO) {
            GstPad *tee_sinkpad;

            tee_sinkpad = gst_element_get_static_pad(tee, "sink");
            gst_pad_add_probe(tee_sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                drop_reconfigure_cb, NULL, NULL);
        }
        if (capsfilter) {
            /* Link downstream-first so upstream never pushes into an
             * unlinked element. */
            LINK_ELEMENTS(capsfilter, tee);
            gst_element_sync_state_with_parent(tee);
            gst_element_sync_state_with_parent(capsfilter);
            LINK_ELEMENTS(source, capsfilter);
        } else {
            gst_element_sync_state_with_parent(tee);
            LINK_ELEMENTS(source, tee);
        }
        gst_element_sync_state_with_parent(source);
        _owr_media_source_set_element(media_source, source_bin);
        priv->source_tee = tee;
    }

done:
    return ghostpad;
}
void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps) { WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); Stream* stream = 0; //GstCaps* appsrccaps = 0; GstStructure* s = 0; const gchar* appsrctypename = 0; const gchar* mediaType = 0; gchar *parserBinName; bool capsNotifyHandlerConnected = false; unsigned padId = 0; GST_OBJECT_LOCK(webKitMediaSrc); stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream != 0); //gst_app_src_set_caps(GST_APP_SRC(stream->appsrc), caps); //appsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)); s = gst_caps_get_structure(caps, 0); appsrctypename = gst_structure_get_name(s); mediaType = appsrctypename; GST_OBJECT_LOCK(webKitMediaSrc); padId = stream->parent->priv->numberOfPads; stream->parent->priv->numberOfPads++; GST_OBJECT_UNLOCK(webKitMediaSrc); parserBinName = g_strdup_printf("streamparser%u", padId); ASSERT(caps != 0); stream->parser = gst_bin_new(parserBinName); g_free(parserBinName); GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s, caps=%" GST_PTR_FORMAT, trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType, caps); if (!g_strcmp0(mediaType, "video/x-h264")) { GstElement* parser; GstElement* capsfilter; GstPad* pad = nullptr; GstCaps* filtercaps; filtercaps = gst_caps_new_simple("video/x-h264", "alignment", G_TYPE_STRING, "au", NULL); parser = gst_element_factory_make("h264parse", 0); capsfilter = gst_element_factory_make("capsfilter", 0); g_object_set(capsfilter, "caps", filtercaps, NULL); gst_caps_unref(filtercaps); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL); gst_element_link_pads(parser, "src", capsfilter, "sink"); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", 
pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(capsfilter, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else if (!g_strcmp0(mediaType, "video/x-h265")) { GstElement* parser; GstElement* capsfilter; GstPad* pad = nullptr; GstCaps* filtercaps; filtercaps = gst_caps_new_simple("video/x-h265", "alignment", G_TYPE_STRING, "au", NULL); parser = gst_element_factory_make("h265parse", 0); capsfilter = gst_element_factory_make("capsfilter", 0); g_object_set(capsfilter, "caps", filtercaps, NULL); gst_caps_unref(filtercaps); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL); gst_element_link_pads(parser, "src", capsfilter, "sink"); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(capsfilter, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else if (!g_strcmp0(mediaType, "audio/mpeg")) { gint mpegversion = -1; GstElement* parser; GstPad* pad = nullptr; gst_structure_get_int(s, "mpegversion", &mpegversion); if (mpegversion == 1) { parser = gst_element_factory_make("mpegaudioparse", 0); } else if (mpegversion == 2 || mpegversion == 4) { parser = gst_element_factory_make("aacparse", 0); } else { ASSERT_NOT_REACHED(); } gst_bin_add(GST_BIN(stream->parser), parser); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(parser, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else { GST_ERROR_OBJECT(stream->parent, "Unsupported caps: %" GST_PTR_FORMAT, caps); gst_object_unref(GST_OBJECT(stream->parser)); return; } //gst_caps_unref(appsrccaps); GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Unknown; 
GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream->parser); gst_bin_add(GST_BIN(stream->parent), stream->parser); gst_element_sync_state_with_parent(stream->parser); GstPad* sinkpad = gst_element_get_static_pad(stream->parser, "sink"); GstPad* srcpad = gst_element_get_static_pad(stream->appsrc, "src"); gst_pad_link(srcpad, sinkpad); gst_object_unref(srcpad); srcpad = 0; gst_object_unref(sinkpad); sinkpad = 0; srcpad = gst_element_get_static_pad(stream->parser, "src"); // TODO: Is padId the best way to identify the Stream? What about trackId? g_object_set_data(G_OBJECT(srcpad), "id", GINT_TO_POINTER(padId)); if (!capsNotifyHandlerConnected) g_signal_connect(srcpad, "notify::caps", G_CALLBACK(webKitMediaSrcParserNotifyCaps), stream); webKitMediaSrcLinkStreamToSrcPad(srcpad, stream); ASSERT(stream->parent->priv->mediaPlayerPrivate); int signal = -1; if (g_str_has_prefix(mediaType, "audio")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Audio; stream->parent->priv->nAudio++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_AUDIO_CHANGED; stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "video")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Video; stream->parent->priv->nVideo++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_VIDEO_CHANGED; stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "text")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Text; stream->parent->priv->nText++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_TEXT_CHANGED; // TODO: Support text tracks. } if (signal != -1) g_signal_emit(G_OBJECT(stream->parent), webkit_media_src_signals[signal], 0, NULL); gst_object_unref(srcpad); srcpad = 0; }
GST_END_TEST GST_START_TEST (connect_chain_of_elements) { gchar *padname = NULL; KmsConnectData *data1, *data2; gchar *filter_factory; GstBus *bus; loop = g_main_loop_new (NULL, TRUE); pipeline = gst_pipeline_new (__FUNCTION__); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline); data1 = kms_connect_data_create (0); data2 = kms_connect_data_create (MAX_CHECKS); data2->data_probe = (KmsProbeType) data_probe_cb; data2->audio_probe = (KmsProbeType) audio_probe_cb; data2->video_probe = (KmsProbeType) video_probe_cb; gst_element_set_state (pipeline, GST_STATE_PLAYING); data1->src = gst_element_factory_make ("dummysrc", NULL); data1->sink = gst_element_factory_make ("filterelement", NULL); data2->src = data1->sink; data2->sink = gst_element_factory_make ("dummysink", NULL); g_signal_connect (data1->src, "pad-added", G_CALLBACK (src_pads_added), data1); g_signal_connect (data1->sink, "pad-added", G_CALLBACK (sink_pads_added), data1); g_signal_connect (data2->src, "pad-added", G_CALLBACK (src_pads_added), data2); g_signal_connect (data2->sink, "pad-added", G_CALLBACK (sink_pads_added), data2); g_signal_connect (data2->sink, "pad-removed", G_CALLBACK (sink_pads_removed), data2); gst_bin_add_many (GST_BIN (pipeline), data1->src, data1->sink, data2->sink, NULL); /*******************************/ /* Connect dummysrc to filter */ /*******************************/ /* request src pad using action */ g_signal_emit_by_name (data1->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_VIDEO, NULL, GST_PAD_SRC, &data1->video_src); fail_if (data1->video_src == NULL); g_signal_emit_by_name (data1->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &data1->audio_src); fail_if (data1->audio_src == NULL); g_signal_emit_by_name (data1->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_DATA, NULL, GST_PAD_SRC, &data1->data_src); fail_if (data1->data_src == NULL); GST_DEBUG ("Video pad 
name %s", data1->video_src); GST_DEBUG ("Audio pad name %s", data1->audio_src); GST_DEBUG ("Data pad name %s", data1->data_src); filter_factory = "videoflip"; GST_DEBUG ("Setting property uri to : %s", filter_factory); g_object_set (G_OBJECT (data1->sink), "filter_factory", filter_factory, NULL); g_object_set (G_OBJECT (data1->src), "video", TRUE, "audio", TRUE, "data", TRUE, NULL); /*******************************/ /* Connect filter to dummysink */ /*******************************/ /* request src pad using action */ g_signal_emit_by_name (data2->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_VIDEO, NULL, GST_PAD_SRC, &data2->video_src); fail_if (data2->video_src == NULL); g_signal_emit_by_name (data2->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &data2->audio_src); fail_if (data2->audio_src == NULL); g_signal_emit_by_name (data2->src, "request-new-pad", KMS_ELEMENT_PAD_TYPE_DATA, NULL, GST_PAD_SRC, &data2->data_src); fail_if (data2->data_src == NULL); GST_DEBUG ("Video pad name %s", data2->video_src); GST_DEBUG ("Audio pad name %s", data2->audio_src); GST_DEBUG ("Data pad name %s", data2->data_src); g_object_set (G_OBJECT (data2->sink), "video", TRUE, "audio", TRUE, "data", TRUE, NULL); g_timeout_add_seconds (4, print_timedout_pipeline, NULL); gst_element_sync_state_with_parent (data1->src); gst_element_sync_state_with_parent (data1->sink); gst_element_sync_state_with_parent (data2->sink); g_main_loop_run (loop); gst_element_set_state (pipeline, GST_STATE_NULL); gst_bus_remove_signal_watch (bus); g_object_unref (bus); g_object_unref (pipeline); g_free (padname); g_main_loop_unref (loop); kms_connect_data_destroy (data1); kms_connect_data_destroy (data2); }
/* Pad probe on an appsink sink pad: on the first CAPS event, creates an
 * appsrc with matching caps, links it to the endpoint's audio or video
 * agnosticbin, and feeds it from the appsink via the "new-sample" signal.
 * The appsrc is stored as pad qdata so later CAPS events are ignored. */
static GstPadProbeReturn
set_appsrc_caps (GstPad * pad, GstPadProbeInfo * info, gpointer httpep)
{
  KmsHttpPostEndpoint *post_ep = KMS_HTTP_POST_ENDPOINT (httpep);
  GstEvent *probe_event = GST_PAD_PROBE_INFO_EVENT (info);
  GstCaps *supported_audio = NULL, *supported_video = NULL;
  GstElement *src_element, *sink_element, *agnostic;
  GstCaps *event_caps;
  gpointer prev;

  if (GST_EVENT_TYPE (probe_event) != GST_EVENT_CAPS) {
    return GST_PAD_PROBE_OK;
  }

  gst_event_parse_caps (probe_event, &event_caps);

  if (event_caps == NULL) {
    GST_ERROR_OBJECT (pad, "Invalid caps received");
    return GST_PAD_PROBE_OK;
  }

  GST_TRACE ("caps are %" GST_PTR_FORMAT, event_caps);

  /* Already configured for this pad? Then there is nothing left to do. */
  prev = g_object_get_qdata (G_OBJECT (pad), appsrc_data_quark ());

  if (prev != NULL) {
    goto end;
  }

  /* Get the proper agnosticbin */
  supported_audio = gst_caps_from_string (KMS_AGNOSTIC_AUDIO_CAPS);
  supported_video = gst_caps_from_string (KMS_AGNOSTIC_VIDEO_CAPS);

  if (gst_caps_can_intersect (supported_audio, event_caps)) {
    agnostic = kms_element_get_audio_agnosticbin (KMS_ELEMENT (post_ep));
  } else if (gst_caps_can_intersect (supported_video, event_caps)) {
    agnostic = kms_element_get_video_agnosticbin (KMS_ELEMENT (post_ep));
  } else {
    GST_ELEMENT_WARNING (post_ep, CORE, CAPS,
        ("Unsupported media received: %" GST_PTR_FORMAT, event_caps),
        ("Unsupported media received: %" GST_PTR_FORMAT, event_caps));
    goto end;
  }

  /* Create appsrc element and link to agnosticbin */
  src_element = gst_element_factory_make ("appsrc", NULL);
  g_object_set (G_OBJECT (src_element), "is-live", TRUE, "do-timestamp",
      FALSE, "min-latency", G_GUINT64_CONSTANT (0), "max-latency",
      G_GUINT64_CONSTANT (0), "format", GST_FORMAT_TIME, "caps", event_caps,
      NULL);

  gst_bin_add (GST_BIN (post_ep), src_element);

  if (!gst_element_link (src_element, agnostic)) {
    GST_ERROR ("Could not link %s to element %s",
        GST_ELEMENT_NAME (src_element), GST_ELEMENT_NAME (agnostic));
  }

  /* Connect new-sample signal to callback */
  sink_element = gst_pad_get_parent_element (pad);
  g_signal_connect (sink_element, "new-sample",
      G_CALLBACK (new_sample_post_handler), src_element);
  g_object_unref (sink_element);

  g_object_set_qdata (G_OBJECT (pad), appsrc_data_quark (), src_element);

  gst_element_sync_state_with_parent (src_element);

end:
  if (supported_audio != NULL) {
    gst_caps_unref (supported_audio);
  }

  if (supported_video != NULL) {
    gst_caps_unref (supported_video);
  }

  return GST_PAD_PROBE_OK;
}
/*
 * The following chain is created after the tee for each output from the
 * source:
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 * Connects a consumer to the shared source pipeline via an inter(audio|video)
 * sink/src pair on a common channel name: the inter*sink is attached to the
 * source pipeline's tee, while the inter*src feeds the returned bin's
 * conversion chain, exposed through a "src" ghost pad.
 *
 * Returns the new consumer-side bin (caller wires it up), or a bin missing
 * the conversion chain if media_type is unknown.  CREATE_ELEMENT_WITH_ID /
 * LINK_ELEMENTS are macros that presumably jump to 'done' or log on
 * failure — TODO confirm against their definitions.
 */
static GstElement *owr_media_source_request_source_default(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrMediaType media_type;
    GstElement *source_pipeline, *tee;
    GstElement *source_bin, *source = NULL, *queue_pre, *queue_post;
    GstElement *capsfilter;
    GstElement *sink, *sink_queue, *sink_bin;
    GstPad *bin_pad = NULL, *srcpad, *sinkpad;
    gchar *bin_name;
    guint source_id;
    gchar *channel_name;

    g_return_val_if_fail(media_source->priv->source_bin, NULL);
    g_return_val_if_fail(media_source->priv->source_tee, NULL);

    /* Hold refs on the shared pipeline objects for the duration of setup. */
    source_pipeline = gst_object_ref(media_source->priv->source_bin);
    tee = gst_object_ref(media_source->priv->source_tee);

    source_id = g_atomic_int_add(&unique_bin_id, 1);
    bin_name = g_strdup_printf("source-bin-%u", source_id);
    source_bin = gst_bin_new(bin_name);
    g_free(bin_name);

    CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-queue", source_id);
    CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id);
    CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id);
    CREATE_ELEMENT_WITH_ID(sink_queue, "queue", "sink-queue", source_id);

    g_object_get(media_source, "media-type", &media_type, NULL);
    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
    {
        GstElement *audioresample, *audioconvert;

        CREATE_ELEMENT_WITH_ID(source, "interaudiosrc", "source", source_id);
        CREATE_ELEMENT_WITH_ID(sink, "interaudiosink", "sink", source_id);

        g_object_set(capsfilter, "caps", caps, NULL);
        CREATE_ELEMENT_WITH_ID(audioresample, "audioresample", "source-audio-resample", source_id);
        CREATE_ELEMENT_WITH_ID(audioconvert, "audioconvert", "source-audio-convert", source_id);

        /* Chain: queue_pre -> audioconvert -> audioresample -> capsfilter ->
         * queue_post (linked back-to-front below). */
        gst_bin_add_many(GST_BIN(source_bin),
            queue_pre, audioconvert, audioresample, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(audioresample, capsfilter);
        LINK_ELEMENTS(audioconvert, audioresample);
        LINK_ELEMENTS(queue_pre, audioconvert);
        break;
    }
    case OWR_MEDIA_TYPE_VIDEO:
    {
        GstElement *videoscale, *videoconvert;

        CREATE_ELEMENT_WITH_ID(source, "intervideosrc", "source", source_id);
        CREATE_ELEMENT_WITH_ID(sink, "intervideosink", "sink", source_id);

        /* Filter out GAP buffers that intervideosrc repeats. */
        srcpad = gst_element_get_static_pad(source, "src");
        gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BUFFER, drop_gap_buffers, NULL, NULL);
        gst_object_unref(srcpad);

        g_object_set(capsfilter, "caps", caps, NULL);
        CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id);
        CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id);

        /* Chain: queue_pre -> videoscale -> videoconvert -> capsfilter ->
         * queue_post (linked back-to-front below). */
        gst_bin_add_many(GST_BIN(source_bin),
            queue_pre, videoscale, videoconvert, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(videoconvert, capsfilter);
        LINK_ELEMENTS(videoscale, videoconvert);
        LINK_ELEMENTS(queue_pre, videoscale);
        break;
    }
    case OWR_MEDIA_TYPE_UNKNOWN:
    default:
        g_assert_not_reached();
        goto done;
    }

    /* The inter* elements rendezvous on a matching channel name. */
    channel_name = g_strdup_printf("source-%u", source_id);
    g_object_set(source, "channel", channel_name, NULL);
    g_object_set(sink, "channel", channel_name, NULL);
    g_free(channel_name);

    /* Add and link the inter*sink to the actual source pipeline */
    bin_name = g_strdup_printf("source-sink-bin-%u", source_id);
    sink_bin = gst_bin_new(bin_name);
    g_free(bin_name);
    gst_bin_add_many(GST_BIN(sink_bin), sink, sink_queue, NULL);
    gst_element_sync_state_with_parent(sink);
    gst_element_sync_state_with_parent(sink_queue);
    LINK_ELEMENTS(sink_queue, sink);

    sinkpad = gst_element_get_static_pad(sink_queue, "sink");
    bin_pad = gst_ghost_pad_new("sink", sinkpad);
    gst_object_unref(sinkpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(sink_bin, bin_pad);
    bin_pad = NULL;

    gst_bin_add(GST_BIN(source_pipeline), sink_bin);
    gst_element_sync_state_with_parent(sink_bin);
    LINK_ELEMENTS(tee, sink_bin);

    /* Start up our new bin and link it all */
    srcpad = gst_element_get_static_pad(queue_post, "src");
    g_assert(srcpad);
    bin_pad = gst_ghost_pad_new("src", srcpad);
    gst_object_unref(srcpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(source_bin, bin_pad);

    gst_bin_add(GST_BIN(source_bin), source);
    LINK_ELEMENTS(source, queue_pre);

done:
    gst_object_unref(source_pipeline);
    gst_object_unref(tee);

    return source_bin;
}
/*
 * Pad-probe callback that reconfigures the audio pipeline to insert or
 * remove the ReplayGain volume element while the pad is blocked.
 *
 * Runs with the pad blocked, so it is safe to unlink/relink elements here.
 * Returns GST_PAD_PROBE_REMOVE in all cases so the probe fires only once
 * per requested reconfiguration.
 */
static GstPadProbeReturn
pad_block_cb (GstPad *srcPad, GstPadProbeInfo *info, gpointer user_data)
{
    BansheePlayer* player;

    player = (BansheePlayer*) user_data;
    g_return_val_if_fail (IS_BANSHEE_PLAYER (player), GST_PAD_PROBE_OK);

    // The pad_block_cb can get triggered multiple times, on different threads.
    // Lock around the link/unlink code, so we don't end up going through here
    // with inconsistent state.
    g_mutex_lock (player->replaygain_mutex);

    if ((player->replaygain_enabled && player->rgvolume_in_pipeline) ||
        (!player->replaygain_enabled && !player->rgvolume_in_pipeline)) {
        // The pipeline is already in the correct state. Unblock the pad, and return.
        player->rg_pad_block_id = 0;
        g_mutex_unlock (player->replaygain_mutex);
        return GST_PAD_PROBE_REMOVE;
    }

    // Detach the current chain: either before -> rgvolume -> after,
    // or the direct before -> after link.
    if (player->rgvolume_in_pipeline) {
        gst_element_unlink (player->before_rgvolume, player->rgvolume);
        gst_element_unlink (player->rgvolume, player->after_rgvolume);
    } else {
        gst_element_unlink (player->before_rgvolume, player->after_rgvolume);
    }

    if (player->replaygain_enabled) {
        // Build a fresh rgvolume element; on failure fall back to disabled mode.
        player->rgvolume = _bp_rgvolume_new (player);
        if (!GST_IS_ELEMENT (player->rgvolume)) {
            player->replaygain_enabled = FALSE;
        }
    } else {
        // ReplayGain was just turned off: drop the old rgvolume from the bin.
        // gst_bin_remove() releases the bin's reference to it.
        gst_element_set_state (player->rgvolume, GST_STATE_NULL);
        gst_bin_remove (GST_BIN (player->audiobin), player->rgvolume);
    }

    if (player->replaygain_enabled && GST_IS_ELEMENT (player->rgvolume)) {
        g_signal_connect (player->rgvolume, "notify::target-gain",
            G_CALLBACK (on_target_gain_changed), player);
        gst_bin_add (GST_BIN (player->audiobin), player->rgvolume);
        gst_element_sync_state_with_parent (player->rgvolume);

        // link in rgvolume and connect to the real audio sink
        gst_element_link (player->before_rgvolume, player->rgvolume);
        gst_element_link (player->rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = TRUE;
    } else {
        // link the queue with the real audio sink
        gst_element_link (player->before_rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = FALSE;
    }

    // Our state is now consistent
    player->rg_pad_block_id = 0;
    g_mutex_unlock (player->replaygain_mutex);

    _bp_rgvolume_print_volume (player);

    return GST_PAD_PROBE_REMOVE;
}
static void gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self, GstCaps * caps) { GstClock *clock; gint64 base_time; GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps); if (self->src_vid_src) { GstCaps *old_caps; g_object_get (G_OBJECT (self->src_filter), "caps", &old_caps, NULL); if (gst_caps_is_equal (caps, old_caps)) { GST_DEBUG_OBJECT (self, "old and new caps are same, do not reset it"); if (old_caps) gst_caps_unref (old_caps); return; } if (old_caps) gst_caps_unref (old_caps); clock = gst_element_get_clock (self->src_vid_src); base_time = gst_element_get_base_time (self->src_vid_src); /* Ideally, we should only need to get the source to READY here, * but it seems v4l2src isn't happy with this. Putting to NULL makes * it work. * * TODO fix this in v4l2src */ gst_element_set_state (self->src_vid_src, GST_STATE_NULL); set_capsfilter_caps (self, caps); self->drop_newseg = TRUE; GST_DEBUG_OBJECT (self, "Bringing source up"); if (!gst_element_sync_state_with_parent (self->src_vid_src)) { GST_WARNING_OBJECT (self, "Failed to reset source caps"); gst_element_set_state (self->src_vid_src, GST_STATE_NULL); } if (clock) { gst_element_set_clock (self->src_vid_src, clock); gst_element_set_base_time (self->src_vid_src, base_time); if (GST_IS_BIN (self->src_vid_src)) { GstIterator *it = gst_bin_iterate_elements (GST_BIN (self->src_vid_src)); gpointer item = NULL; gboolean done = FALSE; while (!done) { switch (gst_iterator_next (it, &item)) { case GST_ITERATOR_OK: gst_element_set_base_time (GST_ELEMENT (item), base_time); gst_object_unref (item); break; case GST_ITERATOR_RESYNC: gst_iterator_resync (it); break; case GST_ITERATOR_ERROR: done = TRUE; break; case GST_ITERATOR_DONE: done = TRUE; break; } } gst_iterator_free (it); } gst_object_unref (clock); } } }
/*
 * Links a newly created dynamic pad into the per-channel decode chain
 * (queue ! depayloader ! decoder [! overlay]) and from there into the
 * channel's input-selector request pad.
 *
 * source:     element that produced the new pad; its state is synced last.
 * *_name:     names of pre-built elements looked up in the global pipeline.
 * channel:    selector input index, used to build the "sink%d" pad name.
 * media_type: 'v' selects the video wording/branch; anything else is audio.
 *
 * Failures are reported with g_print() and linking continues best-effort,
 * matching the original behavior. Fixes over the previous version: the
 * g_strdup_printf() strings and every element/pad reference returned by
 * gst_bin_get_by_name() / gst_element_get_request_pad() /
 * gst_element_get_static_pad() were leaked; they are now released, and the
 * assignments-in-condition are parenthesized and compared explicitly.
 */
static void
link_new_pad (GstElement *source, GstPad *pad, char *queue_name,
    char *depayloader_name, char *decoder_name, char *overlay_name,
    char *selector_name, int channel, char media_type)
{
  GstElement *selector, *queue, *depayloader, *decoder, *overlay;
  gchar *name;
  const gchar *media;
  GstPad *sinkpad = NULL;
  GstPad *tosrc = NULL;
  GstPad *decsrc = NULL;
  GstPad *qsink = NULL;
  int returnCode;

  media = (media_type == 'v') ? "video" : "audio";

  /* gst_bin_get_by_name() returns a new reference; all are dropped below. */
  selector = gst_bin_get_by_name (GST_BIN (pipeline), selector_name);
  if (!selector) {
    g_print("Unable to get selector element %s.\n", selector_name);
  }
  queue = gst_bin_get_by_name (GST_BIN (pipeline), queue_name);
  if (!queue) {
    g_print("Unable to get selector queue %s.\n", queue_name);
  }
  depayloader = gst_bin_get_by_name (GST_BIN (pipeline), depayloader_name);
  if (!depayloader) {
    g_print("Unable to get depayloader%s.\n", depayloader_name);
  }
  decoder = gst_bin_get_by_name (GST_BIN (pipeline), decoder_name);
  if (!decoder) {
    g_print("Unable to get decoder%s.\n", decoder_name);
  }
  overlay = gst_bin_get_by_name (GST_BIN (pipeline), overlay_name);
  // Don't check for error, overlay not present on Audio channels, so this can fail

  /* get all the pads */
  name = g_strdup_printf ("sink%d", channel);
  sinkpad = gst_element_get_request_pad (selector, name);
  if (!sinkpad) {
    g_print("Unable to create pad %s on %s selector.\n", name, media);
  }
  qsink = gst_element_get_static_pad (queue, "sink");
  if (!qsink) {
    g_print("Unable to get sink pad on %s queue %d.\n", media, channel);
  }

  /* Now, link it all up */
  if ((returnCode = gst_pad_link (pad, qsink)) != GST_PAD_LINK_OK) {
    g_print("Link of %s source%d pad to queue failed with Code %d\n", media,
        channel, returnCode);
  }
  if (gst_element_link_many (queue, depayloader, decoder, NULL) != TRUE) {
    g_print("Unable to link %s %s and %s.\n", queue_name, depayloader_name,
        decoder_name);
  }

  if (overlay) {
    /* Video path: decoder ! overlay ! selector. */
    if (gst_element_link (decoder, overlay) != TRUE) {
      g_print("Unable to link %s and %s.\n", decoder_name, overlay_name);
    }
    tosrc = gst_element_get_static_pad (overlay, "src");
    if (!tosrc) {
      g_print("Unable to get %s overlay src pad on channel %d.\n", media,
          channel);
    }
    if ((returnCode = gst_pad_link (tosrc, sinkpad)) != GST_PAD_LINK_OK) {
      g_print("Link of %s timeoverlay src %d pad to selector failed with Code %d\n",
          media, channel, returnCode);
    }
  } else {
    /* Audio path: decoder ! selector. */
    decsrc = gst_element_get_static_pad (decoder, "src");
    if (!decsrc) {
      g_print("Unable to get %s decoder src pad on channel %d.\n", media,
          channel);
    }
    if ((returnCode = gst_pad_link (decsrc, sinkpad)) != GST_PAD_LINK_OK) {
      g_print("Link of %s decoder src %d pad to selector failed with Code %d\n",
          media, channel, returnCode);
    }
  }

  gst_element_sync_state_with_parent(source);

  /* Release local references; the pipeline keeps the elements and the
   * selector keeps its request pad alive. */
  g_free (name);
  if (sinkpad)
    gst_object_unref (sinkpad);
  if (qsink)
    gst_object_unref (qsink);
  if (tosrc)
    gst_object_unref (tosrc);
  if (decsrc)
    gst_object_unref (decsrc);
  if (selector)
    gst_object_unref (selector);
  if (queue)
    gst_object_unref (queue);
  if (depayloader)
    gst_object_unref (depayloader);
  if (decoder)
    gst_object_unref (decoder);
  if (overlay)
    gst_object_unref (overlay);
}
// Builds a secondary fullscreen video branch off the pipeline's video tee
// (queue ! ffmpegcolorspace ! videoscale ! autovideosink), brings it to the
// pipeline state, and replays the current segment/position downstream so the
// new branch starts in sync. Returns false only if a fullscreen branch is
// already installed (m_dynamicPadName set); otherwise true.
bool GStreamerGWorld::enterFullscreen()
{
    if (m_dynamicPadName)
        return false;

    if (!m_videoWindow)
        m_videoWindow = PlatformVideoWindow::createWindow();

    GstElement* platformVideoSink = gst_element_factory_make("autovideosink", "platformVideoSink");
    GstElement* colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    GstElement* queue = gst_element_factory_make("queue", "queue");
    GstElement* videoScale = gst_element_factory_make("videoscale", "videoScale");

    // Get video sink bin and the tee inside.
    GOwnPtr<GstElement> videoSink;
    g_object_get(m_pipeline, "video-sink", &videoSink.outPtr(), NULL);
    GstElement* tee = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee");

    // Add and link a queue, ffmpegcolorspace and sink in the bin.
    gst_bin_add_many(GST_BIN(videoSink.get()), platformVideoSink, videoScale, colorspace, queue, NULL);
    gst_element_link_many(queue, colorspace, videoScale, platformVideoSink, NULL);

    // Link a new src pad from tee to queue.
    GstPad* srcPad = gst_element_get_request_pad(tee, "src%d");
    GstPad* sinkPad = gst_element_get_static_pad(queue, "sink");
    gst_pad_link(srcPad, sinkPad);
    gst_object_unref(GST_OBJECT(sinkPad));

    // Remember the request pad's name so leaveFullscreen (presumably) can
    // find and release it later.
    m_dynamicPadName = gst_pad_get_name(srcPad);

    // Roll new elements to pipeline state.
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(colorspace);
    gst_element_sync_state_with_parent(videoScale);
    gst_element_sync_state_with_parent(platformVideoSink);

    gst_object_unref(tee);

    // Query the current media segment informations and send them towards
    // the new tee branch downstream.
    GstQuery* query = gst_query_new_segment(GST_FORMAT_TIME);
    gboolean queryResult = gst_element_query(m_pipeline, query);

#if GST_CHECK_VERSION(0, 10, 30)
    // On a failed segment query there is nothing to replay downstream.
    if (!queryResult) {
        gst_query_unref(query);
        gst_object_unref(GST_OBJECT(srcPad));
        return true;
    }
#else
    // GStreamer < 0.10.30 doesn't set the query result correctly, so
    // just ignore it to avoid a compilation warning.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=620490.
    (void) queryResult;
#endif

    GstFormat format;
    gint64 position;
    if (!gst_element_query_position(m_pipeline, &format, &position))
        position = 0;

    gdouble rate;
    gint64 startValue, stopValue;
    gst_query_parse_segment(query, &rate, &format, &startValue, &stopValue);

    // Push a newsegment event so the new branch renders from the current
    // playback position rather than from zero.
    GstEvent* event = gst_event_new_new_segment(FALSE, rate, format, startValue, stopValue, position);
    gst_pad_push_event(srcPad, event);

    gst_query_unref(query);
    gst_object_unref(GST_OBJECT(srcPad));
    return true;
}
static FsRtpSpecialSource * fs_rtp_special_source_new (FsRtpSpecialSourceClass *klass, GList **negotiated_codec_associations, GMutex *mutex, FsCodec *selected_codec, GstElement *bin, GstElement *rtpmuxer) { FsRtpSpecialSource *source = NULL; GstPad *pad = NULL; g_return_val_if_fail (klass, NULL); g_return_val_if_fail (klass->build, NULL); g_return_val_if_fail (GST_IS_BIN (bin), NULL); g_return_val_if_fail (GST_IS_ELEMENT (rtpmuxer), NULL); source = g_object_new (G_OBJECT_CLASS_TYPE (klass), NULL); g_return_val_if_fail (source, NULL); g_mutex_lock (mutex); source->priv->rtpmuxer = gst_object_ref (rtpmuxer); source->priv->outer_bin = gst_object_ref (bin); source->priv->src = klass->build (source, *negotiated_codec_associations, selected_codec); g_mutex_unlock (mutex); if (!source->priv->src) goto error; if (!gst_bin_add (GST_BIN (source->priv->outer_bin), source->priv->src)) { GST_ERROR ("Could not add bin to outer bin"); gst_object_unref (source->priv->src); source->priv->src = NULL; goto error; } source->priv->muxer_request_pad = gst_element_get_request_pad (rtpmuxer, "priority_sink_%d"); if (!source->priv->muxer_request_pad) source->priv->muxer_request_pad = gst_element_get_request_pad (rtpmuxer, "sink_%d"); if (!source->priv->muxer_request_pad) { GST_ERROR ("Could not get request pad from muxer"); goto error_added; } pad = gst_element_get_static_pad (source->priv->src, "src"); if (GST_PAD_LINK_FAILED (gst_pad_link (pad, source->priv->muxer_request_pad))) { GST_ERROR ("Could not link rtpdtmfsrc src to muxer sink"); gst_object_unref (pad); goto error_added; } gst_object_unref (pad); if (!gst_element_sync_state_with_parent (source->priv->src)) { GST_ERROR ("Could not sync capsfilter state with its parent"); goto error_added; } return source; error_added: gst_element_set_state (source->priv->src, GST_STATE_NULL); gst_bin_remove (GST_BIN (source->priv->outer_bin), source->priv->src); source->priv->src = NULL; error: g_object_unref (source); return NULL; }
/*
 * Links @src (a source element's pad) to this sink's pad.
 *
 * If this sink's pad is already linked, the previous connection is undone
 * first. When source and sink share the same parent object a loopback is
 * being created, and a capsfilter is inserted between the two pads to make
 * the in-bin connection possible; the capsfilter removes itself when either
 * side is unlinked (sink_unlinked/src_unlinked handlers).
 *
 * Returns true on success; on failure the (request) pad is released.
 * NOTE(review): the pad may have come from gst_element_get_static_pad(),
 * in which case gst_element_release_request_pad() on it looks wrong —
 * confirm the element only exposes request pads under this pad name.
 */
bool
MediaSinkImpl::linkPad (std::shared_ptr<MediaSourceImpl> mediaSrc, GstPad *src)
{
  RecMutex::Lock lock (mutex);
  std::shared_ptr<MediaSourceImpl> connectedSrcLocked;
  GstPad *sink;
  bool ret = false;

  try {
    connectedSrcLocked = connectedSrc.lock();
  } catch (const std::bad_weak_ptr &e) {
    // No previously connected source; proceed with an empty pointer.
  }

  // Prefer the static pad; fall back to requesting one.
  if ( (sink = gst_element_get_static_pad (getGstreamerElement(),
               getPadName().c_str() ) ) == NULL) {
    sink = gst_element_get_request_pad (getGstreamerElement(),
        getPadName().c_str() );
  }

  if (gst_pad_is_linked (sink) ) {
    unlink (connectedSrcLocked, sink);
  }

  if (std::dynamic_pointer_cast<MediaObjectImpl> (mediaSrc)->getParent() ==
      getParent() ) {
    GstBin *container;
    GstElement *filter, *parent;
    GstPad *aux_sink, *aux_src;

    GST_DEBUG ("Connecting loopback, adding a capsfilter to allow connection");
    parent = GST_ELEMENT (GST_OBJECT_PARENT (sink) );

    if (parent == NULL) {
      goto end;
    }

    container = GST_BIN (GST_OBJECT_PARENT (parent) );

    if (container == NULL) {
      goto end;
    }

    filter = gst_element_factory_make ("capsfilter", NULL);

    aux_sink = gst_element_get_static_pad (filter, "sink");
    aux_src = gst_element_get_static_pad (filter, "src");

    // The filter tears itself down when either of its pads is unlinked.
    g_signal_connect (G_OBJECT (aux_sink), "unlinked",
        G_CALLBACK (sink_unlinked), filter );
    g_signal_connect (G_OBJECT (aux_src), "unlinked",
        G_CALLBACK (src_unlinked), filter );

    gst_bin_add (container, filter);
    gst_element_sync_state_with_parent (filter);

    // Link filter-src -> sink first, then src -> filter-sink; roll back the
    // first link if the second fails.
    if (gst_pad_link_full (aux_src, sink,
            GST_PAD_LINK_CHECK_NOTHING) == GST_PAD_LINK_OK) {
      if (gst_pad_link_full (src, aux_sink,
              GST_PAD_LINK_CHECK_NOTHING) == GST_PAD_LINK_OK) {
        ret = true;
      } else {
        gst_pad_unlink (aux_src, sink);
      }
    }

    g_object_unref (aux_sink);
    g_object_unref (aux_src);

    gst_debug_bin_to_dot_file_with_ts (GST_BIN (container),
        GST_DEBUG_GRAPH_SHOW_ALL, "loopback");
  } else {
    if (gst_pad_link_full (src, sink,
            GST_PAD_LINK_CHECK_NOTHING) == GST_PAD_LINK_OK) {
      ret = true;
    }
  }

  if (ret == true) {
    connectedSrc = std::weak_ptr<MediaSourceImpl> (mediaSrc);
  } else {
    gst_element_release_request_pad (getGstreamerElement(), sink);
  }

end:
  g_object_unref (sink);

  return ret;
}
/*
 * Post-creation hook for an audio stream: registers the element-added
 * notifier on the Farsight conference, prepends a volume filter to the
 * stream's filter manager, and attaches a dtmfsrc to the sink's parent bin
 * so DTMF tones can be injected.
 *
 * Returns TRUE on success, FALSE on any failure (each failure path logs a
 * g_warning and releases whatever references it took).
 */
static gboolean
skype_audio_stream_post_create_stream (SkypeBaseStream *self,
    gpointer stream)
{
  SkypeAudioStreamPrivate *priv = SKYPE_AUDIO_STREAM (self)->priv;
  FsuConference *fsuconf;
  FsConference *fsconf;
  FsuFilterManager *manager;
  FsuFilter *filter;
  FsuSink *sink;
  GstElement *parent;
  GstElement *src;

  /* Setup dynamic properties */
  g_object_get (G_OBJECT (stream), "fsu-conference", &fsuconf, NULL);
  if (fsuconf == NULL) {
    g_warning ("Error fetching FsuConference");
    return FALSE;
  }

  g_object_get (G_OBJECT (fsuconf), "fs-conference", &fsconf, NULL);
  g_object_unref (fsuconf);
  if (fsconf == NULL) {
    g_warning ("Error fetching FsConference");
    return FALSE;
  }

  /* Watch elements added to the conference bin; the notifier keeps its own
   * reference, so ours is dropped right after. */
  fs_element_added_notifier_add (priv->notifier, GST_BIN (fsconf));
  g_object_unref (fsconf);

  /* Setup volume filter */
  g_object_get (G_OBJECT (stream), "filter-manager", &manager, NULL);
  if (manager == NULL) {
    g_warning ("Error fetching FsuFilterManager");
    return FALSE;
  }

  filter = FSU_FILTER (fsu_volume_filter_new ());
  g_object_set (filter, "volume", priv->volume, NULL);
  /* volume_id identifies the filter in the manager for later updates. */
  priv->volume_id = fsu_filter_manager_prepend_filter (manager, filter);
  g_object_unref (manager);
  g_object_unref (filter);

  if (priv->volume_id == NULL) {
    g_warning ("Error adding resolution filter to filter manager.");
    return FALSE;
  }

  /* Setup DTMF source */
  g_object_get (stream, "sink", &sink, NULL);
  if (sink == NULL) {
    g_warning ("Error fetching sink");
    return FALSE;
  }

  src = gst_element_factory_make ("dtmfsrc", NULL);
  if (src == NULL) {
    g_warning ("Error creating dtmfsrc");
    gst_object_unref (sink);
    return FALSE;
  }

  parent = GST_ELEMENT (gst_element_get_parent (sink));
  if (parent == NULL) {
    g_warning ("Error fetching FsuSink parent");
    gst_object_unref (src);
    gst_object_unref (sink);
    return FALSE;
  }

  /* The bin takes ownership of the extra ref passed in; we keep our own
   * ref to src for priv->dtmfsrc. */
  if (!gst_bin_add (GST_BIN (parent), gst_object_ref (src))) {
    g_warning ("Error adding dtmfsrc to FsuSink parent");
    /* Unref src twice if it fails as it's reffed in the call */
    gst_object_unref (src);
    gst_object_unref (src);
    gst_object_unref (sink);
    gst_object_unref (parent);
    return FALSE;
  }
  gst_object_unref (parent);

  if (!gst_element_link (src, GST_ELEMENT (sink))) {
    g_warning ("Error adding dtmfsrc to FsuSink parent");
    gst_object_unref (src);
    gst_object_unref (sink);
    return FALSE;
  }
  gst_object_unref (sink);

  if (!gst_element_sync_state_with_parent (src)) {
    g_warning ("Error syncing dtmfsrc state with parent");
    gst_object_unref (src);
    return FALSE;
  }

  /* Success: keep the dtmfsrc and a ref on the stream. */
  priv->dtmfsrc = src;
  priv->stream = g_object_ref (stream);

  return TRUE;
}
/*
 * pad-added handler for the discoverer's uridecodebin: attaches a
 * queue ! fakesink branch to every new pad so the stream prerolls, and
 * records the pad in dc->priv->streams for later inspection.
 *
 * Subtitle pads are sparse, so their fakesink is made non-async to avoid
 * stalling preroll waiting for data that may never come.
 */
static void
uridecodebin_pad_added_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  PrivateStream *ps;
  GstPad *sinkpad = NULL;
  GstCaps *caps;
  /* Lazily-built, process-lifetime caps listing known subtitle types. */
  static GstCaps *subs_caps = NULL;

  if (!subs_caps) {
    subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
        "subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
        "application/x-ssa; application/x-ass; subtitle/x-kate; "
        "video/x-dvd-subpicture; ");
  }

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  ps = g_slice_new0 (PrivateStream);

  ps->dc = dc;
  ps->pad = pad;
  ps->queue = gst_element_factory_make ("queue", NULL);
  ps->sink = gst_element_factory_make ("fakesink", NULL);

  if (G_UNLIKELY (ps->queue == NULL || ps->sink == NULL))
    goto error;

  g_object_set (ps->sink, "silent", TRUE, NULL);
  /* Keep the queue tiny: one buffer is enough to preroll. */
  g_object_set (ps->queue, "max-size-buffers", 1, "silent", TRUE, NULL);

  caps = gst_pad_get_caps_reffed (pad);

  if (gst_caps_can_intersect (caps, subs_caps)) {
    /* Subtitle streams are sparse and don't provide any information - don't
     * wait for data to preroll */
    g_object_set (ps->sink, "async", FALSE, NULL);
  }
  gst_caps_unref (caps);

  gst_bin_add_many (dc->priv->pipeline, ps->queue, ps->sink, NULL);

  if (!gst_element_link_pads_full (ps->queue, "src", ps->sink, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->sink))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->queue))
    goto error;

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  if (sinkpad == NULL)
    goto error;
  if (gst_pad_link_full (pad, sinkpad,
          GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)
    goto error;
  gst_object_unref (sinkpad);

  /* Add an event probe */
  gst_pad_add_event_probe (pad, G_CALLBACK (_event_probe), ps);

  DISCO_LOCK (dc);
  dc->priv->streams = g_list_append (dc->priv->streams, ps);
  DISCO_UNLOCK (dc);

  GST_DEBUG_OBJECT (dc, "Done handling pad");

  return;

error:
  GST_ERROR_OBJECT (dc, "Error while handling pad");
  if (sinkpad)
    gst_object_unref (sinkpad);
  /* NOTE(review): after gst_bin_add_many() the bin owns queue/sink, so a
   * plain unref here is only balanced on the pre-add failure paths —
   * confirm the post-add error paths don't over-unref. */
  if (ps->queue)
    gst_object_unref (ps->queue);
  if (ps->sink)
    gst_object_unref (ps->sink);
  g_slice_free (PrivateStream, ps);
  return;
}
/*
 * pad-added callback for the internal decoder: attaches an appsink to the
 * first video pad and (when with_audio is set) the first audio pad.
 *
 * Audio is only linked when the current queue item plays at normal rate
 * (item->rate == 1.0) or when there is no current item. Any other pad is
 * ignored. The appsink runs with sync=FALSE and feeds buffers back through
 * the per-media callbacks.
 *
 * Fix over the previous version: the caps returned by
 * gst_pad_get_caps_reffed() were never unreffed, leaking one caps per
 * decoded pad; they are now released after the last use of `mime`
 * (which borrows from the caps' first structure).
 */
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio &&
      !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }

  /* `mime` and `s` borrow from `caps`; release only after their last use. */
  gst_caps_unref (caps);

  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs,
        nlesrc, NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
/*
 * Drains the queued add/remove operations while the stream is blocked on
 * @pad.
 *
 * Called from a blocking pad probe. Pops each ChangeData from the change
 * queue and either inserts data->element at the requested position (ADD)
 * or unlinks and removes it, re-linking its neighbours (remove).
 *
 * Locking: the object lock is held while touching the change queue and is
 * dropped while manipulating pads/elements; every exit path from the loop
 * (continue / done / retry / error) re-takes it so `next:` can unblock the
 * pad with the lock held.
 *
 * Outcomes per operation: success -> completion callback with TRUE;
 * retry  -> the blocked pad isn't the one this change applies to, push the
 *           data back and stop for now; error -> completion callback with
 *           FALSE.
 */
static void
gst_insert_bin_do_change (GstInsertBin * self, GstPad * pad)
{
  struct ChangeData *data;

  GST_OBJECT_LOCK (self);

  if (!is_right_direction_for_block (pad)) {
    GST_WARNING_OBJECT (self, "Block pad does not have the expected direction");
    goto next;
  }

  while ((data = g_queue_pop_head (&self->priv->change_queue)) != NULL) {
    GstPad *peer = NULL;
    GstPad *other_peer = NULL;

    GST_OBJECT_UNLOCK (self);

    if (data->action == GST_INSERT_BIN_ACTION_ADD &&
        !validate_element (self, data->element))
      goto error;

    peer = gst_pad_get_peer (pad);

    if (peer == NULL) {
      GST_WARNING_OBJECT (self, "Blocked pad has no peer");
      goto error;
    }

    if (data->action == GST_INSERT_BIN_ACTION_ADD) {
      GstPad *srcpad = NULL, *sinkpad = NULL;
      GstPad *peersrcpad, *peersinkpad;

      /* First let's make sure we have the right pad */
      if (data->sibling) {
        GstElement *parent = NULL;
        GstPad *siblingpad;

        /* Pick whichever of pad/peer should belong to the sibling,
         * given the insertion direction. */
        if ((gst_pad_get_direction (pad) == GST_PAD_SRC &&
                data->direction == DIRECTION_BEFORE) ||
            (gst_pad_get_direction (pad) == GST_PAD_SINK &&
                data->direction == DIRECTION_AFTER))
          siblingpad = peer;
        else
          siblingpad = pad;

        /* Only the pointer value is compared; the ref is dropped at once. */
        parent = gst_pad_get_parent_element (siblingpad);
        if (parent != NULL)
          gst_object_unref (parent);

        if (parent != data->sibling)
          goto retry;
      } else {
        GstObject *parent;
        GstPad *ghost;
        GstPad *proxypad;

        /* No sibling: the change is anchored at one of our ghost pads;
         * verify the blocked pad really proxies that ghost pad. */
        if (data->direction == DIRECTION_BEFORE) {
          ghost = self->priv->srcpad;
          if (gst_pad_get_direction (pad) == GST_PAD_SINK)
            proxypad = pad;
          else
            proxypad = peer;
        } else {
          ghost = self->priv->sinkpad;
          if (gst_pad_get_direction (pad) == GST_PAD_SINK)
            proxypad = peer;
          else
            proxypad = pad;
        }

        if (!GST_IS_PROXY_PAD (proxypad))
          goto retry;
        parent = gst_pad_get_parent (proxypad);
        if (!parent)
          goto retry;
        gst_object_unref (parent);
        if (GST_PAD_CAST (parent) != ghost)
          goto retry;
      }

      /* Normalize pad/peer into a src/sink pair. */
      if (gst_pad_get_direction (pad) == GST_PAD_SRC) {
        peersrcpad = pad;
        peersinkpad = peer;
      } else {
        peersrcpad = peer;
        peersinkpad = pad;
      }

      /* A side that is actually one of our own ghost pads is represented
       * as NULL: it gets re-targeted instead of re-linked. */
      if (GST_IS_PROXY_PAD (peersrcpad)) {
        GstObject *parent = gst_pad_get_parent (peersrcpad);

        if (GST_PAD_CAST (parent) == self->priv->sinkpad)
          peersrcpad = NULL;

        if (parent)
          gst_object_unref (parent);
      }

      if (GST_IS_PROXY_PAD (peersinkpad)) {
        GstObject *parent = gst_pad_get_parent (peersinkpad);

        if (GST_PAD_CAST (parent) == self->priv->srcpad)
          peersinkpad = NULL;

        if (parent)
          gst_object_unref (parent);
      }

      /* Break the existing connection at the insertion point. */
      if (peersinkpad && peersrcpad) {
        gst_pad_unlink (peersrcpad, peersinkpad);
      } else {
        if (!peersinkpad)
          gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad), NULL);
        if (!peersrcpad)
          gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad), NULL);
      }

      srcpad = get_single_pad (data->element, GST_PAD_SRC);
      sinkpad = get_single_pad (data->element, GST_PAD_SINK);

      if (srcpad == NULL || sinkpad == NULL) {
        GST_WARNING_OBJECT (self, "Can not get element src or sink pad");
        goto error;
      }

      if (!gst_bin_add (GST_BIN (self), data->element)) {
        GST_WARNING_OBJECT (self, "Can not add element to bin");
        goto error;
      }

      /* Wire the new element's sink side: either to the upstream sibling
       * pad or as the new target of our sink ghost pad. */
      if (peersrcpad) {
        if (GST_PAD_LINK_FAILED (gst_pad_link (peersrcpad, sinkpad))) {
          GST_WARNING_OBJECT (self, "Can not link sibling's %s:%s pad"
              " to element's %s:%s pad", GST_DEBUG_PAD_NAME (peersrcpad),
              GST_DEBUG_PAD_NAME (sinkpad));
          goto error;
        }
      } else {
        if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad),
                sinkpad)) {
          GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s",
              GST_DEBUG_PAD_NAME (sinkpad),
              GST_DEBUG_PAD_NAME (self->priv->sinkpad));
          goto error;
        }
      }

      /* And its src side, symmetrically. */
      if (peersinkpad) {
        if (GST_PAD_LINK_FAILED (gst_pad_link (srcpad, peersinkpad))) {
          GST_WARNING_OBJECT (self, "Can not link element's %s:%s pad"
              " to sibling's %s:%s pad", GST_DEBUG_PAD_NAME (srcpad),
              GST_DEBUG_PAD_NAME (peersinkpad));
          goto error;
        }
      } else {
        if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad),
                srcpad)) {
          GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s",
              GST_DEBUG_PAD_NAME (srcpad),
              GST_DEBUG_PAD_NAME (self->priv->srcpad));
          goto error;
        }
      }

      gst_object_unref (srcpad);
      gst_object_unref (sinkpad);

      if (!gst_element_sync_state_with_parent (data->element)) {
        GST_WARNING_OBJECT (self, "Can not sync element's state with parent");
        goto error;
      }
    } else {
      /* Removal path. */
      GstElement *parent = NULL;
      GstPad *other_pad;
      GstCaps *caps = NULL, *peercaps = NULL;
      gboolean can_intersect;
      gboolean success;

      /* The blocked pad's peer must belong to the element being removed. */
      parent = gst_pad_get_parent_element (peer);
      if (parent != NULL)
        gst_object_unref (parent);

      if (parent != data->element)
        goto retry;

      if (gst_pad_get_direction (peer) == GST_PAD_SRC)
        other_pad = get_single_pad (data->element, GST_PAD_SINK);
      else
        other_pad = get_single_pad (data->element, GST_PAD_SRC);

      if (!other_pad) {
        GST_WARNING_OBJECT (self, "Can not get element's other pad");
        goto error;
      }

      other_peer = gst_pad_get_peer (other_pad);
      gst_object_unref (other_pad);

      if (!other_peer) {
        GST_WARNING_OBJECT (self, "Can not get element's other peer");
        goto error;
      }

      /* Get the negotiated caps for the source pad peer,
       * because renegotiation while the pipeline is playing doesn't work
       * that fast. */
      if (gst_pad_get_direction (pad) == GST_PAD_SRC)
        caps = gst_pad_get_current_caps (pad);
      else
        peercaps = gst_pad_get_current_caps (other_peer);
      if (!caps)
        caps = gst_pad_query_caps (pad, NULL);
      if (!peercaps)
        peercaps = gst_pad_query_caps (other_peer, NULL);

      /* The neighbours must be directly compatible once the element goes. */
      can_intersect = gst_caps_can_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);

      if (!can_intersect) {
        GST_WARNING_OBJECT (self, "Pads are incompatible without the element");
        goto error;
      }

      /* Flush the element with an EOS, but stop that EOS from leaking
       * downstream via a temporary probe on the downstream peer. */
      if (gst_pad_get_direction (other_peer) == GST_PAD_SRC &&
          gst_pad_is_active (other_peer)) {
        gulong probe_id;

        probe_id = gst_pad_add_probe (other_peer,
            GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
            wait_and_drop_eos_cb, NULL, NULL);
        gst_pad_send_event (peer, gst_event_new_eos ());
        gst_pad_remove_probe (other_peer, probe_id);
      }

      /* Lock the state so removing it from the bin can't restart it. */
      gst_element_set_locked_state (data->element, TRUE);
      gst_element_set_state (data->element, GST_STATE_NULL);
      if (!gst_bin_remove (GST_BIN (self), data->element)) {
        GST_WARNING_OBJECT (self, "Element removal rejected");
        goto error;
      }
      gst_element_set_locked_state (data->element, FALSE);

      /* Reconnect the two neighbours directly. */
      if (gst_pad_get_direction (pad) == GST_PAD_SRC)
        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (pad, other_peer,
                GST_PAD_LINK_CHECK_HIERARCHY |
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));
      else
        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (other_peer, pad,
                GST_PAD_LINK_CHECK_HIERARCHY |
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));
      gst_object_unref (other_peer);
      other_peer = NULL;

      if (!success) {
        GST_ERROR_OBJECT (self, "Could not re-link after the element's"
            " removal");
        goto error;
      }
    }

    gst_insert_bin_change_data_complete (self, data, TRUE);
    gst_object_unref (peer);

    GST_OBJECT_LOCK (self);
    continue;
  done:
    /* Common exit from retry/error: drop pad refs and leave the loop
     * (object lock is held here). */
    if (other_peer != NULL)
      gst_object_unref (other_peer);
    if (peer != NULL)
      gst_object_unref (peer);
    break;
  retry:
    /* Not this pad's change: put it back at the head for a later block. */
    GST_OBJECT_LOCK (self);
    g_queue_push_head (&self->priv->change_queue, data);
    goto done;
  error:
    /* Handle error */
    gst_insert_bin_change_data_complete (self, data, FALSE);
    GST_OBJECT_LOCK (self);
    goto done;
  }
next:
  gst_insert_bin_block_pad_unlock (self);
}
static void kms_agnostic_bin2_link_to_tee (KmsAgnosticBin2 * self, GstPad * pad, GstElement * tee, GstCaps * caps) { GstElement *queue = gst_element_factory_make ("queue", NULL); GstPad *target; GstProxyPad *proxy; gst_bin_add (GST_BIN (self), queue); gst_element_sync_state_with_parent (queue); if (!(gst_caps_is_any (caps) || gst_caps_is_empty (caps)) && kms_utils_caps_are_raw (caps)) { GstElement *convert = kms_utils_create_convert_for_caps (caps); GstElement *rate = kms_utils_create_rate_for_caps (caps); GstElement *mediator = kms_utils_create_mediator_element (caps); if (kms_utils_caps_are_video (caps)) { g_object_set (queue, "leaky", 2, "max-size-time", LEAKY_TIME, NULL); } remove_element_on_unlinked (convert, "src", "sink"); if (rate) { remove_element_on_unlinked (rate, "src", "sink"); } remove_element_on_unlinked (mediator, "src", "sink"); if (rate) { gst_bin_add (GST_BIN (self), rate); } gst_bin_add_many (GST_BIN (self), convert, mediator, NULL); gst_element_sync_state_with_parent (mediator); gst_element_sync_state_with_parent (convert); if (rate) { gst_element_sync_state_with_parent (rate); } if (rate) { gst_element_link_many (queue, rate, mediator, NULL); } else { gst_element_link (queue, mediator); } gst_element_link_many (mediator, convert, NULL); target = gst_element_get_static_pad (convert, "src"); } else { target = gst_element_get_static_pad (queue, "src"); } gst_ghost_pad_set_target (GST_GHOST_PAD (pad), target); proxy = gst_proxy_pad_get_internal (GST_PROXY_PAD (pad)); gst_pad_set_query_function (GST_PAD_CAST (proxy), proxy_src_pad_query_function); g_object_unref (proxy); g_object_unref (target); link_element_to_tee (tee, queue); }
static void gst_play_sink_convert_bin_on_element_added (GstElement * element, GstPlaySinkConvertBin * self) { gst_element_sync_state_with_parent (element); }
/*
 * Blocked-pad callback that rebuilds the audio conversion chain.
 *
 * When the sink pad's upstream caps are raw, (re)creates the
 * audioconvert ! audioresample [! volume] chain and re-targets the ghost
 * pads to it; when they are not raw, tears the chain down and bypasses it
 * by targeting the src ghost pad at the sink proxy pad directly.
 *
 * The object lock is held for the whole reconfiguration and released on
 * every exit path. Fixes over the previous version:
 *  - the link_failed path returned without
 *    GST_PLAY_SINK_AUDIO_CONVERT_UNLOCK, leaving the lock held forever
 *    (deadlock on the next callback); it now unlocks.
 *  - the inner local GstPad shadowed the `pad` parameter; renamed to
 *    `target_pad` (no behavior change, silences -Wshadow).
 */
static void
pad_blocked_cb (GstPad * pad, gboolean blocked, GstPlaySinkAudioConvert * self)
{
  GstPad *peer;
  GstCaps *caps;
  gboolean raw;

  GST_PLAY_SINK_AUDIO_CONVERT_LOCK (self);
  self->sink_proxypad_blocked = blocked;
  GST_DEBUG_OBJECT (self, "Pad blocked: %d", blocked);
  if (!blocked)
    goto done;

  /* There must be a peer at this point */
  peer = gst_pad_get_peer (self->sinkpad);
  caps = gst_pad_get_negotiated_caps (peer);
  if (!caps)
    caps = gst_pad_get_caps_reffed (peer);
  gst_object_unref (peer);

  raw = is_raw_caps (caps);
  GST_DEBUG_OBJECT (self, "Caps %" GST_PTR_FORMAT " are raw: %d", caps, raw);
  gst_caps_unref (caps);

  /* Nothing to do if the raw-ness didn't change. */
  if (raw == self->raw)
    goto unblock;
  self->raw = raw;

  if (raw) {
    GstBin *bin = GST_BIN_CAST (self);
    GstElement *head = NULL, *prev = NULL;
    GstPad *target_pad;

    GST_DEBUG_OBJECT (self, "Creating raw conversion pipeline");

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

    if (self->use_converters) {
      /* Each element is optional: a missing plugin only produces a
       * warning and the chain is built from whatever is available. */
      self->conv = gst_element_factory_make ("audioconvert", "conv");
      if (self->conv == NULL) {
        post_missing_element_message (self, "audioconvert");
        GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
            (_("Missing element '%s' - check your GStreamer installation."),
                "audioconvert"), ("audio rendering might fail"));
      } else {
        gst_bin_add (bin, self->conv);
        gst_element_sync_state_with_parent (self->conv);
        distribute_running_time (self->conv, &self->segment);
        prev = head = self->conv;
      }

      self->resample = gst_element_factory_make ("audioresample", "resample");
      if (self->resample == NULL) {
        post_missing_element_message (self, "audioresample");
        GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
            (_("Missing element '%s' - check your GStreamer installation."),
                "audioresample"), ("possibly a liboil version mismatch?"));
      } else {
        gst_bin_add (bin, self->resample);
        gst_element_sync_state_with_parent (self->resample);
        distribute_running_time (self->resample, &self->segment);
        if (prev) {
          if (!gst_element_link_pads_full (prev, "src", self->resample, "sink",
                  GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
            goto link_failed;
        } else {
          head = self->resample;
        }
        prev = self->resample;
      }
    }

    if (self->use_volume && self->volume) {
      /* The volume element is shared; the bin takes the extra ref. */
      gst_bin_add (bin, gst_object_ref (self->volume));
      gst_element_sync_state_with_parent (self->volume);
      distribute_running_time (self->volume, &self->segment);
      if (prev) {
        if (!gst_element_link_pads_full (prev, "src", self->volume, "sink",
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
          goto link_failed;
      } else {
        head = self->volume;
      }
      prev = self->volume;
    }

    /* Point the ghost pads at the chain's first/last element. */
    if (head) {
      target_pad = gst_element_get_static_pad (head, "sink");
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad),
          target_pad);
      gst_object_unref (target_pad);
    }

    if (prev) {
      target_pad = gst_element_get_static_pad (prev, "src");
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), target_pad);
      gst_object_unref (target_pad);
    }

    /* No element could be created at all: pass through. */
    if (!head && !prev) {
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
          self->sink_proxypad);
    }

    GST_DEBUG_OBJECT (self, "Raw conversion pipeline created");
  } else {
    GstBin *bin = GST_BIN_CAST (self);

    GST_DEBUG_OBJECT (self, "Removing raw conversion pipeline");

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

    if (self->conv) {
      gst_element_set_state (self->conv, GST_STATE_NULL);
      gst_bin_remove (bin, self->conv);
      self->conv = NULL;
    }
    if (self->resample) {
      gst_element_set_state (self->resample, GST_STATE_NULL);
      gst_bin_remove (bin, self->resample);
      self->resample = NULL;
    }
    if (self->volume) {
      gst_element_set_state (self->volume, GST_STATE_NULL);
      /* Only remove volume if it is actually ours (it may live elsewhere). */
      if (GST_OBJECT_PARENT (self->volume) == GST_OBJECT_CAST (self)) {
        gst_bin_remove (GST_BIN_CAST (self), self->volume);
      }
    }

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
        self->sink_proxypad);

    GST_DEBUG_OBJECT (self, "Raw conversion pipeline removed");
  }

unblock:
  gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
      (GstPadBlockCallback) pad_blocked_cb,
      gst_object_ref (self), (GDestroyNotify) gst_object_unref);

done:
  GST_PLAY_SINK_AUDIO_CONVERT_UNLOCK (self);
  return;

link_failed:
  {
    GST_ELEMENT_ERROR (self, CORE, PAD, (NULL),
        ("Failed to configure the audio converter."));
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
        self->sink_proxypad);
    gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
        (GstPadBlockCallback) pad_blocked_cb,
        gst_object_ref (self), (GDestroyNotify) gst_object_unref);
    /* Fix: release the object lock taken at function entry; previously this
     * path returned with the lock held, deadlocking later callbacks. */
    GST_PLAY_SINK_AUDIO_CONVERT_UNLOCK (self);
    return;
  }
}
/* Request-pad implementation: builds a per-pad conversion subpipeline
 * (audioconvert ! audioresample ! audioconvert ! sink) inside a child bin,
 * exposes its sink through two layers of ghost pads, and returns the outer
 * ghost pad to the caller.
 *
 * Returns: (transfer none): the new pad owned by @element, or NULL on error.
 */
static GstPad *
empathy_audio_sink_request_new_pad (GstElement *element,
  GstPadTemplate *templ,
  const gchar* name)
{
  EmpathyGstAudioSink *self = EMPATHY_GST_AUDIO_SINK (element);
  GstElement *bin, *resample, *audioconvert0, *audioconvert1;
  GstPad *pad = NULL;
  GstPad *subpad, *filterpad;
  /* Tracks whether ownership of the bin has been transferred to self:
   * gst_bin_add() sinks the bin's floating ref, so the error path must
   * remove it from the parent instead of unreffing it directly. */
  gboolean bin_in_self = FALSE;

  bin = gst_bin_new (NULL);

  audioconvert0 = gst_element_factory_make ("audioconvert", NULL);
  if (audioconvert0 == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), audioconvert0);

  resample = gst_element_factory_make ("audioresample", NULL);
  if (resample == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), resample);

  audioconvert1 = gst_element_factory_make ("audioconvert", NULL);
  if (audioconvert1 == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), audioconvert1);

  self->priv->sink = create_sink (self);
  if (self->priv->sink == NULL)
    goto error;

  if (GST_IS_STREAM_VOLUME (self->priv->sink))
    {
      /* Defer volume setup to an idle callback; guarded so only one
       * idle source is ever scheduled. */
      g_static_mutex_lock (&self->priv->volume_mutex);
      if (self->priv->volume_idle_id == 0)
        self->priv->volume_idle_id = g_idle_add (
          empathy_audio_sink_volume_idle_setup, self);
      g_static_mutex_unlock (&self->priv->volume_mutex);
    }
  else
    {
      gchar *n = gst_element_get_name (self->priv->sink);

      DEBUG ("Element %s doesn't support volume", n);
      g_free (n);
    }

  gst_bin_add (GST_BIN (bin), self->priv->sink);

  if (!gst_element_link_many (audioconvert0, resample, audioconvert1,
      self->priv->sink, NULL))
    goto error;

  filterpad = gst_element_get_static_pad (audioconvert0, "sink");
  if (filterpad == NULL)
    goto error;

  subpad = gst_ghost_pad_new ("sink", filterpad);
  /* The ghost pad holds its own reference to its target, so drop the
   * ref returned by gst_element_get_static_pad() (was leaked before). */
  gst_object_unref (filterpad);

  if (!gst_element_add_pad (GST_ELEMENT (bin), subpad))
    goto error;

  gst_bin_add (GST_BIN (self), bin);
  bin_in_self = TRUE;

  pad = gst_ghost_pad_new (name, subpad);
  g_assert (pad != NULL);

  if (!gst_element_sync_state_with_parent (bin))
    goto error;

  if (!gst_pad_set_active (pad, TRUE))
    goto error;

  if (!gst_element_add_pad (GST_ELEMENT (self), pad))
    goto error;

  return pad;

error:
  if (pad != NULL)
    {
      gst_object_unref (pad);
    }

  if (bin_in_self)
    gst_bin_remove (GST_BIN (self), bin);
  else
    gst_object_unref (bin);

  g_warning ("Failed to create output subpipeline");
  return NULL;
}
/* GObject property setter for KmsDummySrc.
 *
 * For each of the "data"/"audio"/"video" boolean properties, stores the new
 * value and, on the first transition to TRUE, lazily creates the matching
 * source element, adds it to the bin, links it to the element's tee /
 * agnosticbin and brings it to the parent's state.  Elements are never torn
 * down here: setting a property back to FALSE only updates the flag.
 *
 * The whole body runs under the element lock, so the NULL checks make the
 * lazy creation idempotent.
 */
static void
kms_dummy_src_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  KmsDummySrc *self = KMS_DUMMY_SRC (object);

  KMS_ELEMENT_LOCK (KMS_ELEMENT (self));
  switch (property_id) {
    case PROP_DATA:
      self->priv->data = g_value_get_boolean (value);

      if (self->priv->data && self->priv->dataappsrc == NULL) {
        GstElement *tee;
        GstCaps *caps;

        GST_DEBUG_OBJECT (self, "Creating data stream");
        tee = kms_element_get_data_tee (KMS_ELEMENT (self));
        caps = gst_caps_from_string (KMS_AGNOSTIC_DATA_CAPS);
        self->priv->dataappsrc = gst_element_factory_make ("appsrc", NULL);
        /* Live appsrc pushing time-stamped buffers on demand via the
         * "need-data" signal. */
        g_object_set (G_OBJECT (self->priv->dataappsrc), "is-live", TRUE,
            "caps", caps, "emit-signals", TRUE, "stream-type", 0,
            "format", GST_FORMAT_TIME, NULL);
        gst_caps_unref (caps);
        g_signal_connect (self->priv->dataappsrc, "need-data",
            G_CALLBACK (kms_dummy_src_feed_data_channel), self);
        gst_bin_add (GST_BIN (self), self->priv->dataappsrc);
        if (!gst_element_link_pads (self->priv->dataappsrc, "src", tee,
                "sink")) {
          GST_ERROR_OBJECT (self, "Could not link data appsrc to tee");
        }
        gst_element_sync_state_with_parent (self->priv->dataappsrc);
      }
      break;
    case PROP_AUDIO:
      self->priv->audio = g_value_get_boolean (value);

      if (self->priv->audio && self->priv->audioappsrc == NULL) {
        GstElement *agnosticbin;

        GST_DEBUG_OBJECT (self, "Creating audio stream");
        agnosticbin = kms_element_get_audio_agnosticbin (KMS_ELEMENT (self));
        self->priv->audioappsrc =
            gst_element_factory_make ("audiotestsrc", NULL);
        g_object_set (G_OBJECT (self->priv->audioappsrc), "is-live", TRUE,
            NULL);
        gst_bin_add (GST_BIN (self), self->priv->audioappsrc);
        if (!gst_element_link_pads (self->priv->audioappsrc, "src",
                agnosticbin, "sink")) {
          GST_ERROR_OBJECT (self, "Could not link audiotestsrc to agnosticbin");
        }
        gst_element_sync_state_with_parent (self->priv->audioappsrc);
      }
      break;
    case PROP_VIDEO:
      self->priv->video = g_value_get_boolean (value);

      if (self->priv->video && self->priv->videoappsrc == NULL) {
        GstElement *agnosticbin;

        GST_DEBUG_OBJECT (self, "Creating video stream");
        agnosticbin = kms_element_get_video_agnosticbin (KMS_ELEMENT (self));
        self->priv->videoappsrc =
            gst_element_factory_make ("videotestsrc", NULL);
        g_object_set (G_OBJECT (self->priv->videoappsrc), "is-live", TRUE,
            NULL);
        gst_bin_add (GST_BIN (self), self->priv->videoappsrc);
        if (!gst_element_link_pads (self->priv->videoappsrc, "src",
                agnosticbin, "sink")) {
          GST_ERROR_OBJECT (self, "Could not link videotestsrc to agnosticbin");
        }
        gst_element_sync_state_with_parent (self->priv->videoappsrc);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
  KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
}