static GstPad * gst_gl_mixer_bin_request_new_pad (GstElement * element, GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps) { GstGLMixerBin *self = GST_GL_MIXER_BIN (element); GstPadTemplate *mixer_templ; struct input_chain *chain; GstPad *mixer_pad; chain = g_new0 (struct input_chain, 1); mixer_templ = _find_element_pad_template (self->mixer, GST_PAD_TEMPLATE_DIRECTION (templ), GST_PAD_TEMPLATE_PRESENCE (templ)); g_return_val_if_fail (mixer_templ, NULL); mixer_pad = gst_element_request_pad (self->mixer, mixer_templ, req_name, NULL); g_return_val_if_fail (mixer_pad, NULL); if (!_create_input_chain (self, chain, mixer_pad)) { gst_element_release_request_pad (self->mixer, mixer_pad); _free_input_chain (chain); return NULL; } GST_OBJECT_LOCK (element); self->priv->input_chains = g_list_prepend (self->priv->input_chains, chain); GST_OBJECT_UNLOCK (element); gst_child_proxy_child_added (GST_CHILD_PROXY (self), G_OBJECT (chain->ghost_pad), GST_OBJECT_NAME (chain->ghost_pad)); return GST_PAD (chain->ghost_pad); }
/****************************************************
 *               GstElement vmethods                *
 ****************************************************/
static GstPad *
_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * name, const GstCaps * caps)
{
  GstPad *audioresample_srcpad, *audioconvert_sinkpad, *tmpghost;
  GstPad *ghost;
  GstElement *audioconvert, *audioresample;
  PadInfos *infos = g_slice_new0 (PadInfos);
  GESSmartAdder *self = GES_SMART_ADDER (element);

  infos->adder_pad = gst_element_request_pad (self->adder, templ, NULL, caps);
  if (infos->adder_pad == NULL) {
    GST_WARNING_OBJECT (element, "Could not get any pad from GstAdder");
    /* avoid leaking infos on failure */
    g_slice_free (PadInfos, infos);
    return NULL;
  }

  infos->self = self;
  infos->bin = gst_bin_new (NULL);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  audioresample = gst_element_factory_make ("audioresample", NULL);

  gst_bin_add_many (GST_BIN (infos->bin), audioconvert, audioresample, NULL);
  gst_element_link_many (audioconvert, audioresample, NULL);

  audioconvert_sinkpad = gst_element_get_static_pad (audioconvert, "sink");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioconvert_sinkpad));
  gst_object_unref (audioconvert_sinkpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);

  gst_bin_add (GST_BIN (self), infos->bin);

  ghost = gst_ghost_pad_new (NULL, tmpghost);
  gst_pad_set_active (ghost, TRUE);
  if (!gst_element_add_pad (GST_ELEMENT (self), ghost))
    goto could_not_add;

  audioresample_srcpad = gst_element_get_static_pad (audioresample, "src");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioresample_srcpad));
  gst_object_unref (audioresample_srcpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);
  gst_pad_link (tmpghost, infos->adder_pad);

  LOCK (self);
  g_hash_table_insert (self->pads_infos, ghost, infos);
  UNLOCK (self);

  GST_DEBUG_OBJECT (self, "Returning new pad %" GST_PTR_FORMAT, ghost);

  return ghost;

could_not_add:
  {
    GST_ERROR_OBJECT (self, "could not add pad");
    destroy_pad (infos);
    return NULL;
  }
}
/**
 * \brief Create a branch from input, to branch1 and branch2. In this case it
 *        is used to add a typefind element for ROI management and the udpsrc
 * \param pipeline the pipeline associated to this SP
 * \param input last element added in pipeline from which we want to branch
 * \param branch1 the next element in branch 1
 * \param branch2 the next element in branch 2
 * \return TRUE on success, FALSE on failure
 */
static gboolean
create_branch_in_pipeline (GstElement *pipeline, GstElement *input,
    GstElement *branch1, GstElement *branch2)
{
  GstPadTemplate *tee_src_pad_template;
  GstPad *tee_branch1_src_pad, *tee_branch2_src_pad;
  GstPad *branch1_sink_pad, *branch2_sink_pad;

  g_debug ("create a branch in pipeline, using %s", TEE_NAME);

  /* Create the tee element, used to create the branches in the pipeline */
  GstElement *tee = gst_element_factory_make_log ("tee", TEE_NAME);

  /* add it to the pipeline */
  if (!gst_bin_add (GST_BIN (pipeline), tee)) {
    g_critical ("Failed to add %s element in pipeline", TEE_NAME);
    return FALSE;
  }

  if (!gst_element_link_log (input, tee))
    return FALSE;

  /* retrieve the tee source pad template (after linking it to the input element) */
  tee_src_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");

  /* request a tee source pad and get the sink pad from branch 1 */
  tee_branch1_src_pad =
      gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
  branch1_sink_pad = gst_element_get_static_pad (branch1, "sink");

  /* request a tee source pad and get the sink pad from branch 2 */
  tee_branch2_src_pad =
      gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
  branch2_sink_pad = gst_element_get_static_pad (branch2, "sink");

  if (gst_pad_link (tee_branch1_src_pad, branch1_sink_pad) != GST_PAD_LINK_OK ||
      gst_pad_link (tee_branch2_src_pad, branch2_sink_pad) != GST_PAD_LINK_OK) {
    g_critical ("%s could not be linked to %s and %s", TEE_NAME,
        GST_ELEMENT_NAME (branch1), GST_ELEMENT_NAME (branch2));
    return FALSE;
  }

  gst_object_unref (branch1_sink_pad);
  gst_object_unref (branch2_sink_pad);

  return TRUE;
}
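For context, a call site for the helper above might look like the following sketch. The `source`, `typefind`, and `udpsink` variables and element choices are illustrative assumptions, not taken from the original project.

/* Hypothetical call site for create_branch_in_pipeline(): split the stream
 * after the last linked element into a typefind branch (ROI management)
 * and a UDP branch. All names here are assumptions. */
GstElement *typefind = gst_element_factory_make ("typefind", NULL);
GstElement *udpsink = gst_element_factory_make ("udpsink", NULL);

gst_bin_add_many (GST_BIN (pipeline), typefind, udpsink, NULL);

if (!create_branch_in_pipeline (pipeline, source, typefind, udpsink))
  g_critical ("could not branch the pipeline after %s",
      GST_ELEMENT_NAME (source));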
static GstPad * gst_switch_get_case_sink_pad (GstElement * swcase, const GstCaps * caps) { GstPad *basepad = gst_element_get_static_pad (swcase, "sink"); if (!basepad) { gint num = GST_ELEMENT (swcase)->numsinkpads; gchar *name = g_strdup_printf ("sink_%u", num); basepad = gst_element_request_pad (swcase, gst_static_pad_template_get (&gst_switch_sink_factory), name, caps); g_free (name); } return basepad; }
MxGstGraphElementPad * mx_gst_graph_element_pad_new (GstStaticPadTemplate *pad_template, GstElement *element) { GstPad *pad = NULL; if(GST_PAD_ALWAYS == pad_template->presence) { pad = gst_element_get_pad(element, (gchar *)pad_template->name_template); } else { pad = gst_element_request_pad(element, gst_static_pad_template_get(pad_template), NULL, NULL); } GString *caps_str = _print_caps(gst_pad_get_caps(pad), FALSE); gchar *name = g_strdup_printf("Name: %s", gst_pad_get_name(pad)); MxGstGraphElementPad *eltPad = MX_GST_GRAPH_ELEMENT_PAD( g_object_new(MX_TYPE_GST_GRAPH_ELEMENT_PAD, "name", name, "blurb", caps_str->str, "is-compatible-func", mx_gst_pad_is_compatible, NULL)); g_string_free(caps_str, TRUE); MxGstGraphElementPadPrivate *priv = eltPad->priv; priv->pad_template = pad_template; priv->pad = pad; priv->position = (GST_PAD_SRC == pad_template->direction) ? PAD_POSITION_EAST : (GST_PAD_SINK == pad_template->direction) ? PAD_POSITION_WEST: PAD_POSITION_SOUTH; mx_gst_graph_element_pad_create_info_txt (eltPad); return eltPad; }
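The example above uses 0.10-era calls such as gst_element_get_pad() and gst_pad_get_caps(). A rough 1.0-API equivalent of just the pad lookup, written as a standalone sketch (the helper name is an assumption), could be:

/* Sketch: obtain a pad for a static pad template with the GStreamer 1.0 API.
 * Always pads are fetched directly; other templates go through
 * gst_element_request_pad() with a template built from the static one. */
static GstPad *
get_pad_for_static_template (GstElement * element,
    GstStaticPadTemplate * static_templ)
{
  if (static_templ->presence == GST_PAD_ALWAYS)
    return gst_element_get_static_pad (element, static_templ->name_template);

  GstPadTemplate *templ = gst_static_pad_template_get (static_templ);
  GstPad *pad = gst_element_request_pad (element, templ, NULL, NULL);

  gst_object_unref (templ);
  return pad;
}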
static gboolean kms_webrtc_data_session_bin_link_data_channel_src (KmsWebRtcDataSessionBin * self, GstElement * channel) { GstPadTemplate *pad_template; GstPad *srcpad, *sinkpad; guint sctp_stream_id; gboolean ret = FALSE; GstCaps *caps; gchar *name; g_object_get (G_OBJECT (channel), "id", &sctp_stream_id, NULL); caps = kms_webrtc_data_channel_bin_create_caps (KMS_WEBRTC_DATA_CHANNEL_BIN (channel)); if (caps == NULL) { return FALSE; } pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self-> priv->sctpenc), "sink_%u"); name = g_strdup_printf ("sink_%u", sctp_stream_id); sinkpad = gst_element_request_pad (self->priv->sctpenc, pad_template, name, caps); g_free (name); srcpad = gst_element_get_static_pad (channel, "src"); ret = gst_pad_link (srcpad, sinkpad) == GST_PAD_LINK_OK; g_object_unref (srcpad); g_object_unref (sinkpad); gst_caps_unref (caps); return ret; }
static void cb_pad_added (GstElement *dec, GstPad *pad, gpointer data) { GstCaps *caps; GstStructure *str; const gchar *name; GstPadTemplate *templ; GstElementClass *klass; /* check media type */ caps = gst_pad_query_caps (pad, NULL); str = gst_caps_get_structure (caps, 0); name = gst_structure_get_name (str); klass = GST_ELEMENT_GET_CLASS (sink); if (g_str_has_prefix (name, "audio")) templ = gst_element_class_get_pad_template (klass, "audio_sink"); else if (g_str_has_prefix (name, "video")) templ = gst_element_class_get_pad_template (klass, "video_sink"); else if (g_str_has_prefix (name, "text")) templ = gst_element_class_get_pad_template (klass, "text_sink"); else templ = NULL; if (templ) { GstPad *sinkpad; sinkpad = gst_element_request_pad (sink, templ, NULL, NULL); if (!gst_pad_is_linked (sinkpad)) gst_pad_link (pad, sinkpad); gst_object_unref (sinkpad); } }
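The callback above assumes a global `sink` element whose class provides "audio_sink", "video_sink" and "text_sink" request pad templates. A minimal hookup sketch follows; the `sink` global, `build_pipeline` helper and element choices are assumptions (playsink is one element that exposes such templates):

/* Minimal hookup sketch for the pad-added callback above. */
static GstElement *sink;

static GstElement *
build_pipeline (const gchar * uri)
{
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstElement *dec = gst_element_factory_make ("uridecodebin", "dec");

  /* playsink exposes audio_sink / video_sink / text_sink request pads */
  sink = gst_element_factory_make ("playsink", "sink");

  g_object_set (dec, "uri", uri, NULL);
  gst_bin_add_many (GST_BIN (pipeline), dec, sink, NULL);

  /* link each decoded stream to the sink as it appears */
  g_signal_connect (dec, "pad-added", G_CALLBACK (cb_pad_added), NULL);

  return pipeline;
}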
/****************************************************
 *               GstElement vmethods                *
 ****************************************************/
static GstPad *
_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * name, const GstCaps * caps)
{
  GstPad *videoconvert_srcpad, *videoconvert_sinkpad, *tmpghost;
  PadInfos *infos = g_slice_new0 (PadInfos);
  GESSmartMixer *self = GES_SMART_MIXER (element);
  GstPad *ghost;
  GstElement *videoconvert;

  infos->mixer_pad = gst_element_request_pad (self->mixer,
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self->mixer),
          "sink_%u"), NULL, NULL);

  if (infos->mixer_pad == NULL) {
    GST_WARNING_OBJECT (element, "Could not get any pad from GstMixer");
    g_slice_free (PadInfos, infos);
    return NULL;
  }

  infos->self = self;
  infos->bin = gst_bin_new (NULL);
  videoconvert = gst_element_factory_make ("videoconvert", NULL);

  gst_bin_add (GST_BIN (infos->bin), videoconvert);

  videoconvert_sinkpad = gst_element_get_static_pad (videoconvert, "sink");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, videoconvert_sinkpad));
  gst_object_unref (videoconvert_sinkpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);

  gst_bin_add (GST_BIN (self), infos->bin);

  ghost = gst_ghost_pad_new (NULL, tmpghost);
  gst_pad_set_active (ghost, TRUE);
  if (!gst_element_add_pad (GST_ELEMENT (self), ghost))
    goto could_not_add;

  videoconvert_srcpad = gst_element_get_static_pad (videoconvert, "src");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, videoconvert_srcpad));
  gst_object_unref (videoconvert_srcpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);
  gst_pad_link (tmpghost, infos->mixer_pad);

  infos->probe_id = gst_pad_add_probe (infos->mixer_pad,
      GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) parse_metadata,
      self, NULL);

  LOCK (self);
  g_hash_table_insert (self->pads_infos, ghost, infos);
  UNLOCK (self);

  GST_DEBUG_OBJECT (self, "Returning new pad %" GST_PTR_FORMAT, ghost);

  return ghost;

could_not_add:
  {
    GST_ERROR_OBJECT (self, "could not add pad");
    destroy_pad (infos);
    return NULL;
  }
}
static gboolean
pad_added_cb (GstElement * element, GstPad * new_pad, InsanityTest * test)
{
  GstElement *fakesink;
  GstPadTemplate *mqsinktmpl;
  GstPadLinkReturn linkret;
  GstIterator *it = NULL;
  GstCaps *caps = NULL;
  gboolean ret = TRUE;
  gulong probe_id;
  GstPad *mqsinkpad = NULL, *mqsrcpad = NULL, *ssinkpad = NULL,
      *decodesinkpad = NULL, *decodesrcpad = NULL, *tmppad;

  DECODER_TEST_LOCK ();

  /* First check if the pad caps are compatible with the decoder or the parser */
  caps = gst_pad_get_current_caps (new_pad);
  if (glob_parser)
    decodesinkpad = gst_element_get_compatible_pad (glob_parser, new_pad, caps);
  else
    decodesinkpad = gst_element_get_compatible_pad (glob_decoder, new_pad, caps);

  if (decodesinkpad == NULL)
    goto error;

  mqsinktmpl =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
      (glob_multiqueue), "sink%d");

  if (mqsinktmpl == NULL)
    goto error;

  mqsinkpad = gst_element_request_pad (glob_multiqueue, mqsinktmpl, NULL, NULL);

  it = gst_pad_iterate_internal_links (mqsinkpad);
  if (!it || (gst_iterator_next (it, (gpointer) & mqsrcpad)) != GST_ITERATOR_OK
      || mqsrcpad == NULL) {
    ERROR (test, "Couldn't get srcpad from multiqueue for sinkpad %"
        GST_PTR_FORMAT, mqsinkpad);
    goto error;
  }

  /* Finish creating and add to bin */
  fakesink = gst_element_factory_make ("fakesink", NULL);
  gst_bin_add (GST_BIN (glob_pipeline), fakesink);
  gst_element_sync_state_with_parent (fakesink);
  gst_element_sync_state_with_parent (glob_decoder);

  linkret = gst_pad_link (new_pad, mqsinkpad);
  if (linkret != GST_PAD_LINK_OK) {
    ERROR (test, "Linking %" GST_PTR_FORMAT " with %" GST_PTR_FORMAT,
        new_pad, mqsinkpad);
    goto error;
  }

  /* Link to the decoder */
  linkret = gst_pad_link (mqsrcpad, decodesinkpad);
  if (linkret != GST_PAD_LINK_OK) {
    ERROR (test, "Linking %" GST_PTR_FORMAT " with %" GST_PTR_FORMAT,
        mqsrcpad, decodesinkpad);
    goto error;
  }

  if (glob_parser) {
    if (!gst_element_link (glob_parser, glob_decoder)) {
      ERROR (test, "Linking parser with decoder");
      goto error;
    }
  }

  /* Now link to the fakesink */
  decodesrcpad = gst_element_get_static_pad (glob_decoder, "src");
  if (decodesrcpad == NULL) {
    ERROR (test, "Getting decoder srcpad");
    goto error;
  }

  ssinkpad = gst_element_get_static_pad (fakesink, "sink");
  if (ssinkpad == NULL) {
    ERROR (test, "Getting fakesink sinkpad");
    goto error;
  }

  linkret = gst_pad_link (decodesrcpad, ssinkpad);
  if (linkret != GST_PAD_LINK_OK) {
    ERROR (test, "Linking %" GST_PTR_FORMAT " with %" GST_PTR_FORMAT,
        decodesrcpad, ssinkpad);
    goto error;
  }

  /* And install a probe to the decoder src pad */
  if (insanity_gst_test_add_data_probe (INSANITY_GST_TEST (test),
          GST_BIN (glob_pipeline), GST_OBJECT_NAME (glob_decoder),
          GST_ELEMENT_NAME (decodesrcpad), &tmppad, &probe_id,
          &probe_cb, NULL, NULL) == TRUE) {
    glob_prob_ctx = g_slice_new0 (ProbeContext);
    glob_prob_ctx->probe_id = probe_id;
    glob_prob_ctx->pad = tmppad;
    glob_prob_ctx->decoder = glob_decoder;
    glob_prob_ctx->fakesink = fakesink;
    glob_prob_ctx->test = test;
    insanity_test_validate_checklist_item (test, "install-probes", TRUE, NULL);
  } else {
    insanity_test_validate_checklist_item (test, "install-probes", FALSE,
        "Failed to attach probe to fakesink");
    /* No reason to keep the test alive if there is a probe we can't add */
    insanity_test_done (test);
    goto error;
  }

  if (glob_media_desc_parser)
    media_descriptor_parser_add_stream (glob_media_desc_parser, new_pad);

done:
  DECODER_TEST_UNLOCK ();

  if (it)
    gst_iterator_free (it);
  if (decodesinkpad)
    gst_object_unref (decodesinkpad);
  if (caps)
    gst_caps_unref (caps);
  if (mqsinkpad)
    gst_object_unref (mqsinkpad);
  if (ssinkpad)
    gst_object_unref (ssinkpad);

  return ret;

error:
  ret = FALSE;
  goto done;
}
static GstPad * gst_splitmux_sink_request_new_pad (GstElement * element, GstPadTemplate * templ, const gchar * name, const GstCaps * caps) { GstSplitMuxSink *splitmux = (GstSplitMuxSink *) element; GstPadTemplate *mux_template = NULL; GstPad *res = NULL; GstPad *mq_sink, *mq_src; gchar *gname; gboolean is_video = FALSE; MqStreamCtx *ctx; GST_DEBUG_OBJECT (element, "templ:%s, name:%s", templ->name_template, name); GST_SPLITMUX_LOCK (splitmux); if (!create_elements (splitmux)) goto fail; if (templ->name_template) { if (g_str_equal (templ->name_template, "video")) { /* FIXME: Look for a pad template with matching caps, rather than by name */ mux_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (splitmux->muxer), "video_%u"); is_video = TRUE; name = NULL; } else { mux_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (splitmux->muxer), templ->name_template); } } res = gst_element_request_pad (splitmux->muxer, mux_template, name, caps); if (res == NULL) goto fail; if (is_video) gname = g_strdup ("video"); else if (name == NULL) gname = gst_pad_get_name (res); else gname = g_strdup (name); if (!get_pads_from_mq (splitmux, &mq_sink, &mq_src)) { gst_element_release_request_pad (splitmux->muxer, res); gst_object_unref (GST_OBJECT (res)); goto fail; } if (gst_pad_link (mq_src, res) != GST_PAD_LINK_OK) { gst_element_release_request_pad (splitmux->muxer, res); gst_object_unref (GST_OBJECT (res)); gst_element_release_request_pad (splitmux->mq, mq_sink); gst_object_unref (GST_OBJECT (mq_sink)); goto fail; } gst_object_unref (GST_OBJECT (res)); ctx = mq_stream_ctx_new (splitmux); ctx->is_video = is_video; ctx->srcpad = mq_src; ctx->sinkpad = mq_sink; mq_stream_ctx_ref (ctx); ctx->src_pad_block_id = gst_pad_add_probe (mq_src, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM, (GstPadProbeCallback) handle_mq_output, ctx, (GDestroyNotify) _pad_block_destroy_src_notify); if (is_video) splitmux->video_ctx = ctx; res = gst_ghost_pad_new (gname, mq_sink); g_object_set_qdata ((GObject *) (res), PAD_CONTEXT, ctx); mq_stream_ctx_ref (ctx); ctx->sink_pad_block_id = gst_pad_add_probe (res, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM, (GstPadProbeCallback) handle_mq_input, ctx, (GDestroyNotify) _pad_block_destroy_sink_notify); GST_DEBUG_OBJECT (splitmux, "Request pad %" GST_PTR_FORMAT " is mq pad %" GST_PTR_FORMAT, res, mq_sink); splitmux->contexts = g_list_prepend (splitmux->contexts, ctx); g_free (gname); gst_object_unref (mq_sink); gst_object_unref (mq_src); gst_pad_set_active (res, TRUE); gst_element_add_pad (element, res); GST_SPLITMUX_UNLOCK (splitmux); return res; fail: GST_SPLITMUX_UNLOCK (splitmux); return NULL; }
static GstPadProbeReturn link_to_videomixer (GstPad * pad, GstPadProbeInfo * info, KmsCompositeMixerData * data) { GstPadTemplate *sink_pad_template; KmsCompositeMixer *mixer; if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_STREAM_START) { return GST_PAD_PROBE_PASS; } mixer = KMS_COMPOSITE_MIXER (data->mixer); GST_DEBUG ("stream start detected %d", data->id); KMS_COMPOSITE_MIXER_LOCK (mixer); data->link_probe_id = 0; sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer->priv-> videomixer), "sink_%u"); if (G_UNLIKELY (sink_pad_template == NULL)) { GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer"); KMS_COMPOSITE_MIXER_UNLOCK (mixer); return GST_PAD_PROBE_DROP; } if (mixer->priv->videotestsrc == NULL) { GstElement *capsfilter; GstCaps *filtercaps; GstPad *pad; mixer->priv->videotestsrc = gst_element_factory_make ("videotestsrc", NULL); capsfilter = gst_element_factory_make ("capsfilter", NULL); g_object_set (mixer->priv->videotestsrc, "is-live", TRUE, "pattern", /*black */ 2, NULL); filtercaps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "AYUV", "width", G_TYPE_INT, mixer->priv->output_width, "height", G_TYPE_INT, mixer->priv->output_height, "framerate", GST_TYPE_FRACTION, 15, 1, NULL); g_object_set (G_OBJECT (capsfilter), "caps", filtercaps, NULL); gst_caps_unref (filtercaps); gst_bin_add_many (GST_BIN (mixer), mixer->priv->videotestsrc, capsfilter, NULL); gst_element_link (mixer->priv->videotestsrc, capsfilter); /*link capsfilter -> videomixer */ pad = gst_element_request_pad (mixer->priv->videomixer, sink_pad_template, NULL, NULL); gst_element_link_pads (capsfilter, NULL, mixer->priv->videomixer, GST_OBJECT_NAME (pad)); g_object_set (pad, "xpos", 0, "ypos", 0, "alpha", 0.0, NULL); g_object_unref (pad); gst_element_sync_state_with_parent (capsfilter); gst_element_sync_state_with_parent (mixer->priv->videotestsrc); } data->videoscale = gst_element_factory_make ("videoscale", NULL); data->capsfilter = gst_element_factory_make ("capsfilter", NULL); data->videorate = gst_element_factory_make ("videorate", NULL); data->queue = gst_element_factory_make ("queue", NULL); data->input = TRUE; gst_bin_add_many (GST_BIN (mixer), data->queue, data->videorate, data->videoscale, data->capsfilter, NULL); g_object_set (data->videorate, "average-period", 200 * GST_MSECOND, NULL); g_object_set (data->queue, "flush-on-eos", TRUE, "max-size-buffers", 60, NULL); gst_element_link_many (data->videorate, data->queue, data->videoscale, data->capsfilter, NULL); /*link capsfilter -> videomixer */ data->video_mixer_pad = gst_element_request_pad (mixer->priv->videomixer, sink_pad_template, NULL, NULL); gst_element_link_pads (data->capsfilter, NULL, mixer->priv->videomixer, GST_OBJECT_NAME (data->video_mixer_pad)); gst_element_link (data->videoconvert, data->videorate); data->probe_id = gst_pad_add_probe (data->video_mixer_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, (GstPadProbeCallback) cb_EOS_received, KMS_COMPOSITE_MIXER_REF (data), (GDestroyNotify) kms_ref_struct_unref); gst_element_sync_state_with_parent (data->videoscale); gst_element_sync_state_with_parent (data->capsfilter); gst_element_sync_state_with_parent (data->videorate); gst_element_sync_state_with_parent (data->queue); /*recalculate the output sizes */ mixer->priv->n_elems++; kms_composite_mixer_recalculate_sizes (mixer); KMS_COMPOSITE_MIXER_UNLOCK (mixer); return GST_PAD_PROBE_REMOVE; }
bool GstShow::init_pipeline(const int xwinid) { pipeline = gst_pipeline_new ("xvoverlay"); //create base pipeline elements videosink = gst_element_factory_make("xvimagesink", NULL); gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (videosink), xwinid); mixer = gst_element_factory_make("videomixer", "mix"); ///* Manually linking the videoboxes to the mixer */ GstPadTemplate *mixer_sink_pad_template = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mixer), "sink_%u"); if(mixer_sink_pad_template == NULL) { g_printerr("Could not get mixer pad template.\n"); // gst_object_unref(something); return false; } GstPad* mixerpads[2]; mixerpads[0] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL); mixerpads[1] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL); g_object_set(mixerpads[0], "xpos", 0, NULL); g_object_set(mixerpads[0], "ypos", 0, NULL); g_object_set(mixerpads[0], "alpha",1.0, NULL); g_object_set(mixerpads[1], "xpos", 640, NULL); g_object_set(mixerpads[1], "ypos", 0, NULL); g_object_set(mixerpads[1], "alpha",1.0, NULL); gst_object_unref(mixerpads[0]); gst_object_unref(mixerpads[1]); // prepare queue and scale for (int i = 0; i<2; i++) { queue[i] = gst_element_factory_make("queue", NULL); scale[i] = gst_element_factory_make("videoscale", NULL); scalefilter[i] = gst_element_factory_make("capsfilter", NULL); GstCaps *caps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, //"format", G_TYPE_STRING, "BGR", NULL); caps = gst_caps_fixate(caps); g_object_set(G_OBJECT(scalefilter[i]), "caps", caps, NULL); gst_caps_unref(caps); } gst_bin_add_many(GST_BIN(pipeline), queue[0], queue[1], scale[0], scale[1], scalefilter[0], scalefilter[1], mixer, videosink, NULL); return true; }
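init_pipeline() above requests and positions two compositor pads but leaves the input branches unlinked. A possible continuation, written as a plain C-style helper rather than a class method (the member names follow the code above; the pad names "sink_0"/"sink_1" assume the usual sequential naming produced by the "sink_%u" template):

/* Sketch: link both input branches into the mixer pads requested in
 * init_pipeline() and feed the mixer to the video sink. Assumes the
 * request pads were named sink_0 and sink_1 by the "sink_%u" template. */
static gboolean
link_branches (GstElement * queue[2], GstElement * scale[2],
    GstElement * scalefilter[2], GstElement * mixer, GstElement * videosink)
{
  for (int i = 0; i < 2; i++) {
    gchar *padname = g_strdup_printf ("sink_%d", i);
    gboolean ok =
        gst_element_link_many (queue[i], scale[i], scalefilter[i], NULL) &&
        gst_element_link_pads (scalefilter[i], "src", mixer, padname);

    g_free (padname);
    if (!ok)
      return FALSE;
  }
  return gst_element_link (mixer, videosink);
}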
/******************************************************************************* Gstreamer pipeline creation and init *******************************************************************************/ int vc_gst_pipeline_init(vc_data *data) { GstStateChangeReturn ret; GstElement *rtp_udpsrc, *rtcp_udpsrc, *rtcp_udpsink, *decoder, *depayloader, *converter, *sink, *rtpbin; GstCaps *caps; /* Request Pads */ /* Template */ GstPadTemplate* rtpbin_pad_template; /* TODO - Find a way to free the pads when the pipeline is closed */ /* Create a new GMainLoop */ data->gst_data.loop = g_main_loop_new (NULL, FALSE); data->gst_data.context = g_main_loop_get_context(data->gst_data.loop); /* Create gstreamer elements */ data->gst_data.pipeline = gst_pipeline_new ("videoclient"); VC_CHECK_ELEMENT_ERROR(data->gst_data.pipeline, "pipeline"); /* * RTP UDP Source - for received RTP messages */ rtp_udpsrc = gst_element_factory_make ("udpsrc", "rtp-udpsrc"); VC_CHECK_ELEMENT_ERROR(rtp_udpsrc,"rtp-udpsrc"); g_print ("Setting RTP source port to: %d\n", data->cfg.rtp_recv_port); g_object_set (G_OBJECT (rtp_udpsrc),"port", data->cfg.rtp_recv_port, NULL); /* Create GstCaps structure from string. This function allocates memory for the structure */ caps = gst_caps_from_string( "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264"); g_object_set (G_OBJECT (rtp_udpsrc), "caps", caps, NULL); gst_caps_unref(caps); /* Free the structure */ /* * RTCP UDP Source */ rtcp_udpsrc = gst_element_factory_make ("udpsrc", "rtcp-udpsrc"); VC_CHECK_ELEMENT_ERROR(rtcp_udpsrc,"rtcp-udpsrc"); g_print ("Setting RTCP udp source port to: %d\n", data->cfg.rtcp_recv_port); g_object_set (G_OBJECT (rtcp_udpsrc), "port", data->cfg.rtcp_recv_port, NULL); /* * RTCP UDP Sink (transmits data from rtpbin back to server) */ rtcp_udpsink = gst_element_factory_make ("udpsink", "rtcp-udpsink"); VC_CHECK_ELEMENT_ERROR(rtcp_udpsink,"rtcp-udpsink"); g_print ("Setting RTCP udp sink port to: %d\n", data->cfg.rtcp_send_port); g_object_set (G_OBJECT (rtcp_udpsink), "host", data->cfg.server_ip_addr, "port", data->cfg.rtcp_send_port, "sync", FALSE, "async", FALSE, NULL); /* * RTP Bin - Automates RTP/RTCP management */ rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin"); VC_CHECK_ELEMENT_ERROR(rtpbin,"gstrtpbin"); /* * Request pads from rtpbin, starting with the RTP receive sink pad, * This pad receives RTP data from the network (rtp-udpsrc). 
*/ rtpbin_pad_template = gst_element_class_get_pad_template ( GST_ELEMENT_GET_CLASS (rtpbin), "recv_rtp_sink_%d"); /* Use the template to request the pad */ data->gst_data.recv_rtp_sink_pad = gst_element_request_pad (rtpbin, rtpbin_pad_template, "recv_rtp_sink_0", NULL); /* Print the name for confirmation */ g_print ("A new pad %s was created\n", gst_pad_get_name (data->gst_data.recv_rtp_sink_pad)); rtpbin_pad_template = gst_element_class_get_pad_template ( GST_ELEMENT_GET_CLASS (rtpbin), "recv_rtcp_sink_%d"); data->gst_data.recv_rtcp_sink_pad = gst_element_request_pad (rtpbin, rtpbin_pad_template, "recv_rtcp_sink_0", NULL); g_print ("A new pad %s was created\n", gst_pad_get_name (data->gst_data.recv_rtcp_sink_pad)); rtpbin_pad_template = gst_element_class_get_pad_template ( GST_ELEMENT_GET_CLASS (rtpbin), "send_rtcp_src_%d"); data->gst_data.send_rtcp_src_pad = gst_element_request_pad (rtpbin, rtpbin_pad_template, "send_rtcp_src_0", NULL); g_print ("A new pad %s was created\n", gst_pad_get_name (data->gst_data.send_rtcp_src_pad)); /* Set the latency of the rtpbin */ g_object_set (G_OBJECT (rtpbin), "latency", DEFAULT_LATENCY_MS, "rtcp-sync-interval",1000, NULL); /* * RTP H.264 Depayloader */ depayloader = gst_element_factory_make ("rtph264depay","depayloader"); VC_CHECK_ELEMENT_ERROR(depayloader,"rtph264depay"); data->gst_data.depayloader = depayloader; /* If we are ARM architecture, then assume that we are an i.MX processor and build the pipeline to decode and display using the i.MX plugins */ #ifdef __arm__ int assume_imx = 1; #else int assume_imx = 0; #endif if (assume_imx){ /* * i.MX VPU decoder */ decoder = gst_element_factory_make ("vpudec", "decoder"); VC_CHECK_ELEMENT_ERROR(decoder,"vpudec"); /* * i.MX Video sink */ sink = gst_element_factory_make ("mfw_v4lsink", "sink"); VC_CHECK_ELEMENT_ERROR(sink,"mfw_v4lsink"); /* Set max lateness to .5 seconds */ g_object_set (G_OBJECT(sink), "max-lateness", (long long)50000000, NULL); g_object_set (G_OBJECT(sink), "sync", FALSE, NULL); g_object_set (G_OBJECT(sink), "device", "/dev/video16",NULL); /* Add elements into the pipeline */ g_print(" Adding elements to pipeline...\n"); gst_bin_add_many (GST_BIN (data->gst_data.pipeline), rtp_udpsrc, rtcp_udpsrc, rtpbin, rtcp_udpsink, depayloader, decoder, sink, NULL); /* Link some of the elements together */ g_print(" Linking some elements...\n"); if(!gst_element_link_many (depayloader, decoder, sink, NULL)) g_print("Error: could not link the depayloader, decoder, and sink\n"); } else { /* * ffmpeg decoder */ decoder = gst_element_factory_make ("ffdec_h264", "decoder"); VC_CHECK_ELEMENT_ERROR(decoder,"ffdec_h264"); /* * */ converter = gst_element_factory_make ("ffmpegcolorspace", "converter"); VC_CHECK_ELEMENT_ERROR(converter,"ffmpegcolorspace"); /* * i.MX Video sink */ sink = gst_element_factory_make ("autovideosink", "sink"); VC_CHECK_ELEMENT_ERROR(sink,"autovideosink"); /* Add elements into the pipeline */ g_print(" Adding elements to pipeline...\n"); gst_bin_add_many (GST_BIN (data->gst_data.pipeline), rtp_udpsrc, rtcp_udpsrc, rtpbin, rtcp_udpsink, depayloader, converter, decoder, sink, NULL); /* Link some of the elements together */ g_print(" Linking some elements...\n"); if(!gst_element_link_many (depayloader, decoder, converter, sink, NULL)) g_print("Error: could not link the depayloader, decoder, converter, and sink\n"); } /* * Connect to the pad-added signal for the rtpbin. This allows us to link * the dynamic RTP source pad to the depayloader when it is created. 
*/ if(!g_signal_connect (rtpbin, "pad-added", G_CALLBACK (vc_pad_added_handler), data)) g_print("Error: could not add signal handler\n"); /* * Connect the on-timeout signal */ if(!g_signal_connect (rtpbin, "on-timeout", G_CALLBACK (vc_on_timeout_handler), data)) g_print("Error: could not add on-timeout signal handler\n"); /* Link some of the elements together */ g_print(" Linking RTP and RTCP sources to rtpbin...\n"); /* Link the payloader src pad to the rtpbin send_vrtp_sink_pad */ if(!gst_element_link_pads(rtp_udpsrc, "src", rtpbin, "recv_rtp_sink_0")) g_print("Error: could not link udp source to rtp sink\n"); /* Link the rtpbin send_vrtp_src_pad to the rtp_udpsink sink pad */ if(!gst_element_link_pads(rtcp_udpsrc, "src", rtpbin, "recv_rtcp_sink_0")) g_print("Error: could not link udp source to rtcp sink\n"); /* Link the rtpbin sent_rctp_src_pad to the rtcp_udpsink (udpsink) sink pad */ if(!gst_element_link_pads(rtpbin, "send_rtcp_src_0", rtcp_udpsink, "sink")) g_print("Error: could not link rtcp source to udp sink\n"); /* Set the pipeline to "playing" state*/ g_print ("Now playing\n"); ret = gst_element_set_state (data->gst_data.pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data->gst_data.pipeline); return -1; } return 0; }
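The pipeline above relies on a "pad-added" handler to link rtpbin's dynamic RTP source pad to the depayloader; that handler is not shown here. The following is a generic sketch of the usual pattern, not the project's actual vc_pad_added_handler; the vc_data field access mirrors the code above, everything else is an assumption.

/* Generic sketch of an rtpbin "pad-added" handler: once RTP flows,
 * rtpbin exposes a recv_rtp_src_... pad that must be linked to the
 * depayloader's sink pad. */
static void
vc_pad_added_handler (GstElement * rtpbin, GstPad * new_pad, vc_data * data)
{
  gchar *name = gst_pad_get_name (new_pad);

  if (g_str_has_prefix (name, "recv_rtp_src_")) {
    GstPad *sinkpad =
        gst_element_get_static_pad (data->gst_data.depayloader, "sink");

    if (!gst_pad_is_linked (sinkpad) &&
        gst_pad_link (new_pad, sinkpad) != GST_PAD_LINK_OK)
      g_print ("Error: could not link rtpbin to the depayloader\n");

    gst_object_unref (sinkpad);
  }
  g_free (name);
}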
static gint kms_composite_mixer_handle_port (KmsBaseHub * mixer, GstElement * mixer_end_point) { KmsCompositeMixer *self = KMS_COMPOSITE_MIXER (mixer); KmsCompositeMixerData *port_data; gint port_id; GST_DEBUG ("handle new port"); port_id = KMS_BASE_HUB_CLASS (G_OBJECT_CLASS (kms_composite_mixer_parent_class))->handle_port (mixer, mixer_end_point); if (port_id < 0) { return port_id; } KMS_COMPOSITE_MIXER_LOCK (self); if (self->priv->videomixer == NULL) { self->priv->videomixer = gst_element_factory_make ("compositor", NULL); g_object_set (G_OBJECT (self->priv->videomixer), "background", 1 /*black */ , "start-time-selection", 1 /*first */ , "latency", LATENCY * GST_MSECOND, NULL); self->priv->mixer_video_agnostic = gst_element_factory_make ("agnosticbin", NULL); gst_bin_add_many (GST_BIN (mixer), self->priv->videomixer, self->priv->mixer_video_agnostic, NULL); if (self->priv->videotestsrc == NULL) { GstElement *capsfilter; GstCaps *filtercaps; GstPad *pad; GstPadTemplate *sink_pad_template; sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self->priv->videomixer), "sink_%u"); if (G_UNLIKELY (sink_pad_template == NULL)) { GST_ERROR_OBJECT (self, "Error taking a new pad from videomixer"); } self->priv->videotestsrc = gst_element_factory_make ("videotestsrc", NULL); capsfilter = gst_element_factory_make ("capsfilter", NULL); g_object_set (G_OBJECT (capsfilter), "caps-change-mode", 1, NULL); g_object_set (self->priv->videotestsrc, "is-live", TRUE, "pattern", /*black */ 2, NULL); filtercaps = gst_caps_new_simple ("video/x-raw", "width", G_TYPE_INT, self->priv->output_width, "height", G_TYPE_INT, self->priv->output_height, "framerate", GST_TYPE_FRACTION, 15, 1, NULL); g_object_set (G_OBJECT (capsfilter), "caps", filtercaps, NULL); gst_caps_unref (filtercaps); gst_bin_add_many (GST_BIN (self), self->priv->videotestsrc, capsfilter, NULL); gst_element_link (self->priv->videotestsrc, capsfilter); /*link capsfilter -> videomixer */ pad = gst_element_request_pad (self->priv->videomixer, sink_pad_template, NULL, NULL); gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_QUERY_UPSTREAM, (GstPadProbeCallback) cb_latency, NULL, NULL); gst_element_link_pads (capsfilter, NULL, self->priv->videomixer, GST_OBJECT_NAME (pad)); g_object_set (pad, "xpos", 0, "ypos", 0, "alpha", 0.0, NULL); g_object_unref (pad); gst_element_sync_state_with_parent (capsfilter); gst_element_sync_state_with_parent (self->priv->videotestsrc); } gst_element_sync_state_with_parent (self->priv->videomixer); gst_element_sync_state_with_parent (self->priv->mixer_video_agnostic); gst_element_link (self->priv->videomixer, self->priv->mixer_video_agnostic); } if (self->priv->audiomixer == NULL) { self->priv->audiomixer = gst_element_factory_make ("kmsaudiomixer", NULL); gst_bin_add (GST_BIN (mixer), self->priv->audiomixer); gst_element_sync_state_with_parent (self->priv->audiomixer); g_signal_connect (self->priv->audiomixer, "pad-added", G_CALLBACK (pad_added_cb), self); g_signal_connect (self->priv->audiomixer, "pad-removed", G_CALLBACK (pad_removed_cb), self); } kms_base_hub_link_video_src (KMS_BASE_HUB (self), port_id, self->priv->mixer_video_agnostic, "src_%u", TRUE); port_data = kms_composite_mixer_port_data_create (self, port_id); g_hash_table_insert (self->priv->ports, create_gint (port_id), port_data); KMS_COMPOSITE_MIXER_UNLOCK (self); return port_id; }
static GstPadProbeReturn link_to_videomixer (GstPad * pad, GstPadProbeInfo * info, KmsCompositeMixerData * data) { GstPadTemplate *sink_pad_template; KmsCompositeMixer *mixer; GstPad *tee_src; if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_STREAM_START) { return GST_PAD_PROBE_PASS; } mixer = KMS_COMPOSITE_MIXER (data->mixer); GST_DEBUG ("stream start detected %d", data->id); KMS_COMPOSITE_MIXER_LOCK (mixer); data->link_probe_id = 0; data->latency_probe_id = 0; sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer-> priv->videomixer), "sink_%u"); if (G_UNLIKELY (sink_pad_template == NULL)) { GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer"); KMS_COMPOSITE_MIXER_UNLOCK (mixer); return GST_PAD_PROBE_DROP; } data->input = TRUE; /*link tee -> videomixer */ data->video_mixer_pad = gst_element_request_pad (mixer->priv->videomixer, sink_pad_template, NULL, NULL); tee_src = gst_element_get_request_pad (data->tee, "src_%u"); gst_element_link_pads (data->tee, GST_OBJECT_NAME (tee_src), mixer->priv->videomixer, GST_OBJECT_NAME (data->video_mixer_pad)); g_object_unref (tee_src); data->probe_id = gst_pad_add_probe (data->video_mixer_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, (GstPadProbeCallback) cb_EOS_received, KMS_COMPOSITE_MIXER_REF (data), (GDestroyNotify) kms_ref_struct_unref); data->latency_probe_id = gst_pad_add_probe (data->video_mixer_pad, GST_PAD_PROBE_TYPE_QUERY_UPSTREAM, (GstPadProbeCallback) cb_latency, NULL, NULL); /*recalculate the output sizes */ mixer->priv->n_elems++; kms_composite_mixer_recalculate_sizes (mixer); //Recalculate latency to avoid video freezes when an element stops to send media. gst_bin_recalculate_latency (GST_BIN (mixer)); KMS_COMPOSITE_MIXER_UNLOCK (mixer); return GST_PAD_PROBE_REMOVE; }
static GstPadProbeReturn link_to_videomixer (GstPad * pad, GstPadProbeInfo * info, KmsAlphaBlendingData * data) { GstPadTemplate *sink_pad_template; KmsAlphaBlending *mixer = data->mixer; if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_CAPS) { return GST_PAD_PROBE_PASS; } GST_DEBUG ("stream start detected"); KMS_ALPHA_BLENDING_LOCK (mixer); data->link_probe_id = 0; sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer-> priv->videomixer), "sink_%u"); if (G_UNLIKELY (sink_pad_template == NULL)) { GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer"); KMS_ALPHA_BLENDING_UNLOCK (mixer); return GST_PAD_PROBE_DROP; } if (mixer->priv->master_port == data->id) { //master_port, reconfigurate the output_width and heigth_width //and all the ports already created GstEvent *event; GstCaps *caps; gint width, height; const GstStructure *str; event = gst_pad_probe_info_get_event (info); gst_event_parse_caps (event, &caps); GST_DEBUG ("caps %" GST_PTR_FORMAT, caps); if (caps != NULL) { str = gst_caps_get_structure (caps, 0); if (gst_structure_get_int (str, "width", &width) && gst_structure_get_int (str, "height", &height)) { mixer->priv->output_height = height; mixer->priv->output_width = width; } } } if (mixer->priv->videotestsrc == NULL) { GstCaps *filtercaps; GstPad *pad; mixer->priv->videotestsrc = gst_element_factory_make ("videotestsrc", NULL); mixer->priv->videotestsrc_capsfilter = gst_element_factory_make ("capsfilter", NULL); g_object_set (mixer->priv->videotestsrc, "is-live", TRUE, "pattern", /*black */ 2, NULL); filtercaps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "AYUV", "width", G_TYPE_INT, mixer->priv->output_width, "height", G_TYPE_INT, mixer->priv->output_height, "framerate", GST_TYPE_FRACTION, 15, 1, NULL); g_object_set (G_OBJECT (mixer->priv->videotestsrc_capsfilter), "caps", filtercaps, NULL); gst_caps_unref (filtercaps); gst_bin_add_many (GST_BIN (mixer), mixer->priv->videotestsrc, mixer->priv->videotestsrc_capsfilter, NULL); gst_element_link (mixer->priv->videotestsrc, mixer->priv->videotestsrc_capsfilter); /*link capsfilter -> videomixer */ pad = gst_element_request_pad (mixer->priv->videomixer, sink_pad_template, NULL, NULL); gst_element_link_pads (mixer->priv->videotestsrc_capsfilter, NULL, mixer->priv->videomixer, GST_OBJECT_NAME (pad)); g_object_set (pad, "xpos", 0, "ypos", 0, "alpha", 0.0, "zorder", 0, NULL); g_object_unref (pad); gst_element_sync_state_with_parent (mixer->priv->videotestsrc_capsfilter); gst_element_sync_state_with_parent (mixer->priv->videotestsrc); } data->videoscale = gst_element_factory_make ("videoscale", NULL); data->capsfilter = gst_element_factory_make ("capsfilter", NULL); data->videorate = gst_element_factory_make ("videorate", NULL); data->queue = gst_element_factory_make ("queue", NULL); data->videobox = gst_element_factory_make ("videobox", NULL); data->input = TRUE; gst_bin_add_many (GST_BIN (mixer), data->queue, data->videorate, data->videoscale, data->capsfilter, data->videobox, NULL); g_object_set (data->videorate, "average-period", 200 * GST_MSECOND, NULL); g_object_set (data->queue, "flush-on-eos", TRUE, NULL); gst_element_link_many (data->videorate, data->queue, data->videoscale, data->capsfilter, data->videobox, NULL); /*link capsfilter -> videomixer */ data->video_mixer_pad = gst_element_request_pad (mixer->priv->videomixer, sink_pad_template, NULL, NULL); gst_element_link_pads (data->videobox, NULL, mixer->priv->videomixer, GST_OBJECT_NAME 
(data->video_mixer_pad)); gst_element_link (data->videoconvert, data->videorate); data->probe_id = gst_pad_add_probe (data->video_mixer_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, (GstPadProbeCallback) cb_EOS_received, KMS_ALPHA_BLENDING_REF (data), (GDestroyNotify) kms_ref_struct_unref); gst_element_sync_state_with_parent (data->videoscale); gst_element_sync_state_with_parent (data->capsfilter); gst_element_sync_state_with_parent (data->videorate); gst_element_sync_state_with_parent (data->queue); gst_element_sync_state_with_parent (data->videobox); /* configure videomixer pad */ mixer->priv->n_elems++; if (mixer->priv->master_port == data->id) { kms_alpha_blending_reconfigure_ports (mixer); } else { configure_port (data); } KMS_ALPHA_BLENDING_UNLOCK (mixer); return GST_PAD_PROBE_REMOVE; }
int main(int argc, char *argv[]) { GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink; GstElement *video_queue, *visual, *video_convert, *video_sink; GstBus *bus; GstMessage *msg; GstPadTemplate *tee_src_pad_template; GstPad *tee_audio_pad, *tee_video_pad; GstPad *queue_audio_pad, *queue_video_pad; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the elements */ audio_source = gst_element_factory_make ("audiotestsrc", "audio_source"); tee = gst_element_factory_make ("tee", "tee"); audio_queue = gst_element_factory_make ("queue", "audio_queue"); audio_convert = gst_element_factory_make ("audioconvert", "audio_convert"); audio_resample = gst_element_factory_make ("audioresample", "audio_resample"); audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink"); video_queue = gst_element_factory_make ("queue", "video_queue"); visual = gst_element_factory_make ("wavescope", "visual"); video_convert = gst_element_factory_make ("ffmpegcolorspace", "csp"); video_sink = gst_element_factory_make ("autovideosink", "video_sink"); /* Create the empty pipeline */ pipeline = gst_pipeline_new ("test-pipeline"); if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink || !video_queue || !visual || !video_convert || !video_sink) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Configure elements */ g_object_set (audio_source, "freq", 215.0f, NULL); g_object_set (visual, "shader", 0, "style", 1, NULL); /* Link all elements that can be automatically linked because they have "Always" pads */ gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink, video_queue, visual, video_convert, video_sink, NULL); if (gst_element_link_many (audio_source, tee, NULL) != TRUE || gst_element_link_many (audio_queue, audio_convert, audio_resample, audio_sink, NULL) != TRUE || gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return -1; } /* Manually link the Tee, which has "Request" pads */ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src%d"); tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad)); queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink"); tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad)); queue_video_pad = gst_element_get_static_pad (video_queue, "sink"); if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK || gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) { g_printerr ("Tee could not be linked.\n"); gst_object_unref (pipeline); return -1; } gst_object_unref (queue_audio_pad); gst_object_unref (queue_video_pad); /* Start playing the pipeline */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Release the request pads from the Tee, and unref them */ gst_element_release_request_pad (tee, tee_audio_pad); gst_element_release_request_pad (tee, tee_video_pad); gst_object_unref (tee_audio_pad); gst_object_unref 
(tee_video_pad); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
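The example above pops an ERROR or EOS message from the bus and frees it without inspecting it. A small helper such as the sketch below (the function name is an assumption) could be called on the message before gst_message_unref() to report what went wrong:

/* Sketch: report the error carried by a bus message, if any. */
static void
print_bus_error (GstMessage * msg)
{
  if (msg == NULL || GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ERROR)
    return;

  GError *err = NULL;
  gchar *dbg = NULL;

  gst_message_parse_error (msg, &err, &dbg);
  g_printerr ("Error from %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debug info: %s\n", dbg ? dbg : "none");
  g_clear_error (&err);
  g_free (dbg);
}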
ATS_TREE *
ats_tree_new (guint stream_id, gchar * ip, guint port, GError ** error)
{
  ATS_TREE *rval;
  GstElement *parse, *fakesink;
  GstPadTemplate *tee_src_pad_template;
  GstPad *teepad, *sinkpad;

  /* init-ing tree */
  rval = g_new (ATS_TREE, 1);
  rval->pipeline = NULL;
  rval->source = NULL;
  parse = NULL;
  fakesink = NULL;
  rval->faketee.tee = NULL;
  rval->faketee.pad = NULL;
  rval->branches = NULL;

  /* creating elements */
  if ((rval->pipeline = gst_pipeline_new ("proc-tree-pipe")) == NULL)
    goto error;
  if ((rval->source = gst_element_factory_make ("udpsrc", "proc-tree-source")) == NULL)
    goto error;
  if ((parse = gst_element_factory_make ("tsparse", "proc-tree-parse")) == NULL)
    goto error;
  if ((rval->faketee.tee = gst_element_factory_make ("tee", "proc-tree-tee")) == NULL)
    goto error;
  if ((fakesink = gst_element_factory_make ("fakesink", NULL)) == NULL)
    goto error;

  /* init-ing tree metadata */
  rval->metadata = ats_metadata_new (stream_id);

  /* setting udpsrc port and buf size */
  g_object_set (G_OBJECT (rval->source),
      "timeout", 5000000000,
      "buffer-size", 2147483647,
      "port", port,
      "address", ip,
      NULL);
  g_object_set (G_OBJECT (parse), "parse-private-sections", TRUE, NULL);

  /* linking pipeline */
  gst_bin_add_many (GST_BIN (rval->pipeline),
      rval->source, parse, rval->faketee.tee, fakesink, NULL);
  gst_element_link_many (rval->source, parse, rval->faketee.tee, NULL);

  /* connecting tee src to fakesink; note that the pad template is owned by
   * the element class and must not be unreffed by the caller */
  sinkpad = gst_element_get_static_pad (fakesink, "sink");
  tee_src_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (rval->faketee.tee),
      "src_%u");
  teepad = gst_element_request_pad (rval->faketee.tee, tee_src_pad_template,
      NULL, NULL);
  gst_pad_link (teepad, sinkpad);
  gst_object_unref (teepad);
  gst_object_unref (sinkpad);

  /* creating an additional tee src pad for the other branches */
  rval->faketee.pad = gst_element_request_pad (rval->faketee.tee,
      tee_src_pad_template, NULL, NULL);

  return rval;

error:
  if (rval->pipeline)
    gst_object_unref (rval->pipeline);
  if (rval->source)
    gst_object_unref (rval->source);
  if (parse)
    gst_object_unref (parse);
  if (rval->faketee.tee)
    gst_object_unref (rval->faketee.tee);
  if (fakesink)
    gst_object_unref (fakesink);
  if (rval)
    g_free (rval);
  g_set_error (error, G_ERR_UNKNOWN, -1,
      "Error: failed to create tree in ats_tree_new");
  return NULL;
}