static void
print_pad_info (GstElement * element)
{
  const GList *pads;
  GstPad *pad;

  n_print ("\n");
  n_print ("Pads:\n");

  if (!element->numpads) {
    n_print (" none\n");
    return;
  }

  pads = element->pads;
  while (pads) {
    gchar *name;

    pad = GST_PAD (pads->data);
    pads = g_list_next (pads);

    n_print ("");

    name = gst_pad_get_name (pad);
    if (gst_pad_get_direction (pad) == GST_PAD_SRC)
      g_print (" SRC: '%s'", name);
    else if (gst_pad_get_direction (pad) == GST_PAD_SINK)
      g_print (" SINK: '%s'", name);
    else
      g_print (" UNKNOWN!!!: '%s'", name);
    g_free (name);
    g_print ("\n");

    n_print (" Implementation:\n");
    if (pad->chainfunc)
      n_print (" Has chainfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->chainfunc));
    if (pad->getrangefunc)
      n_print (" Has getrangefunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->getrangefunc));
    if (pad->eventfunc != gst_pad_event_default)
      n_print (" Has custom eventfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->eventfunc));
    if (pad->queryfunc != gst_pad_query_default)
      n_print (" Has custom queryfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->queryfunc));
    if (pad->querytypefunc != gst_pad_get_query_types_default) {
      const GstQueryType *query_types = gst_pad_get_query_types (pad);

      if (query_types) {
        n_print (" Provides query types:\n");
        print_query_types (query_types);
      }
    }
    if (pad->iterintlinkfunc != gst_pad_iterate_internal_links_default)
      n_print (" Has custom iterintlinkfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->iterintlinkfunc));
    if (pad->bufferallocfunc)
      n_print (" Has bufferallocfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->bufferallocfunc));
    if (pad->getcapsfunc)
      n_print (" Has getcapsfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->getcapsfunc));
    if (pad->setcapsfunc)
      n_print (" Has setcapsfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->setcapsfunc));
    /* gst_pad_acceptcaps_default is static :/ */
    if (pad->acceptcapsfunc)
      n_print (" Has acceptcapsfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->acceptcapsfunc));
    if (pad->fixatecapsfunc)
      n_print (" Has fixatecapsfunc(): %s\n",
          GST_DEBUG_FUNCPTR_NAME (pad->fixatecapsfunc));

    if (pad->padtemplate)
      n_print (" Pad Template: '%s'\n", pad->padtemplate->name_template);

    if (pad->caps) {
      n_print (" Capabilities:\n");
      print_caps (pad->caps, " ");
    }
  }
}
bool GStreamerGWorld::enterFullscreen()
{
    if (m_dynamicPadName)
        return false;

    if (!m_videoWindow)
        m_videoWindow = PlatformVideoWindow::createWindow();

    GstElement* platformVideoSink = gst_element_factory_make("autovideosink", "platformVideoSink");
    GstElement* colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    GstElement* queue = gst_element_factory_make("queue", "queue");
    GstElement* videoScale = gst_element_factory_make("videoscale", "videoScale");

    // Get video sink bin and the tee inside.
    GRefPtr<GstElement> videoSink;
    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));

    // Add and link a queue, ffmpegcolorspace, videoscale and sink in the bin.
    gst_bin_add_many(GST_BIN(videoSink.get()), platformVideoSink, videoScale, colorspace, queue, NULL);

    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", colorspace, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(colorspace, "src", videoScale, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(videoScale, "src", platformVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Link a new src pad from tee to queue.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_request_pad(tee.get(), "src%d"));
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
    gst_pad_link(srcPad.get(), sinkPad.get());

    m_dynamicPadName.set(gst_pad_get_name(srcPad.get()));

    // Synchronize the new elements with the pipeline state. If it's
    // paused, limit the state change to pre-rolling.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING)
        state = GST_STATE_READY;

    gst_element_set_state(platformVideoSink, state);
    gst_element_set_state(videoScale, state);
    gst_element_set_state(colorspace, state);
    gst_element_set_state(queue, state);

    // Query the current media segment information and send it towards
    // the new tee branch downstream.
    GstQuery* query = gst_query_new_segment(GST_FORMAT_TIME);
    gboolean queryResult = gst_element_query(m_pipeline, query);
    if (!queryResult) {
        gst_query_unref(query);
        return true;
    }

    GstFormat format;
    gint64 position;
    if (!gst_element_query_position(m_pipeline, &format, &position))
        position = 0;

    gdouble rate;
    gint64 startValue, stopValue;
    gst_query_parse_segment(query, &rate, &format, &startValue, &stopValue);

    GstEvent* event = gst_event_new_new_segment(FALSE, rate, format, startValue, stopValue, position);
    gst_pad_push_event(srcPad.get(), event);

    gst_query_unref(query);
    return true;
}
/**
 * recursively iterate all our pads and search adjacent elements
 */
static GooComponent *
find_goo_component (GstElement *elem, SearchContext *ctx)
{
  GstIterator *itr;
  gpointer item;
  GooComponent *component = NULL;

  /* check if we've already examined this element, to prevent loops: */
  if (already_visited (ctx->visited_nodes, elem)) {
    GST_INFO ("already visited elem=%s (%s)",
        gst_element_get_name (elem), G_OBJECT_TYPE_NAME (elem));
    return NULL;
  }

  GST_INFO ("elem=%s (%s)", gst_element_get_name (elem),
      G_OBJECT_TYPE_NAME (elem));

  /* note: we don't handle the case of the underlying data structure changing
   * while iterating.. we just bail out and the user needs to restart.
   */
  for (itr = gst_element_iterate_pads (elem);
      gst_iterator_next (itr, &item) == GST_ITERATOR_OK && !component;
      gst_object_unref (item)) {
    GstElement *adjacent_elem = NULL;
    GstPad *pad = GST_PAD (item);
    GstPad *peer = gst_pad_get_peer (pad);

    GST_INFO ("found pad: %s (%s)", gst_pad_get_name (pad),
        G_OBJECT_TYPE_NAME (pad));

    if (G_UNLIKELY (peer == NULL)) {
      GST_INFO ("NULL peer.. not connected yet?");
      continue;
    }

    /* in the case of GstGhostPad (and probably other proxy pads)
     * the parent is actually the pad we are a proxy for, so
     * keep looping until we find the GstElement
     */
    while (TRUE) {
      GstObject *obj = gst_pad_get_parent (peer);

      if (GST_IS_PAD (obj)) {
        gst_object_unref (peer);
        peer = GST_PAD (obj);
      } else {
        adjacent_elem = GST_ELEMENT (obj);
        break;
      }
    }

    if (G_UNLIKELY (adjacent_elem == NULL)) {
      gst_object_unref (peer);
      GST_INFO ("Cannot find an adjacent element");
      continue;
    }

    GST_INFO ("found adjacent_elem: %s", gst_element_get_name (adjacent_elem));

    component = check_for_goo_component (adjacent_elem, ctx);

    if (component == NULL) {
      /* if adjacent_elem is itself a bin, we need to search the
       * contents of that bin:
       */
      if (GST_IS_BIN (adjacent_elem)) {
        component = find_goo_component_in_bin (GST_BIN (adjacent_elem), ctx);
      }
    }

    if (component == NULL) {
      /* if we didn't find our component, recursively search
       * the contents of adjacent_elem's pads:
       */
      component = find_goo_component (adjacent_elem, ctx);
    }

    /* cleanup: */
    gst_object_unref (adjacent_elem);
    gst_object_unref (peer);
  }

  gst_iterator_free (itr);

  return component;
}
static void
link_sinkpad_cb (GstPad * pad, GstPad * peer, gpointer user_data)
{
  KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (user_data);
  KmsSinkPadData *sinkdata;
  GstAppSinkCallbacks callbacks;
  GstElement *appsink, *appsrc;
  KmsRecordingProfile profile;
  DataEvtProbe *data;
  KmsMediaType type;
  GstPad *target;
  gchar *id, *key;

  target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));

  if (target == NULL) {
    GST_ERROR_OBJECT (pad, "No target pad set");
    return;
  }

  key = g_object_get_qdata (G_OBJECT (target), kms_pad_id_key_quark ());

  if (key == NULL) {
    GST_ERROR_OBJECT (pad, "No identifier assigned");
    g_object_unref (target);
    return;
  }

  KMS_ELEMENT_LOCK (KMS_ELEMENT (self));

  sinkdata = g_hash_table_lookup (self->priv->sink_pad_data, key);
  if (sinkdata == NULL) {
    GST_ERROR_OBJECT (self, "Invalid pad %" GST_PTR_FORMAT " connected %"
        GST_PTR_FORMAT, pad, peer);
    goto end;
  }

  switch (sinkdata->type) {
    case KMS_ELEMENT_PAD_TYPE_AUDIO:
      type = KMS_MEDIA_TYPE_AUDIO;
      break;
    case KMS_ELEMENT_PAD_TYPE_VIDEO:
      type = KMS_MEDIA_TYPE_VIDEO;
      break;
    default:
      GST_ERROR_OBJECT (self, "Invalid pad %" GST_PTR_FORMAT " connected %"
          GST_PTR_FORMAT, pad, peer);
      goto end;
  }

  profile = self->priv->profile;

  GST_DEBUG_OBJECT (pad, "linked to %" GST_PTR_FORMAT, peer);

  id = gst_pad_get_name (pad);
  appsrc = kms_base_media_muxer_add_src (self->priv->mux, type, id);

  if (appsrc == NULL) {
    GST_ERROR_OBJECT (self, "Can not get appsrc for pad %" GST_PTR_FORMAT,
        pad);
    KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
    g_object_unref (target);
    g_free (id);
    return;
  }

  gst_pad_set_element_private (pad, g_object_ref (appsrc));
  g_hash_table_insert (self->priv->srcs, id, g_object_ref (appsrc));

  if (sinkdata->sink_probe != 0UL) {
    gst_pad_remove_probe (target, sinkdata->sink_probe);
  }

  callbacks.eos = recv_eos;
  callbacks.new_preroll = NULL;
  callbacks.new_sample = recv_sample;

  appsink = gst_pad_get_parent_element (target);
  gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &callbacks, appsrc,
      NULL);
  g_object_unref (appsink);

  data = data_evt_probe_new (appsrc, profile);
  sinkdata->sink_probe = gst_pad_add_probe (target,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      configure_pipeline_capabilities, data,
      (GDestroyNotify) data_evt_probe_destroy);

end:
  KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
  g_clear_object (&target);
}
int
main (int argc, char *argv[])
{
  GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert,
      *audio_resample, *audio_sink;
  GstElement *video_queue, *visual, *video_convert, *video_sink;
  GstBus *bus;
  GstMessage *msg;
  GstPadTemplate *tee_src_pad_template;
  GstPad *tee_audio_pad, *tee_video_pad;
  GstPad *queue_audio_pad, *queue_video_pad;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
  tee = gst_element_factory_make ("tee", "tee");
  audio_queue = gst_element_factory_make ("queue", "audio_queue");
  audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");
  audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
  audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  video_queue = gst_element_factory_make ("queue", "video_queue");
  visual = gst_element_factory_make ("wavescope", "visual");
  video_convert = gst_element_factory_make ("videoconvert", "video_convert");
  video_sink = gst_element_factory_make ("autovideosink", "video_sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert
      || !audio_resample || !audio_sink || !video_queue || !visual
      || !video_convert || !video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure elements */
  g_object_set (audio_source, "freq", 215.0f, NULL);
  g_object_set (visual, "shader", 0, "style", 1, NULL);

  /* Link all elements that can be automatically linked because they have "Always" pads */
  gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue,
      audio_convert, audio_resample, audio_sink, video_queue, visual,
      video_convert, video_sink, NULL);
  if (gst_element_link_many (audio_source, tee, NULL) != TRUE
      || gst_element_link_many (audio_queue, audio_convert, audio_resample,
          audio_sink, NULL) != TRUE
      || gst_element_link_many (video_queue, visual, video_convert,
          video_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Manually link the Tee, which has "Request" pads */
  tee_src_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),
      "src_%u");
  tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL,
      NULL);
  g_print ("Obtained request pad %s for audio branch.\n",
      gst_pad_get_name (tee_audio_pad));
  queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
  tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL,
      NULL);
  g_print ("Obtained request pad %s for video branch.\n",
      gst_pad_get_name (tee_video_pad));
  queue_video_pad = gst_element_get_static_pad (video_queue, "sink");
  if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK
      || gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Tee could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }
  gst_object_unref (queue_audio_pad);
  gst_object_unref (queue_video_pad);

  /* Start playing the pipeline */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Release the request pads from the Tee, and unref them */
  gst_element_release_request_pad (tee, tee_audio_pad);
  gst_element_release_request_pad (tee, tee_video_pad);
  gst_object_unref (tee_audio_pad);
  gst_object_unref (tee_video_pad);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
bool GStreamerGWorld::enterFullscreen()
{
    if (m_dynamicPadName)
        return false;

    if (!m_videoWindow)
        m_videoWindow = PlatformVideoWindow::createWindow();

    GstElement* platformVideoSink = gst_element_factory_make("autovideosink", "platformVideoSink");
    GstElement* colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    GstElement* queue = gst_element_factory_make("queue", "queue");
    GstElement* videoScale = gst_element_factory_make("videoscale", "videoScale");

    // Get video sink bin and the tee inside.
    GOwnPtr<GstElement> videoSink;
    g_object_get(m_pipeline, "video-sink", &videoSink.outPtr(), NULL);
    GstElement* tee = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee");

    // Add and link a queue, ffmpegcolorspace and sink in the bin.
    gst_bin_add_many(GST_BIN(videoSink.get()), platformVideoSink, videoScale, colorspace, queue, NULL);
    gst_element_link_many(queue, colorspace, videoScale, platformVideoSink, NULL);

    // Link a new src pad from tee to queue.
    GstPad* srcPad = gst_element_get_request_pad(tee, "src%d");
    GstPad* sinkPad = gst_element_get_static_pad(queue, "sink");
    gst_pad_link(srcPad, sinkPad);
    gst_object_unref(GST_OBJECT(sinkPad));

    m_dynamicPadName = gst_pad_get_name(srcPad);

    // Roll new elements to pipeline state.
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(colorspace);
    gst_element_sync_state_with_parent(videoScale);
    gst_element_sync_state_with_parent(platformVideoSink);

    gst_object_unref(tee);

    // Query the current media segment information and send it towards
    // the new tee branch downstream.
    GstQuery* query = gst_query_new_segment(GST_FORMAT_TIME);
    gboolean queryResult = gst_element_query(m_pipeline, query);

#if GST_CHECK_VERSION(0, 10, 30)
    if (!queryResult) {
        gst_query_unref(query);
        gst_object_unref(GST_OBJECT(srcPad));
        return true;
    }
#else
    // GStreamer < 0.10.30 doesn't set the query result correctly, so
    // just ignore it to avoid a compilation warning.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=620490.
    (void) queryResult;
#endif

    GstFormat format;
    gint64 position;
    if (!gst_element_query_position(m_pipeline, &format, &position))
        position = 0;

    gdouble rate;
    gint64 startValue, stopValue;
    gst_query_parse_segment(query, &rate, &format, &startValue, &stopValue);

    GstEvent* event = gst_event_new_new_segment(FALSE, rate, format, startValue, stopValue, position);
    gst_pad_push_event(srcPad, event);

    gst_query_unref(query);
    gst_object_unref(GST_OBJECT(srcPad));
    return true;
}
static gboolean
kms_recorder_endpoint_query_accept_caps (KmsElement * element, GstPad * pad,
    GstQuery * query)
{
  KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (element);
  GstCaps *caps, *accept;
  gboolean ret = TRUE;

  switch (kms_element_get_pad_type (element, pad)) {
    case KMS_ELEMENT_PAD_TYPE_VIDEO:
      caps = kms_recorder_endpoint_get_caps_from_profile (self,
          KMS_ELEMENT_PAD_TYPE_VIDEO);
      break;
    case KMS_ELEMENT_PAD_TYPE_AUDIO:
      caps = kms_recorder_endpoint_get_caps_from_profile (self,
          KMS_ELEMENT_PAD_TYPE_AUDIO);
      break;
    default:
      GST_ERROR_OBJECT (pad, "unknown pad");
      return FALSE;
  }

  if (caps == NULL) {
    GST_ERROR_OBJECT (self, "Can not accept caps without profile");
    gst_query_set_accept_caps_result (query, FALSE);
    return TRUE;
  }

  gst_query_parse_accept_caps (query, &accept);

  ret = gst_caps_can_intersect (accept, caps);

  if (ret) {
    GstElement *appsrc;
    GstPad *srcpad;
    gchar *id;

    id = gst_pad_get_name (pad);

    KMS_ELEMENT_LOCK (KMS_ELEMENT (self));
    appsrc = g_hash_table_lookup (self->priv->srcs, id);
    g_free (id);

    if (appsrc == NULL) {
      KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
      GST_DEBUG_OBJECT (self, "No appsrc attached to pad %" GST_PTR_FORMAT,
          pad);
      goto end;
    }

    srcpad = gst_element_get_static_pad (appsrc, "src");
    KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));

    ret = gst_pad_peer_query_accept_caps (srcpad, accept);
    gst_object_unref (srcpad);
  } else {
    GST_ERROR_OBJECT (self, "Incompatible caps %" GST_PTR_FORMAT, caps);
  }

end:
  gst_caps_unref (caps);
  gst_query_set_accept_caps_result (query, ret);

  return TRUE;
}
/**
 * gst_composite_adjust_pip:
 * @param composite The GstComposite instance
 * @param x the X position of the PIP
 * @param y the Y position of the PIP
 * @param w the width of the PIP
 * @param h the height of the PIP
 * @return TRUE if the PIP has been changed successfully
 *
 * Change the PIP position and size.
 */
gboolean
gst_composite_adjust_pip (GstComposite * composite, gint x, gint y,
    gint w, gint h)
{
  gboolean result = FALSE;
  GstIterator *iter = NULL;
  GValue value = { 0 };
  GstElement *element = NULL;
  gboolean done = FALSE;

  g_return_val_if_fail (GST_IS_COMPOSITE (composite), FALSE);

  GST_COMPOSITE_LOCK (composite);
  if (composite->adjusting) {
    WARN ("last PIP adjustment request is still in progress");
    goto end;
  }

  composite->b_x = x;
  composite->b_y = y;

  if (composite->b_width != w || composite->b_height != h) {
    composite->b_width = w;
    composite->b_height = h;
    composite->adjusting = TRUE;
    gst_worker_stop (GST_WORKER (composite));
    result = TRUE;
    goto end;
  }

  element = gst_worker_get_element (GST_WORKER (composite), "mix");
  iter = gst_element_iterate_sink_pads (element);
  while (iter && !done) {
    switch (gst_iterator_next (iter, &value)) {
      case GST_ITERATOR_OK:
      {
        GstPad *pad = g_value_get_object (&value);

        if (g_strcmp0 (gst_pad_get_name (pad), "sink_1") == 0) {
          g_object_set (pad, "xpos", composite->b_x,
              "ypos", composite->b_y, NULL);
          done = TRUE;
          result = TRUE;
        }
        g_value_reset (&value);
        break;
      }
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (iter);
        break;
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
      default:
        /* iterator returned _ERROR or premature end with _OK,
         * mark an error and exit */
        done = TRUE;
        result = FALSE;
        break;
    }
  }
  if (G_IS_VALUE (&value))
    g_value_unset (&value);
  if (iter)
    gst_iterator_free (iter);

  composite->adjusting = FALSE;

  /*
  if (!result) {
    WARN ("failed to adjust PIP: %d, %d, %d, %d", x, y, w, h);
  }
  */

end:
  GST_COMPOSITE_UNLOCK (composite);
  return result;
}
static void
close_link (GstPad *srcpad, GstElement *sinkelement, const gchar *padname,
    const GList *templlist)
{
  GstPad *pad;
  gboolean has_dynamic_pads = FALSE;

  g_print ("Plugging pad %s:%s to newly created %s:%s\n",
      gst_object_get_name (GST_OBJECT (gst_pad_get_parent (srcpad))),
      gst_pad_get_name (srcpad),
      gst_object_get_name (GST_OBJECT (sinkelement)), padname);

  /* add the element to the pipeline and set correct state */
  if (sinkelement != audiosink) {
    gst_bin_add (GST_BIN (pipeline), sinkelement);
    gst_element_set_state (sinkelement, GST_STATE_READY);
  }
  pad = gst_element_get_pad (sinkelement, padname);
  gst_pad_link (srcpad, pad);
  if (sinkelement != audiosink) {
    gst_element_set_state (sinkelement, GST_STATE_PAUSED);
  }
  gst_object_unref (GST_OBJECT (pad));

  /* if we have static source pads, link those. If we have dynamic
   * source pads, listen for pad-added signals on the element */
  for (; templlist != NULL; templlist = templlist->next) {
    GstStaticPadTemplate *templ = templlist->data;

    /* only sourcepads, no request pads */
    if (templ->direction != GST_PAD_SRC ||
        templ->presence == GST_PAD_REQUEST) {
      continue;
    }

    switch (templ->presence) {
      case GST_PAD_ALWAYS:
      {
        GstPad *pad = gst_element_get_pad (sinkelement, templ->name_template);
        GstCaps *caps = gst_pad_get_caps (pad);

        /* link */
        try_to_plug (pad, caps);
        gst_object_unref (GST_OBJECT (pad));
        gst_caps_unref (caps);
        break;
      }
      case GST_PAD_SOMETIMES:
        has_dynamic_pads = TRUE;
        break;
      default:
        break;
    }
  }

  /* listen for newly created pads if this element supports that */
  if (has_dynamic_pads) {
    g_signal_connect (sinkelement, "pad-added", G_CALLBACK (cb_newpad), NULL);
  }
}
static gboolean
mpegtsmux_src_event (GstPad * pad, GstEvent * event)
{
  MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
  gboolean res = TRUE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    {
      GstIterator *iter;
      GstIteratorResult iter_ret;
      GstPad *sinkpad;
      GstClockTime running_time;
      gboolean all_headers, done;
      guint count;

      if (!gst_video_event_is_force_key_unit (event))
        break;

      gst_video_event_parse_upstream_force_key_unit (event,
          &running_time, &all_headers, &count);
      GST_INFO_OBJECT (mux, "received upstream force-key-unit event, "
          "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
          gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
          all_headers, count);

      if (!all_headers)
        break;

      mux->pending_key_unit_ts = running_time;
      gst_event_replace (&mux->force_key_unit_event, event);

      /* forward the force-key-unit event to every sink pad */
      iter = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mux));
      done = FALSE;
      while (!done) {
        gboolean res = FALSE, tmp;

        iter_ret = gst_iterator_next (iter, (gpointer *) & sinkpad);
        switch (iter_ret) {
          case GST_ITERATOR_DONE:
            done = TRUE;
            break;
          case GST_ITERATOR_OK:
            GST_INFO_OBJECT (mux, "forwarding to %s",
                gst_pad_get_name (sinkpad));
            tmp = gst_pad_push_event (sinkpad, gst_event_ref (event));
            GST_INFO_OBJECT (mux, "result %d", tmp);
            /* succeed if at least one pad succeeds */
            res |= tmp;
            gst_object_unref (sinkpad);
            break;
          case GST_ITERATOR_ERROR:
            done = TRUE;
            break;
          case GST_ITERATOR_RESYNC:
            break;
        }
      }
      gst_iterator_free (iter);

      gst_event_unref (event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (mux);

  return res;
}
gboolean
set_audio_bin (GstElement *bin, MbMedia *media, GstPad *decoder_src_pad)
{
  GstPad *sink_pad = NULL, *ghost_pad = NULL, *output_sink_pad = NULL;
  GstCaps *caps = NULL;
  GstPadLinkReturn ret;
  int return_code = TRUE;

  media->audio_volume = gst_element_factory_make ("volume", NULL);
  g_assert (media->audio_volume);

  media->audio_converter = gst_element_factory_make ("audioconvert", NULL);
  g_assert (media->audio_converter);

  media->audio_resampler = gst_element_factory_make ("audioresample", NULL);
  g_assert (media->audio_resampler);

  media->audio_filter = gst_element_factory_make ("capsfilter", NULL);
  g_assert (media->audio_filter);

  gst_element_set_state (media->audio_volume, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_converter, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_resampler, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_filter, GST_STATE_PAUSED);

  caps = gst_caps_from_string (audio_caps);
  g_assert (caps);

  g_object_set (media->audio_filter, "caps", caps, NULL);
  gst_caps_unref (caps);

  gst_bin_add_many (GST_BIN (bin), media->audio_volume,
      media->audio_converter, media->audio_resampler,
      media->audio_filter, NULL);

  if (!gst_element_link_many (media->audio_volume, media->audio_converter,
          media->audio_resampler, media->audio_filter, NULL)) {
    g_debug ("Could not link audio elements together.\n");
    return_code = FALSE;
  } else {
    sink_pad = gst_element_get_static_pad (media->audio_volume, "sink");
    g_assert (sink_pad);

    ret = gst_pad_link (decoder_src_pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret)) {
      return_code = FALSE;
      g_debug (" Link failed.\n");
    } else {
      g_debug (" Link succeeded.\n");

      g_object_set (G_OBJECT (media->audio_volume), "volume", media->volume,
          NULL);

      gst_element_set_state (media->audio_volume, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_converter, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_resampler, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_filter, GST_STATE_PLAYING);

      ghost_pad = gst_ghost_pad_new ("a_src",
          gst_element_get_static_pad (media->audio_filter, "src"));
      gst_pad_set_active (ghost_pad, TRUE);
      gst_element_add_pad (bin, ghost_pad);

      output_sink_pad =
          gst_element_get_request_pad (_mb_global_data.audio_mixer, "sink_%u");
      g_assert (output_sink_pad);

      media->audio_pad_name = gst_pad_get_name (output_sink_pad);
      g_debug ("audiomixer: new pad requested (%s)\n", media->audio_pad_name);

      ret = gst_pad_link (ghost_pad, output_sink_pad);
      if (GST_PAD_LINK_FAILED (ret)) {
        return_code = FALSE;
        g_debug (" Could not link %s and audiomixer together.\n",
            media->name);
      }

      gst_object_unref (output_sink_pad);
      gst_object_unref (sink_pad);
    }
  }

  return return_code;
}
gboolean
set_video_bin (GstElement *bin, MbMedia *media, GstPad *decoder_src_pad)
{
  GstElement *sink_element = NULL;
  GstCaps *caps = NULL;
  GstPad *sink_pad = NULL, *ghost_pad = NULL, *output_sink_pad = NULL;
  GstPadLinkReturn ret;
  gchar *uri = NULL;
  gboolean is_image = FALSE;
  int return_code = TRUE;

  g_assert (media->video_scaler);
  g_assert (media->video_filter);

  gst_element_set_state (media->video_scaler, GST_STATE_PAUSED);
  gst_element_set_state (media->video_filter, GST_STATE_PAUSED);

  caps = gst_caps_new_simple ("video/x-raw",
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "width", G_TYPE_INT, media->width,
      "height", G_TYPE_INT, media->height, NULL);

  g_object_set (G_OBJECT (media->video_scaler), "add-borders", 0, NULL);
  g_object_set (G_OBJECT (media->video_filter), "caps", caps, NULL);

  gst_bin_add_many (GST_BIN (bin), media->video_scaler, media->video_filter,
      NULL);

  if (!gst_element_link (media->video_scaler, media->video_filter)) {
    g_debug ("Could not link elements together.\n");
    gst_caps_unref (caps);
    gst_object_unref (media->video_scaler);
    gst_object_unref (media->video_filter);
    return FALSE;
  }

  sink_element = media->video_scaler;

  g_object_get (G_OBJECT (media->decoder), "uri", &uri, NULL);
  is_image = has_image_extension (uri);
  g_free (uri);

  if (is_image) {
    media->image_freezer = gst_element_factory_make ("imagefreeze", NULL);
    g_assert (media->image_freezer);

    gst_bin_add (GST_BIN (bin), media->image_freezer);

    if (!gst_element_link (media->image_freezer, media->video_scaler)) {
      g_debug ("Could not link image element.\n");
      gst_caps_unref (caps);
      gst_object_unref (media->image_freezer);
      return FALSE;
    }

    gst_element_set_state (media->image_freezer, GST_STATE_PAUSED);
    sink_element = media->image_freezer;
  }

  sink_pad = gst_element_get_static_pad (sink_element, "sink");
  g_assert (sink_pad);

  ret = gst_pad_link (decoder_src_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret))
    g_debug (" Link failed.\n");
  else
    g_debug (" Link succeeded.\n");

  ghost_pad = gst_ghost_pad_new ("v_src",
      gst_element_get_static_pad (media->video_filter, "src"));
  gst_pad_set_active (ghost_pad, TRUE);
  gst_element_add_pad (bin, ghost_pad);

  output_sink_pad =
      gst_element_get_request_pad (_mb_global_data.video_mixer, "sink_%u");
  g_assert (output_sink_pad);

  media->video_pad_name = gst_pad_get_name (output_sink_pad);
  g_debug ("videomixer: new pad requested (%s)\n", media->video_pad_name);

  ret = gst_pad_link (ghost_pad, output_sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    return_code = FALSE;
    g_debug (" Could not link %s and videomixer together\n", media->name);
  } else {
    g_object_set (output_sink_pad, "xpos", media->x_pos, NULL);
    g_object_set (output_sink_pad, "ypos", media->y_pos, NULL);
    g_object_set (output_sink_pad, "zorder", media->z_index, NULL);
    g_object_set (output_sink_pad, "alpha", media->alpha, NULL);

    g_debug (" Link succeeded between %s and videomixer.\n", media->name);
  }

  if (is_image)
    gst_element_set_state (media->image_freezer, GST_STATE_PLAYING);

  gst_element_set_state (media->video_scaler, GST_STATE_PLAYING);
  gst_element_set_state (media->video_filter, GST_STATE_PLAYING);

  gst_caps_unref (caps);
  gst_object_unref (output_sink_pad);
  gst_object_unref (sink_pad);

  return return_code;
}