/* Sink-pad event probe: caches the stream caps in the application state,
 * forwards flush-start/flush-stop to the flush helpers and queues EOS
 * events (with an extra ref) for later processing. */
static GstPadProbeReturn
events_cb (GstPad * pad, GstPadProbeInfo * probe_info, gpointer user_data)
{
  APP_STATE_T *state = (APP_STATE_T *) user_data;
  GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (probe_info);

  switch (GST_EVENT_TYPE (ev)) {
    case GST_EVENT_CAPS:
      /* Drop the previously cached caps, then cache the new ones. */
      if (state->caps != NULL) {
        gst_caps_unref (state->caps);
        state->caps = NULL;
      }
      gst_event_parse_caps (ev, &state->caps);
      /* gst_event_parse_caps() does not hand us a reference; take one. */
      if (state->caps != NULL)
        gst_caps_ref (state->caps);
      break;
    case GST_EVENT_FLUSH_START:
      flush_start (state);
      break;
    case GST_EVENT_FLUSH_STOP:
      flush_stop (state);
      break;
    case GST_EVENT_EOS:
      /* Keep the event alive while it sits in the queue. */
      queue_object (state, GST_MINI_OBJECT_CAST (gst_event_ref (ev)), FALSE);
      break;
    default:
      break;
  }

  return GST_PAD_PROBE_OK;
}
/* Test probe: counts stream-start, caps, segment and gap events in the
 * corresponding file-scope counters. */
static GstPadProbeReturn
event_counter (GstObject * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstEventType type;

  fail_unless (event != NULL);
  fail_unless (GST_IS_EVENT (event));

  type = GST_EVENT_TYPE (event);
  if (type == GST_EVENT_STREAM_START) {
    ++nb_stream_start_event;
  } else if (type == GST_EVENT_CAPS) {
    ++nb_caps_event;
  } else if (type == GST_EVENT_SEGMENT) {
    ++nb_segment_event;
  } else if (type == GST_EVENT_GAP) {
    ++nb_gap_event;
  }

  return GST_PAD_PROBE_OK;
}
/* Forwards every event seen on an appsink sink pad to the user-supplied
 * appsink_event callback, resolving the test output stream through the
 * pad's peer (the demux src pad). */
static GstPadProbeReturn
on_appsink_event (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  GstAdaptiveDemuxTestEnginePrivate *priv =
      (GstAdaptiveDemuxTestEnginePrivate *) data;
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

  GST_DEBUG ("Received event %" GST_PTR_FORMAT " on pad %" GST_PTR_FORMAT,
      event, pad);

  if (priv->callbacks->appsink_event) {
    GstAdaptiveDemuxTestOutputStream *stream;
    GstPad *stream_pad = gst_pad_get_peer (pad);

    fail_unless (stream_pad != NULL);
    GST_TEST_LOCK (priv);
    stream = getTestOutputDataByPad (priv, stream_pad, TRUE);
    GST_TEST_UNLOCK (priv);
    gst_object_unref (stream_pad);
    priv->callbacks->appsink_event (&priv->engine, stream, event,
        priv->user_data);
  }

  return GST_PAD_PROBE_OK;
}
/* callback called when demux receives events from GstFakeSoupHTTPSrc */
static GstPadProbeReturn
on_demuxReceivesEvent (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  GstAdaptiveDemuxTestEnginePrivate *priv =
      (GstAdaptiveDemuxTestEnginePrivate *) data;
  GstAdaptiveDemuxTestOutputStream *stream = NULL;
  GstEvent *event;
  const GstSegment *segment;

  event = GST_PAD_PROBE_INFO_EVENT (info);
  GST_DEBUG ("Received event %" GST_PTR_FORMAT " on pad %" GST_PTR_FORMAT,
      event, pad);

  if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    /* a new segment will start arriving
     * update segment_start used by pattern validation */
    gst_event_parse_segment (event, &segment);

    GST_TEST_LOCK (priv);
    stream = getTestOutputDataByPad (priv, pad, TRUE);
    /* fold the finished segment's byte count into the stream total before
     * resetting the per-segment counter for the new segment */
    stream->total_received_size += stream->segment_received_size;
    stream->segment_received_size = 0;
    stream->segment_start = segment->start;
    GST_TEST_UNLOCK (priv);
  }
  return GST_PAD_PROBE_OK;
}
/* Event probe on a parsebin "pending" pad: drops everything; when EOS
 * arrives the pending-pad bookkeeping is torn down and decodebin3 re-checks
 * whether all streams are now EOS. */
static GstPadProbeReturn
parsebin_pending_event_probe (GstPad * pad, GstPadProbeInfo * info,
    PendingPad * ppad)
{
  GstDecodebin3 *dbin = ppad->dbin;
  /* We drop all events by default */
  GstPadProbeReturn ret = GST_PAD_PROBE_DROP;
  GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

  GST_DEBUG_OBJECT (pad, "Got event %p %s", ev, GST_EVENT_TYPE_NAME (ev));
  switch (GST_EVENT_TYPE (ev)) {
    case GST_EVENT_EOS:
    {
      GST_DEBUG_OBJECT (pad, "Pending pad marked as EOS, removing");
      ppad->input->pending_pads =
          g_list_remove (ppad->input->pending_pads, ppad);
      /* Remove both probes before freeing ppad: this very callback's
       * user_data (ppad) becomes dangling after g_free, so nothing below
       * may touch it again. */
      gst_pad_remove_probe (ppad->pad, ppad->buffer_probe);
      gst_pad_remove_probe (ppad->pad, ppad->event_probe);
      g_free (ppad);
      check_all_streams_for_eos (dbin);
    }
      break;
    default:
      break;
  }

  return ret;
}
/* Test probe: records every event (with its own ref) in the global
 * myreceivedevents list for later inspection. */
static GstPadProbeReturn
event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstEvent *received = GST_PAD_PROBE_INFO_EVENT (info);

  GST_INFO ("got %" GST_PTR_FORMAT, received);

  myreceivedevents =
      g_list_append (myreceivedevents, gst_event_ref (received));

  return GST_PAD_PROBE_OK;
}
/* Probe that swallows EOS events and lets everything else through. */
static GstPadProbeReturn
empathy_video_src_drop_eos (GstPad *pad, GstPadProbeInfo *info,
    gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

  return (GST_EVENT_TYPE (event) == GST_EVENT_EOS) ?
      GST_PAD_PROBE_DROP : GST_PAD_PROBE_OK;
}
// Pad probe on the appsink: when a flush-stop travels downstream, any audio
// buffered in the provider's adapters is stale, so clear it.
static GstPadProbeReturn onAppsinkFlushCallback(GstPad*, GstPadProbeInfo* info, gpointer userData)
{
    if (!(GST_PAD_PROBE_INFO_TYPE(info) & (GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH)))
        return GST_PAD_PROBE_OK;

    if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_STOP)
        return GST_PAD_PROBE_OK;

    auto* provider = reinterpret_cast<AudioSourceProviderGStreamer*>(userData);
    provider->clearAdapters();
    return GST_PAD_PROBE_OK;
}
/* Drops GAP events and reacts to them by requesting a keyframe upstream. */
static GstPadProbeReturn
gap_detection_probe (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_GAP)
    return GST_PAD_PROBE_OK;

  GST_WARNING_OBJECT (pad, "Gap detected");
  send_force_key_unit_event (pad, FALSE);

  /* Consume the gap event itself. */
  return GST_PAD_PROBE_DROP;
}
/* reconfiguration is not supported by all sources and can be disruptive
 * we will handle reconfiguration manually
 * FIXME: implement source reconfiguration support :) */
static GstPadProbeReturn drop_reconfigure_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    gboolean is_reconfigure;

    OWR_UNUSED(pad);
    OWR_UNUSED(user_data);

    is_reconfigure = GST_IS_EVENT(GST_PAD_PROBE_INFO_DATA(info))
        && GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) == GST_EVENT_RECONFIGURE;

    if (!is_reconfigure)
        return GST_PAD_PROBE_OK;

    GST_DEBUG("Dropping reconfigure event");
    return GST_PAD_PROBE_DROP;
}
/* Waits for the first CAPS event, sends the initial REMB notification and
 * then removes itself. */
static GstPadProbeReturn
send_remb_event_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  KmsRembRemote *rm = user_data;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_CAPS)
    return GST_PAD_PROBE_OK;

  send_remb_event (rm, rm->remb_on_connect, rm->local_ssrc);

  /* One-shot probe: detach after the first CAPS. */
  return GST_PAD_PROBE_REMOVE;
}
/* Blocking probe on an introbin src pad.
 * Drops stream-start events, and on EOS closes the matching valve
 * (video or audio, chosen by the pad name) so no further data flows.
 * Returns PASS for everything else. */
GstPadProbeReturn introbin_pad_block_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    CustomData *data = (CustomData*)user_data;

    /* BUG FIX: the original tested
     *   GST_PAD_PROBE_INFO_TYPE(info) | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM
     * which is always non-zero, so the branch ran for every probe type.
     * A bitwise AND really tests for downstream events. */
    if (GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
        GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);

        /* Compare event types directly instead of strcmp on type names. */
        switch (GST_EVENT_TYPE(event)) {
            case GST_EVENT_STREAM_START:
                return GST_PAD_PROBE_DROP;
            case GST_EVENT_EOS: {
                char *pad_name = gst_pad_get_name(pad);
                char *valve_name;
                GstElement *valve;

                g_print("Introbin EOS received on pad:%s.\n", pad_name);
                if (strcmp(pad_name, "vsrc") == 0)
                    valve = gst_bin_get_by_name(data->introbin, "introvvalve");
                else
                    valve = gst_bin_get_by_name(data->introbin, "introavalve");
                g_free(pad_name); /* gst_pad_get_name() transfers ownership */

                if (valve != NULL) {
                    g_object_set(G_OBJECT(valve), "drop", TRUE, NULL);
                    /* gst_element_get_name() also returns an owned string */
                    valve_name = gst_element_get_name(valve);
                    g_print("%s DROP set TRUE.\n", valve_name);
                    g_free(valve_name);
                    g_object_unref(valve);
                }
                return GST_PAD_PROBE_PASS;
            }
            case GST_EVENT_SEGMENT:
                g_print("Introbin segment received.\n");
                return GST_PAD_PROBE_PASS;
            default:
                break;
        }
        return GST_PAD_PROBE_PASS;
    }
    return GST_PAD_PROBE_PASS;
}
/* Lets all events pass until EOS arrives; at EOS, unlinks the pad from its
 * peer and swallows the event. */
static GstPadProbeReturn
wait_and_drop_eos_cb (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  GstPad *peer_pad;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_EOS)
    return GST_PAD_PROBE_PASS;

  peer_pad = gst_pad_get_peer (pad);
  if (peer_pad != NULL) {
    gst_pad_unlink (pad, peer_pad);
    gst_object_unref (peer_pad);
  }

  return GST_PAD_PROBE_DROP;
}
/* Rate-limits force-key-unit requests: lets one through only if enough time
 * has passed since the last one (per check_last_request_time). */
static GstPadProbeReturn
control_duplicates (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

  if (!gst_video_event_is_force_key_unit (ev))
    return GST_PAD_PROBE_OK;

  if (check_last_request_time (pad)) {
    GST_TRACE_OBJECT (pad, "Sending keyframe request");
    return GST_PAD_PROBE_OK;
  }

  GST_TRACE_OBJECT (pad, "Dropping keyframe request");
  return GST_PAD_PROBE_DROP;
}
/* callback called when dash sends event to AppSink */
static GstPadProbeReturn
on_demux_sent_event (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  GstAdaptiveDemuxTestEnginePrivate *priv =
      (GstAdaptiveDemuxTestEnginePrivate *) data;
  GstAdaptiveDemuxTestOutputStream *stream = NULL;
  GstEvent *event;

  event = GST_PAD_PROBE_INFO_EVENT (info);

  /* NOTE(review): sibling probes in this file lock with
   * GST_TEST_LOCK (priv) while this one uses &priv->engine — confirm both
   * spellings resolve to the same lock object. */
  GST_TEST_LOCK (&priv->engine);
  if (priv->callbacks->demux_sent_event) {
    stream = getTestOutputDataByPad (priv, pad, TRUE);
    (*priv->callbacks->demux_sent_event) (&priv->engine, stream, event,
        priv->user_data);
  }
  GST_TEST_UNLOCK (&priv->engine);
  return GST_PAD_PROBE_OK;
}
// Decodebin pad probe: tracks the pipeline's playback position from buffers
// and segment events so that buffers from the next decodebin can be offset
// when transitioning between songs.
GstPadProbeReturn GstEnginePipeline::DecodebinProbe(GstPad* pad,
                                                    GstPadProbeInfo* info,
                                                    gpointer data) {
  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(data);
  const GstPadProbeType info_type = GST_PAD_PROBE_INFO_TYPE(info);

  if (info_type & GST_PAD_PROBE_TYPE_BUFFER) {
    // The decodebin produced a buffer. Record its end time, so we can offset
    // the buffers produced by the next decodebin when transitioning to the
    // next song.
    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);

    GstClockTime timestamp = GST_BUFFER_TIMESTAMP(buffer);
    GstClockTime duration = GST_BUFFER_DURATION(buffer);
    if (timestamp == GST_CLOCK_TIME_NONE) {
      // Untimestamped buffer: fall back to the last known segment position.
      timestamp = instance->last_decodebin_segment_.position;
    }
    if (duration != GST_CLOCK_TIME_NONE) {
      timestamp += duration;
    }

    instance->last_decodebin_segment_.position = timestamp;
  } else if (info_type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
    GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
    GstEventType event_type = GST_EVENT_TYPE(event);

    if (event_type == GST_EVENT_SEGMENT) {
      // A new segment started, we need to save this to calculate running time
      // offsets later.
      gst_event_copy_segment(event, &instance->last_decodebin_segment_);
    } else if (event_type == GST_EVENT_FLUSH_START) {
      // A flushing seek resets the running time to 0, so remove any offset
      // we set on this pad before.
      gst_pad_set_offset(pad, 0);
    }
  }

  return GST_PAD_PROBE_OK;
}
/* Debug probe: prints segment, gap and flush-stop events plus buffer
 * timing (PTS/duration) for the given dvdbin pad. */
static GstPadProbeReturn
dvdbin_dump_timing_info (GstPad * opad, GstPadProbeInfo * info,
    gpointer userdata)
{
  if (GST_PAD_PROBE_INFO_TYPE (info) & (GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
          GST_PAD_PROBE_TYPE_EVENT_FLUSH)) {
    GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

    if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
      const GstSegment *seg;

      gst_event_parse_segment (event, &seg);

      g_print ("%s:%s segment: rate %g format %d, start: %" GST_TIME_FORMAT
          ", stop: %" GST_TIME_FORMAT ", time: %" GST_TIME_FORMAT
          " base: %" GST_TIME_FORMAT "\n",
          GST_DEBUG_PAD_NAME (opad), seg->rate, seg->format,
          GST_TIME_ARGS (seg->start), GST_TIME_ARGS (seg->stop),
          GST_TIME_ARGS (seg->time), GST_TIME_ARGS (seg->base));
    } else if (GST_EVENT_TYPE (event) == GST_EVENT_GAP) {
      GstClockTime ts, dur, end;

      gst_event_parse_gap (event, &ts, &dur);
      /* gap end is only meaningful when both timestamp and duration are
       * valid clock times */
      end = ts;
      if (ts != GST_CLOCK_TIME_NONE && dur != GST_CLOCK_TIME_NONE)
        end += dur;
      g_print ("%s:%s Gap TS: %" GST_TIME_FORMAT " dur %" GST_TIME_FORMAT
          " (to %" GST_TIME_FORMAT ")\n",
          GST_DEBUG_PAD_NAME (opad), GST_TIME_ARGS (ts),
          GST_TIME_ARGS (dur), GST_TIME_ARGS (end));
    } else if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
      g_print ("%s:%s FLUSHED\n", GST_DEBUG_PAD_NAME (opad));
    }
  }
  if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_BUFFER) {
    GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);

    g_print ("%s:%s Buffer PTS %" GST_TIME_FORMAT " duration %"
        GST_TIME_FORMAT "\n", GST_DEBUG_PAD_NAME (opad),
        GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
  }
  return GST_PAD_PROBE_OK;
}
/* Blocking probe that waits for a specific custom event (data->eventId).
 * Queries always pass; non-event data falls through to the default policy
 * (drop or pass, per data->drop); when the awaited custom event arrives the
 * user callback fires and the event is consumed. */
static GstPadProbeReturn
pad_blocked_callback (GstPad * pad, GstPadProbeInfo * info, gpointer d)
{
  PadBlockedData *data = d;
  GstEvent *event;
  const GstStructure *st;

  if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_QUERY_BOTH) {
    /* Queries must be answered */
    return GST_PAD_PROBE_PASS;
  }

  if (!(GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_EVENT_BOTH)) {
    goto end;
  }

  /* ~type & BLOCK: true when the BLOCK flag is NOT set, i.e. this is a
   * non-blocking invocation of the probe */
  if (~GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_BLOCK) {
    return data->drop ? GST_PAD_PROBE_DROP : GST_PAD_PROBE_OK;
  }

  event = GST_PAD_PROBE_INFO_EVENT (info);

  /* NOTE(review): this masks the numeric event-type value against
   * GST_EVENT_CUSTOM_BOTH rather than comparing for equality — confirm the
   * intent is "any custom event whose type shares these flag bits". */
  if (!(GST_EVENT_TYPE (event) & GST_EVENT_CUSTOM_BOTH)) {
    goto end;
  }

  st = gst_event_get_structure (event);

  if (g_strcmp0 (data->eventId, gst_structure_get_name (st)) != 0) {
    goto end;
  }

  /* Matched the awaited custom event: notify the owner and consume it. */
  data->callback (pad, data->userData);

  return GST_PAD_PROBE_DROP;

end:
  return data->drop ? GST_PAD_PROBE_DROP : GST_PAD_PROBE_PASS;
}
/* Blocking probe on the agnosticbin sink: lets negotiation-critical
 * events/queries pass and, exactly once, swaps the input via change_input()
 * before removing itself. */
static GstPadProbeReturn
block_agnostic_sink (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  static gboolean configuring = FALSE;

  /* HACK: Ignore caps event and stream start event that causes negotiation
   * failures. This is a workaround that should be removed */
  if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
    GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

    if (GST_EVENT_TYPE (event) == GST_EVENT_STREAM_START
        || GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
      return GST_PAD_PROBE_PASS;
    }
  }

  /* HACK: Ignore query accept caps that causes negotiation errors.
   * This is a workaround that should be removed */
  if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
    GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);

    if (GST_QUERY_TYPE (query) == GST_QUERY_ACCEPT_CAPS) {
      return GST_PAD_PROBE_PASS;
    }
  }

  /* BUG FIX: the original used a non-atomic check-then-set
   * (g_atomic_int_get followed by g_atomic_int_set), so two probe threads
   * could both observe FALSE and run change_input() concurrently.  A single
   * compare-and-exchange closes that window. */
  if (g_atomic_int_compare_and_exchange (&configuring, FALSE, TRUE)) {
    change_input (data);
    g_atomic_int_set (&configuring, FALSE);
    return GST_PAD_PROBE_REMOVE;
  }

  return GST_PAD_PROBE_PASS;
}
/* EOS probe on a videomixer sink pad.
 * While the port is not being removed, just remember that EOS was seen
 * (eos_managed).  During removal: detach this probe, push a fresh EOS to
 * the pad and schedule element teardown on the loop thread; the original
 * EOS event is dropped. */
static GstPadProbeReturn
cb_EOS_received (GstPad * pad, GstPadProbeInfo * info,
    KmsAlphaBlendingData * port_data)
{
  KmsAlphaBlending *self = port_data->mixer;
  GstEvent *event;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_EOS) {
    return GST_PAD_PROBE_OK;
  }

  KMS_ALPHA_BLENDING_LOCK (self);
  if (!port_data->removing) {
    /* Not tearing down yet: flag the EOS so removal can handle it later. */
    port_data->eos_managed = TRUE;
    KMS_ALPHA_BLENDING_UNLOCK (self);
    return GST_PAD_PROBE_OK;
  }

  /* Self-remove before re-sending EOS so this probe does not fire again. */
  if (port_data->probe_id > 0) {
    gst_pad_remove_probe (pad, port_data->probe_id);
    port_data->probe_id = 0;
  }
  KMS_ALPHA_BLENDING_UNLOCK (self);

  event = gst_event_new_eos ();
  gst_pad_send_event (pad, event);

  /* Defer element removal to the loop thread; the ref is released by
   * kms_ref_struct_unref when the idle source is destroyed. */
  kms_loop_idle_add_full (self->priv->loop, G_PRIORITY_DEFAULT,
      (GSourceFunc) remove_elements_from_pipeline,
      KMS_ALPHA_BLENDING_REF (port_data),
      (GDestroyNotify) kms_ref_struct_unref);

  return GST_PAD_PROBE_DROP;
}
/* Test probe: fails if more than one TAG event is seen; quits the main
 * loop on EOS. */
static GstPadProbeReturn
tag_event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GMainLoop *loop = user_data;
  GstEventType type = GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info));

  if (type == GST_EVENT_TAG) {
    ++n_tags;
    fail_if (n_tags > 1, "More than 1 tag received");
  } else if (type == GST_EVENT_EOS) {
    g_main_loop_quit (loop);
  }

  return GST_PAD_PROBE_OK;
}
/* CAPS probe for an alpha-blending port: on the first CAPS event it wires
 * the port's processing chain (videorate -> queue -> videoscale ->
 * capsfilter -> videobox) into the videomixer, creates the background
 * videotestsrc branch on first use, installs the EOS probe and triggers
 * (re)configuration.  One-shot: returns GST_PAD_PROBE_REMOVE. */
static GstPadProbeReturn
link_to_videomixer (GstPad * pad, GstPadProbeInfo * info,
    KmsAlphaBlendingData * data)
{
  GstPadTemplate *sink_pad_template;
  KmsAlphaBlending *mixer = data->mixer;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_CAPS) {
    return GST_PAD_PROBE_PASS;
  }

  /* NOTE(review): message says "stream start" but this probe triggers on
   * the CAPS event — confirm which wording was intended. */
  GST_DEBUG ("stream start detected");
  KMS_ALPHA_BLENDING_LOCK (mixer);

  data->link_probe_id = 0;

  sink_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer->
          priv->videomixer), "sink_%u");

  if (G_UNLIKELY (sink_pad_template == NULL)) {
    GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer");
    KMS_ALPHA_BLENDING_UNLOCK (mixer);
    return GST_PAD_PROBE_DROP;
  }

  if (mixer->priv->master_port == data->id) {
    //master_port, reconfigurate the output_width and heigth_width
    //and all the ports already created
    GstEvent *event;
    GstCaps *caps;
    gint width, height;
    const GstStructure *str;

    event = gst_pad_probe_info_get_event (info);
    gst_event_parse_caps (event, &caps);

    GST_DEBUG ("caps %" GST_PTR_FORMAT, caps);

    if (caps != NULL) {
      str = gst_caps_get_structure (caps, 0);
      if (gst_structure_get_int (str, "width", &width) &&
          gst_structure_get_int (str, "height", &height)) {
        mixer->priv->output_height = height;
        mixer->priv->output_width = width;
      }
    }
  }

  if (mixer->priv->videotestsrc == NULL) {
    GstCaps *filtercaps;
    GstPad *pad;                /* NOTE: shadows the callback's pad parameter */

    mixer->priv->videotestsrc =
        gst_element_factory_make ("videotestsrc", NULL);
    mixer->priv->videotestsrc_capsfilter =
        gst_element_factory_make ("capsfilter", NULL);

    g_object_set (mixer->priv->videotestsrc, "is-live", TRUE, "pattern",
        /*black */ 2, NULL);

    filtercaps =
        gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, "AYUV",
        "width", G_TYPE_INT, mixer->priv->output_width,
        "height", G_TYPE_INT, mixer->priv->output_height,
        "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
    g_object_set (G_OBJECT (mixer->priv->videotestsrc_capsfilter), "caps",
        filtercaps, NULL);
    gst_caps_unref (filtercaps);

    gst_bin_add_many (GST_BIN (mixer),
        mixer->priv->videotestsrc, mixer->priv->videotestsrc_capsfilter,
        NULL);
    gst_element_link (mixer->priv->videotestsrc,
        mixer->priv->videotestsrc_capsfilter);

    /*link capsfilter -> videomixer */
    pad = gst_element_request_pad (mixer->priv->videomixer,
        sink_pad_template, NULL, NULL);
    gst_element_link_pads (mixer->priv->videotestsrc_capsfilter, NULL,
        mixer->priv->videomixer, GST_OBJECT_NAME (pad));
    /* background branch: fully transparent, bottom of the z-order */
    g_object_set (pad, "xpos", 0, "ypos", 0, "alpha", 0.0, "zorder", 0, NULL);
    g_object_unref (pad);

    gst_element_sync_state_with_parent (mixer->priv->videotestsrc_capsfilter);
    gst_element_sync_state_with_parent (mixer->priv->videotestsrc);
  }

  data->videoscale = gst_element_factory_make ("videoscale", NULL);
  data->capsfilter = gst_element_factory_make ("capsfilter", NULL);
  data->videorate = gst_element_factory_make ("videorate", NULL);
  data->queue = gst_element_factory_make ("queue", NULL);
  data->videobox = gst_element_factory_make ("videobox", NULL);
  data->input = TRUE;

  gst_bin_add_many (GST_BIN (mixer), data->queue, data->videorate,
      data->videoscale, data->capsfilter, data->videobox, NULL);

  g_object_set (data->videorate, "average-period", 200 * GST_MSECOND, NULL);
  g_object_set (data->queue, "flush-on-eos", TRUE, NULL);

  gst_element_link_many (data->videorate, data->queue, data->videoscale,
      data->capsfilter, data->videobox, NULL);

  /*link capsfilter -> videomixer */
  data->video_mixer_pad =
      gst_element_request_pad (mixer->priv->videomixer, sink_pad_template,
      NULL, NULL);
  gst_element_link_pads (data->videobox, NULL,
      mixer->priv->videomixer, GST_OBJECT_NAME (data->video_mixer_pad));
  gst_element_link (data->videoconvert, data->videorate);

  /* watch for EOS on the mixer pad so the port can be torn down cleanly */
  data->probe_id = gst_pad_add_probe (data->video_mixer_pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      (GstPadProbeCallback) cb_EOS_received,
      KMS_ALPHA_BLENDING_REF (data), (GDestroyNotify) kms_ref_struct_unref);

  gst_element_sync_state_with_parent (data->videoscale);
  gst_element_sync_state_with_parent (data->capsfilter);
  gst_element_sync_state_with_parent (data->videorate);
  gst_element_sync_state_with_parent (data->queue);
  gst_element_sync_state_with_parent (data->videobox);

  /* configure videomixer pad */
  mixer->priv->n_elems++;

  if (mixer->priv->master_port == data->id) {
    kms_alpha_blending_reconfigure_ports (mixer);
  } else {
    configure_port (data);
  }

  KMS_ALPHA_BLENDING_UNLOCK (mixer);

  return GST_PAD_PROBE_REMOVE;
}
/* CAPS probe on an appsink sink pad: on the first CAPS event, creates an
 * appsrc configured with those caps, links it to the audio or video
 * agnosticbin (chosen by caps intersection), and connects the appsink's
 * new-sample signal to feed the appsrc.  Runs at most once per pad — the
 * created appsrc is stored as pad qdata and acts as the "already done"
 * marker. */
static GstPadProbeReturn
set_appsrc_caps (GstPad * pad, GstPadProbeInfo * info, gpointer httpep)
{
  KmsHttpPostEndpoint *self = KMS_HTTP_POST_ENDPOINT (httpep);
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstCaps *audio_caps = NULL, *video_caps = NULL;
  GstElement *appsrc, *appsink, *agnosticbin;
  GstCaps *caps;
  gpointer data;

  if (GST_EVENT_TYPE (event) != GST_EVENT_CAPS) {
    return GST_PAD_PROBE_OK;
  }

  gst_event_parse_caps (event, &caps);
  if (caps == NULL) {
    GST_ERROR_OBJECT (pad, "Invalid caps received");
    return GST_PAD_PROBE_OK;
  }

  GST_TRACE ("caps are %" GST_PTR_FORMAT, caps);

  /* qdata already set: this pad was configured on a previous CAPS event */
  data = g_object_get_qdata (G_OBJECT (pad), appsrc_data_quark ());
  if (data != NULL) {
    goto end;
  }

  /* Get the proper agnosticbin */
  audio_caps = gst_caps_from_string (KMS_AGNOSTIC_AUDIO_CAPS);
  video_caps = gst_caps_from_string (KMS_AGNOSTIC_VIDEO_CAPS);

  if (gst_caps_can_intersect (audio_caps, caps))
    agnosticbin = kms_element_get_audio_agnosticbin (KMS_ELEMENT (self));
  else if (gst_caps_can_intersect (video_caps, caps))
    agnosticbin = kms_element_get_video_agnosticbin (KMS_ELEMENT (self));
  else {
    GST_ELEMENT_WARNING (self, CORE, CAPS,
        ("Unsupported media received: %" GST_PTR_FORMAT, caps),
        ("Unsupported media received: %" GST_PTR_FORMAT, caps));
    goto end;
  }

  /* Create appsrc element and link to agnosticbin */
  appsrc = gst_element_factory_make ("appsrc", NULL);
  g_object_set (G_OBJECT (appsrc), "is-live", TRUE, "do-timestamp", FALSE,
      "min-latency", G_GUINT64_CONSTANT (0),
      "max-latency", G_GUINT64_CONSTANT (0),
      "format", GST_FORMAT_TIME, "caps", caps, NULL);

  gst_bin_add (GST_BIN (self), appsrc);
  if (!gst_element_link (appsrc, agnosticbin)) {
    GST_ERROR ("Could not link %s to element %s", GST_ELEMENT_NAME (appsrc),
        GST_ELEMENT_NAME (agnosticbin));
  }

  /* Connect new-sample signal to callback */
  appsink = gst_pad_get_parent_element (pad);
  g_signal_connect (appsink, "new-sample",
      G_CALLBACK (new_sample_post_handler), appsrc);
  g_object_unref (appsink);

  /* remember the appsrc on the pad so this setup runs only once */
  g_object_set_qdata (G_OBJECT (pad), appsrc_data_quark (), appsrc);

  gst_element_sync_state_with_parent (appsrc);

end:
  if (audio_caps != NULL)
    gst_caps_unref (audio_caps);

  if (video_caps != NULL)
    gst_caps_unref (video_caps);

  return GST_PAD_PROBE_OK;
}
/* STREAM_START probe for a composite-mixer port: links the port's tee to a
 * freshly requested videomixer sink pad, installs the EOS and latency
 * probes and recomputes the output layout.  One-shot: returns
 * GST_PAD_PROBE_REMOVE. */
static GstPadProbeReturn
link_to_videomixer (GstPad * pad, GstPadProbeInfo * info,
    KmsCompositeMixerData * data)
{
  GstPadTemplate *sink_pad_template;
  KmsCompositeMixer *mixer;
  GstPad *tee_src;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) !=
      GST_EVENT_STREAM_START) {
    return GST_PAD_PROBE_PASS;
  }

  mixer = KMS_COMPOSITE_MIXER (data->mixer);
  GST_DEBUG ("stream start detected %d", data->id);
  KMS_COMPOSITE_MIXER_LOCK (mixer);

  data->link_probe_id = 0;
  data->latency_probe_id = 0;

  sink_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer->
          priv->videomixer), "sink_%u");

  if (G_UNLIKELY (sink_pad_template == NULL)) {
    GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer");
    KMS_COMPOSITE_MIXER_UNLOCK (mixer);
    return GST_PAD_PROBE_DROP;
  }

  data->input = TRUE;

  /*link tee -> videomixer */
  data->video_mixer_pad =
      gst_element_request_pad (mixer->priv->videomixer, sink_pad_template,
      NULL, NULL);
  tee_src = gst_element_get_request_pad (data->tee, "src_%u");
  gst_element_link_pads (data->tee, GST_OBJECT_NAME (tee_src),
      mixer->priv->videomixer, GST_OBJECT_NAME (data->video_mixer_pad));
  g_object_unref (tee_src);

  /* EOS probe tears the port down; latency probe intercepts latency
   * queries on the mixer pad */
  data->probe_id = gst_pad_add_probe (data->video_mixer_pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      (GstPadProbeCallback) cb_EOS_received,
      KMS_COMPOSITE_MIXER_REF (data), (GDestroyNotify) kms_ref_struct_unref);
  data->latency_probe_id = gst_pad_add_probe (data->video_mixer_pad,
      GST_PAD_PROBE_TYPE_QUERY_UPSTREAM,
      (GstPadProbeCallback) cb_latency, NULL, NULL);

  /*recalculate the output sizes */
  mixer->priv->n_elems++;
  kms_composite_mixer_recalculate_sizes (mixer);

  //Recalculate latency to avoid video freezes when an element stops to send media.
  gst_bin_recalculate_latency (GST_BIN (mixer));

  KMS_COMPOSITE_MIXER_UNLOCK (mixer);

  return GST_PAD_PROBE_REMOVE;
}
/* Probe on the output of a parser chain (the last
 * src pad).
 * Events: rewrites stream-start group ids, tracks the active GstStream and
 * links it to a multiqueue slot, mirrors caps onto the stream, converts EOS
 * into a custom-eos event for the slot (dropping the real EOS), and clears
 * the saw_eos flag on flush-stop.
 * Queries: answers CAPS (pull mode) and ACCEPT_CAPS directly. */
static GstPadProbeReturn
parse_chain_output_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinInputStream * input)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  if (GST_IS_EVENT (GST_PAD_PROBE_INFO_DATA (info))) {
    GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

    GST_DEBUG_OBJECT (pad, "Got event %s", GST_EVENT_TYPE_NAME (ev));
    switch (GST_EVENT_TYPE (ev)) {
      case GST_EVENT_STREAM_START:
      {
        GstStream *stream = NULL;
        guint group_id = G_MAXUINT32;

        gst_event_parse_group_id (ev, &group_id);
        GST_DEBUG_OBJECT (pad, "Got stream-start, group_id:%d, input %p",
            group_id, input->input);
        /* rewrite the event in place if the input decided on a different
         * group id */
        if (set_input_group_id (input->input, &group_id)) {
          ev = gst_event_make_writable (ev);
          gst_event_set_group_id (ev, group_id);
          GST_PAD_PROBE_INFO_DATA (info) = ev;
        }
        input->saw_eos = FALSE;

        gst_event_parse_stream (ev, &stream);
        /* FIXME : Would we ever end up with a stream already set on the input ?? */
        if (stream) {
          if (input->active_stream != stream) {
            MultiQueueSlot *slot;

            if (input->active_stream)
              gst_object_unref (input->active_stream);
            input->active_stream = stream;
            /* We have the beginning of a stream, get a multiqueue slot and link to it */
            g_mutex_lock (&input->dbin->selection_lock);
            slot = get_slot_for_input (input->dbin, input);
            link_input_to_slot (input, slot);
            g_mutex_unlock (&input->dbin->selection_lock);
          } else
            gst_object_unref (stream);
        }
      }
        break;
      case GST_EVENT_CAPS:
      {
        GstCaps *caps = NULL;

        gst_event_parse_caps (ev, &caps);
        GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
        if (caps && input->active_stream)
          gst_stream_set_caps (input->active_stream, caps);
      }
        break;
      case GST_EVENT_EOS:
        input->saw_eos = TRUE;
        if (all_inputs_are_eos (input->dbin)) {
          GST_DEBUG_OBJECT (pad, "real input pad, marking as EOS");
          check_all_streams_for_eos (input->dbin);
        } else {
          GstPad *peer = gst_pad_get_peer (input->srcpad);

          if (peer) {
            /* Send custom-eos event to multiqueue slot */
            GstStructure *s;
            GstEvent *event;

            GST_DEBUG_OBJECT (pad,
                "Got EOS end of input stream, post custom-eos");
            s = gst_structure_new_empty ("decodebin3-custom-eos");
            event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
            gst_pad_send_event (peer, event);
            gst_object_unref (peer);
          } else {
            GST_FIXME_OBJECT (pad, "No peer, what should we do ?");
          }
        }
        ret = GST_PAD_PROBE_DROP;
        break;
      case GST_EVENT_FLUSH_STOP:
        GST_DEBUG_OBJECT (pad, "Clear saw_eos flag");
        input->saw_eos = FALSE;
        /* fallthrough into default (no break in original; harmless) */
      default:
        break;
    }
  } else if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
    GstQuery *q = GST_PAD_PROBE_INFO_QUERY (info);

    GST_DEBUG_OBJECT (pad, "Seeing query %s", GST_QUERY_TYPE_NAME (q));
    /* If we have a parser, we want to reply to the caps query */
    /* FIXME: Set a flag when the input stream is created for
     * streams where we shouldn't reply to these queries */
    if (GST_QUERY_TYPE (q) == GST_QUERY_CAPS
        && (info->type & GST_PAD_PROBE_TYPE_PULL)) {
      GstCaps *filter = NULL;
      GstCaps *allowed;

      gst_query_parse_caps (q, &filter);
      allowed = get_parser_caps_filter (input->dbin, filter);
      GST_DEBUG_OBJECT (pad,
          "Intercepting caps query, setting %" GST_PTR_FORMAT, allowed);
      gst_query_set_caps_result (q, allowed);
      gst_caps_unref (allowed);
      /* HANDLED: the query does not travel further */
      ret = GST_PAD_PROBE_HANDLED;
    } else if (GST_QUERY_TYPE (q) == GST_QUERY_ACCEPT_CAPS) {
      GstCaps *prop = NULL;

      gst_query_parse_accept_caps (q, &prop);
      /* Fast check against target caps */
      if (gst_caps_can_intersect (prop, input->dbin->caps))
        gst_query_set_accept_caps_result (q, TRUE);
      else {
        gboolean accepted = check_parser_caps_filter (input->dbin, prop);

        /* check against caps filter */
        gst_query_set_accept_caps_result (q, accepted);
        GST_DEBUG_OBJECT (pad, "ACCEPT_CAPS query, returning %d", accepted);
      }
      ret = GST_PAD_PROBE_HANDLED;
    }
  }
  return ret;
}
/* STREAM_START probe for a composite-mixer port: builds the port's
 * processing chain (videorate -> queue -> videoscale -> capsfilter), links
 * it into the videomixer, creates the background videotestsrc branch on
 * first use, installs the EOS probe and recomputes output sizes.
 * One-shot: returns GST_PAD_PROBE_REMOVE. */
static GstPadProbeReturn
link_to_videomixer (GstPad * pad, GstPadProbeInfo * info,
    KmsCompositeMixerData * data)
{
  GstPadTemplate *sink_pad_template;
  KmsCompositeMixer *mixer;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) !=
      GST_EVENT_STREAM_START) {
    return GST_PAD_PROBE_PASS;
  }

  mixer = KMS_COMPOSITE_MIXER (data->mixer);
  GST_DEBUG ("stream start detected %d", data->id);
  KMS_COMPOSITE_MIXER_LOCK (mixer);

  data->link_probe_id = 0;

  sink_pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer->priv->
          videomixer), "sink_%u");

  if (G_UNLIKELY (sink_pad_template == NULL)) {
    GST_ERROR_OBJECT (mixer, "Error taking a new pad from videomixer");
    KMS_COMPOSITE_MIXER_UNLOCK (mixer);
    return GST_PAD_PROBE_DROP;
  }

  if (mixer->priv->videotestsrc == NULL) {
    GstElement *capsfilter;
    GstCaps *filtercaps;
    GstPad *pad;                /* NOTE: shadows the callback's pad parameter */

    mixer->priv->videotestsrc =
        gst_element_factory_make ("videotestsrc", NULL);
    capsfilter = gst_element_factory_make ("capsfilter", NULL);

    g_object_set (mixer->priv->videotestsrc, "is-live", TRUE, "pattern",
        /*black */ 2, NULL);

    filtercaps =
        gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, "AYUV",
        "width", G_TYPE_INT, mixer->priv->output_width,
        "height", G_TYPE_INT, mixer->priv->output_height,
        "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
    g_object_set (G_OBJECT (capsfilter), "caps", filtercaps, NULL);
    gst_caps_unref (filtercaps);

    gst_bin_add_many (GST_BIN (mixer), mixer->priv->videotestsrc,
        capsfilter, NULL);
    gst_element_link (mixer->priv->videotestsrc, capsfilter);

    /*link capsfilter -> videomixer */
    pad = gst_element_request_pad (mixer->priv->videomixer,
        sink_pad_template, NULL, NULL);
    gst_element_link_pads (capsfilter, NULL, mixer->priv->videomixer,
        GST_OBJECT_NAME (pad));
    /* background branch: fully transparent */
    g_object_set (pad, "xpos", 0, "ypos", 0, "alpha", 0.0, NULL);
    g_object_unref (pad);

    gst_element_sync_state_with_parent (capsfilter);
    gst_element_sync_state_with_parent (mixer->priv->videotestsrc);
  }

  data->videoscale = gst_element_factory_make ("videoscale", NULL);
  data->capsfilter = gst_element_factory_make ("capsfilter", NULL);
  data->videorate = gst_element_factory_make ("videorate", NULL);
  data->queue = gst_element_factory_make ("queue", NULL);
  data->input = TRUE;

  gst_bin_add_many (GST_BIN (mixer), data->queue, data->videorate,
      data->videoscale, data->capsfilter, NULL);

  g_object_set (data->videorate, "average-period", 200 * GST_MSECOND, NULL);
  g_object_set (data->queue, "flush-on-eos", TRUE, "max-size-buffers", 60,
      NULL);

  gst_element_link_many (data->videorate, data->queue, data->videoscale,
      data->capsfilter, NULL);

  /*link capsfilter -> videomixer */
  data->video_mixer_pad =
      gst_element_request_pad (mixer->priv->videomixer, sink_pad_template,
      NULL, NULL);
  gst_element_link_pads (data->capsfilter, NULL, mixer->priv->videomixer,
      GST_OBJECT_NAME (data->video_mixer_pad));
  gst_element_link (data->videoconvert, data->videorate);

  /* watch for EOS on the mixer pad so the port can be torn down cleanly */
  data->probe_id = gst_pad_add_probe (data->video_mixer_pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      (GstPadProbeCallback) cb_EOS_received,
      KMS_COMPOSITE_MIXER_REF (data), (GDestroyNotify) kms_ref_struct_unref);

  gst_element_sync_state_with_parent (data->videoscale);
  gst_element_sync_state_with_parent (data->capsfilter);
  gst_element_sync_state_with_parent (data->videorate);
  gst_element_sync_state_with_parent (data->queue);

  /*recalculate the output sizes */
  mixer->priv->n_elems++;
  kms_composite_mixer_recalculate_sizes (mixer);

  KMS_COMPOSITE_MIXER_UNLOCK (mixer);

  return GST_PAD_PROBE_REMOVE;
}