/* Signal that playback is finished: once every item has been rendered,
 * deliver EOS to the sink pads of both the audio and the video
 * preparation bins. */
static void
gst_nle_source_push_eos (GstNleSource * nlesrc)
{
  GstPad *pads[2];
  guint i;

  GST_INFO_OBJECT (nlesrc, "All items rendered, pushing eos");

  /* push on both sink pads of our A/V prep bins */
  pads[0] = nlesrc->video_sinkpad;
  pads[1] = nlesrc->audio_sinkpad;
  for (i = 0; i < 2; i++)
    gst_pad_send_event (pads[i], gst_event_new_eos ());
}
/* element navigation */
/* Deliver EOS to @element through its static "sink" pad.  Elements
 * without a sink pad (pure sources) cannot receive EOS that way; for
 * those we send a flush-stop on their "src" pad instead, as before.
 *
 * Fixes: gst_element_get_static_pad() returns a reference owned by the
 * caller — both pads were previously leaked — and the "src" pad may be
 * NULL, which must not be passed to gst_pad_send_event(). */
static void
_send_eos (GstElement * element)
{
  GstPad *sinkpad = gst_element_get_static_pad (element, "sink");

  if (sinkpad) {
    gst_pad_send_event (sinkpad, gst_event_new_eos ());
    gst_object_unref (sinkpad);
  } else {
    GstPad *srcpad = gst_element_get_static_pad (element, "src");

    if (srcpad) {
      gst_pad_send_event (srcpad, gst_event_new_flush_stop (FALSE));
      gst_object_unref (srcpad);
    }
  }
}
/* Request a new key frame via an upstream force-key-unit event.
 *
 * @pad may face either direction: a src pad handles the event itself
 * (gst_pad_send_event), a sink pad pushes it to its peer
 * (gst_pad_push_event).  Raw caps need no key units, so nothing is
 * sent for them.  Falls back to the allowed caps when no caps are
 * negotiated yet; bails out silently when neither is available. */
static void
send_force_key_unit_event (GstPad * pad, gboolean all_headers)
{
  GstCaps *caps;
  GstEvent *event;

  caps = gst_pad_get_current_caps (pad);
  if (caps == NULL)
    caps = gst_pad_get_allowed_caps (pad);

  if (caps == NULL)
    return;

  if (!is_raw_caps (caps)) {
    event =
        gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
        all_headers, 0);

    if (GST_PAD_DIRECTION (pad) == GST_PAD_SRC)
      gst_pad_send_event (pad, event);
    else
      gst_pad_push_event (pad, event);
  }

  gst_caps_unref (caps);
}
/* Change the playback rate, keeping the current position.
 *
 * Queries the upstream peer for the current time and issues a flushing
 * seek with the new @rate: forward rates play from "now" to the end,
 * negative rates play from the start back to "now".
 *
 * Fix: "&current_position" had been mangled into "¤t_position" by a
 * character-encoding corruption ("&curr" -> "¤"), which does not
 * compile; restored the address-of expression. */
static void
gst_navseek_change_playback_rate (GstNavSeek * navseek, gdouble rate)
{
  gboolean ret;
  GstPad *peer_pad;
  gint64 current_position;

  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
  ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, &current_position);
  if (ret) {
    GstEvent *event;
    gint64 start;
    gint64 stop;

    if (rate > 0.0) {
      start = current_position;
      stop = -1;
    } else {
      /* negative rate: we play from stop to start */
      start = 0;
      stop = current_position;
    }

    event = gst_event_new_seek (rate, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP,
        GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_SET, stop);

    gst_pad_send_event (peer_pad, event);
  }
  gst_object_unref (peer_pad);
}
/* Seek to the user-defined segment [segment_start, segment_end].
 * When looping is enabled the SEGMENT flag is added so playback wraps
 * around; otherwise a plain accurate seek is issued.  Does nothing
 * until both boundaries are set and the sink pad is linked. */
static void
gst_navseek_segseek (GstNavSeek * navseek)
{
  GstSeekFlags flags;
  GstEvent *event;
  GstPad *peer_pad;

  if (navseek->segment_start == GST_CLOCK_TIME_NONE ||
      navseek->segment_end == GST_CLOCK_TIME_NONE ||
      !GST_PAD_IS_LINKED (GST_BASE_TRANSFORM (navseek)->sinkpad))
    return;

  flags = GST_SEEK_FLAG_ACCURATE;
  if (navseek->loop)
    flags |= GST_SEEK_FLAG_SEGMENT;

  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, flags,
      GST_SEEK_TYPE_SET, navseek->segment_start,
      GST_SEEK_TYPE_SET, navseek->segment_end);

  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
  gst_pad_send_event (peer_pad, event);
  gst_object_unref (peer_pad);
}
bool Pad::sendEvent(const EventPtr &event) { //Sending an event passes ownership of it, so we need to strong ref() it as we still //hold a pointer to the object, and will release it when the wrapper is cleared. gst_event_ref(event); return gst_pad_send_event(object<GstPad>(), event); }
/* GstPadForwardFunction: forward evdata->event to the peer of @pad and
 * fold the outcome into evdata->result.  A failed SEEK is logged as an
 * error, other failures only as info; on failure with a pending flush
 * the pad's flush bookkeeping is reset.  Always returns FALSE so the
 * forwarding loop visits every pad. */
static gboolean
event_forward_func (GstPad * pad, EventData * evdata)
{
  GstAggregatorPadPrivate *padpriv = GST_AGGREGATOR_PAD (pad)->priv;
  gboolean ok = TRUE;
  GstPad *peer;

  peer = gst_pad_get_peer (pad);
  if (peer != NULL) {
    ok = gst_pad_send_event (peer, gst_event_ref (evdata->event));
    GST_DEBUG_OBJECT (pad, "return of event push is %d", ok);
    gst_object_unref (peer);
  }

  evdata->result &= ok;

  if (!ok) {
    if (GST_EVENT_TYPE (evdata->event) == GST_EVENT_SEEK)
      GST_ERROR_OBJECT (pad, "Event %" GST_PTR_FORMAT " failed",
          evdata->event);
    else
      GST_INFO_OBJECT (pad, "Event %" GST_PTR_FORMAT " failed",
          evdata->event);

    if (evdata->flush) {
      padpriv->pending_flush_start = FALSE;
      padpriv->pending_flush_stop = FALSE;
    }
  }

  /* Always send to all pads */
  return FALSE;
}
/* Pad event callback: when the decodebin reaches EOS, loop the stream
 * by seeking its first src pad back to the start.  Events other than
 * this looping case fall through to the default handler.
 *
 * Fixes: the element reference returned by gst_pad_get_parent_element()
 * was leaked on every call, and the result of gst_iterator_next() was
 * never checked before reading the GValue (the redundant
 * g_value_reset()-before-unset was also dropped). */
gboolean
eos_callback (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstElement *bkgdec;

  g_print ("Decodebin received EOS. Someone should handle that...\n");

  bkgdec = gst_pad_get_parent_element (pad);
  if (bkgdec->numsrcpads > 0) {
    GValue v = G_VALUE_INIT;
    GstIterator *srcpads;

    srcpads = gst_element_iterate_src_pads (bkgdec);
    if (gst_iterator_next (srcpads, &v) == GST_ITERATOR_OK) {
      /* g_value_get_object() does not add a ref; no unref needed on pad */
      GstPad *srcpad = GST_PAD (g_value_get_object (&v));
      GstEvent *seek_event;
      gboolean result;

      seek_event = gst_event_new_seek (1.0, GST_FORMAT_TIME,
          GST_SEEK_FLAG_FLUSH,
          GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
      result = gst_pad_send_event (srcpad, seek_event);
      if (result == TRUE) {
        g_print ("seek event sent OK.\n");
      } else {
        g_print ("seek sent FAILED.\n");
      }
      g_value_unset (&v);
    }
    gst_iterator_free (srcpads);
    gst_object_unref (bkgdec);
    return TRUE;
  }
  gst_object_unref (bkgdec);
  return gst_pad_event_default (pad, parent, event);
}
/* "pad-removed" handler: when a src pad disappears, look up the wavenc
 * recording that stream (keyed by pad name in the global hash), send it
 * EOS so the file is finalized, and schedule main-loop exit. */
static void
pad_removed_cb (GstElement * element, GstPad * pad, gpointer data)
{
  GstElement *wavenc;
  GstPad *enc_sink;

  if (gst_pad_get_direction (pad) != GST_PAD_SRC)
    return;

  GST_DEBUG ("Removed pad %" GST_PTR_FORMAT, pad);

  wavenc = g_hash_table_lookup (hash, GST_OBJECT_NAME (pad));
  if (!wavenc)
    return;

  GST_DEBUG ("Send EOS to %s", GST_OBJECT_NAME (wavenc));
  enc_sink = gst_element_get_static_pad (wavenc, "sink");
  gst_pad_send_event (enc_sink, gst_event_new_eos ());
  gst_object_unref (enc_sink);

#ifdef MANUAL_CHECK
  /* Let test last for a few seconds to have a decent output file to debug */
  g_timeout_add_seconds (2, quit_main_loop, NULL);
#else
  g_idle_add (quit_main_loop, NULL);
#endif
}
/* GstNavigation interface: translate pointer coordinates from window
 * space back into (non-scaled, non-centered) video space and forward
 * the navigation event upstream via our sink pad's peer.
 *
 * Fix: when there is no valid peer pad the navigation event (and a
 * non-pad peer object) was leaked; it is now released. */
static void
gst_sdlvideosink_navigation_send_event (GstNavigation * navigation,
    GstStructure * structure)
{
  GstSDLVideoSink *sdlvideosink = GST_SDLVIDEOSINK (navigation);
  GstEvent *event;
  GstVideoRectangle dst = { 0, };
  GstVideoRectangle src = { 0, };
  GstVideoRectangle result;
  double x, y, old_x, old_y;
  GstPad *pad = NULL;

  src.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  src.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);
  dst.w = sdlvideosink->width;
  dst.h = sdlvideosink->height;
  gst_video_sink_center_rect (src, dst, &result, FALSE);

  event = gst_event_new_navigation (structure);

  /* Our coordinates can be wrong here if we centered the video */

  /* Converting pointer coordinates to the non scaled geometry */
  if (gst_structure_get_double (structure, "pointer_x", &old_x)) {
    x = old_x;

    if (x >= result.x && x <= (result.x + result.w)) {
      x -= result.x;
      x *= sdlvideosink->width;
      x /= result.w;
    } else {
      x = 0;
    }
    GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event x "
        "coordinate from %f to %f", old_x, x);
    gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL);
  }
  if (gst_structure_get_double (structure, "pointer_y", &old_y)) {
    y = old_y;

    if (y >= result.y && y <= (result.y + result.h)) {
      y -= result.y;
      y *= sdlvideosink->height;
      y /= result.h;
    } else {
      y = 0;
    }
    GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event y "
        "coordinate from %f to %f", old_y, y);
    gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL);
  }

  pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (sdlvideosink));

  if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) {
    gst_pad_send_event (pad, event);
    gst_object_unref (pad);
  } else {
    /* FIX: previously leaked the event (and any non-NULL peer) here */
    gst_event_unref (event);
    if (pad)
      gst_object_unref (pad);
  }
}
/* Seek relative to the current position by @offset nanoseconds.
 * Queries upstream for the current time, clamps the target to >= 0 and
 * issues an accurate flushing seek.
 *
 * Fix: gst_pad_get_peer() returns NULL when the sink pad is unlinked;
 * the NULL pad was previously passed to gst_pad_query_position() and
 * gst_object_unref(), both invalid. */
static void
gst_navseek_seek (GstNavSeek * navseek, gint64 offset)
{
  gboolean ret;
  GstPad *peer_pad;
  gint64 peer_value;

  /* Query for the current time then attempt to set to time + offset */
  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
  if (peer_pad == NULL)
    return;

  ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, &peer_value);
  if (ret) {
    GstEvent *event;

    peer_value += offset;
    if (peer_value < 0)
      peer_value = 0;

    event = gst_event_new_seek (1.0, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
        GST_SEEK_TYPE_SET, peer_value,
        GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);

    gst_pad_send_event (peer_pad, event);
  }
  gst_object_unref (peer_pad);
}
/* Flush @element's sink pad and, when @segment has a defined format,
 * hand it the new segment so its running time lines up. */
static void
distribute_running_time (GstElement * element, const GstSegment * segment)
{
  GstPad *sinkpad = gst_element_get_static_pad (element, "sink");

  gst_pad_send_event (sinkpad, gst_event_new_flush_start ());
  gst_pad_send_event (sinkpad, gst_event_new_flush_stop (FALSE));

  if (segment->format != GST_FORMAT_UNDEFINED)
    gst_pad_send_event (sinkpad, gst_event_new_segment (segment));

  gst_object_unref (sinkpad);
}
/* End-of-track handling: send EOS downstream through our src pad's
 * peer and clear the end_of_track flag.
 *
 * Fixes: the static "src" pad reference was leaked, and the peer may
 * be NULL when the src pad is unlinked (sending on / unreffing NULL is
 * invalid). */
static void
do_end_of_track (GstSpotSrc * spot)
{
  GstPad *src_pad = gst_element_get_static_pad (GST_ELEMENT (spot), "src");
  GstPad *peer_pad = gst_pad_get_peer (src_pad);

  if (peer_pad) {
    gst_pad_send_event (peer_pad, gst_event_new_eos ());
    gst_object_unref (peer_pad);
  }
  spot->end_of_track = FALSE;
  gst_object_unref (src_pad);
}
/* Test thread: push n_vbuffers 25 ms video buffers into @data (a sink
 * pad), preceded by stream-start + segment and followed by EOS.  The
 * expected running time of each buffer's end is queued on
 * v_timestamp_q for the checking side.  Globals (videodelay,
 * late_video, video_gaps, video_overlaps) inject timing anomalies. */
static gpointer
push_vbuffers (gpointer data)
{
  GstPad *pad = data;
  GstSegment segment;
  GstClockTime ts = 0;
  gint i;

  if (videodelay)
    g_usleep (2000);

  if (late_video)
    ts = 50 * GST_MSECOND;

  gst_pad_send_event (pad, gst_event_new_stream_start ("test"));
  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_send_event (pad, gst_event_new_segment (&segment));

  for (i = 0; i < n_vbuffers; i++) {
    GstBuffer *buf = gst_buffer_new_and_alloc (1000);
    GstClockTime *rtime = g_new (GstClockTime, 1);

    gst_buffer_memset (buf, 0, i, 1);

    GST_BUFFER_TIMESTAMP (buf) = ts;
    ts += 25 * GST_MSECOND;
    GST_BUFFER_DURATION (buf) = ts - GST_BUFFER_TIMESTAMP (buf);

    /* record the expected running time of this buffer's end */
    *rtime = gst_segment_to_running_time (&segment, GST_FORMAT_TIME, ts);
    g_queue_push_tail (&v_timestamp_q, rtime);

    /* after the 5th buffer optionally open a gap or create an overlap */
    if (i == 4) {
      if (video_gaps)
        ts += 10 * GST_MSECOND;
      else if (video_overlaps)
        ts -= 10 * GST_MSECOND;
    }
    fail_unless (gst_pad_chain (pad, buf) == GST_FLOW_OK);
  }
  gst_pad_send_event (pad, gst_event_new_eos ());
  return NULL;
}
/* Ghost-pad "linked" handler: request a fresh "src_%u" pad from
 * @element and make it the ghost pad's target.  A peer that is still
 * flushing gets a flush-start/flush-stop pair first so dataflow can
 * resume. */
static void
kms_element_set_target_on_linked (GstPad * pad, GstPad * peer,
    GstElement * element)
{
  GstPad *target = gst_element_get_request_pad (element, "src_%u");

  if (GST_PAD_IS_FLUSHING (peer)) {
    gst_pad_send_event (peer, gst_event_new_flush_start ());
    gst_pad_send_event (peer, gst_event_new_flush_stop (FALSE));
  }

  GST_DEBUG_OBJECT (pad, "Setting target %" GST_PTR_FORMAT, target);
  if (!gst_ghost_pad_set_target (GST_GHOST_PAD (pad), target))
    GST_ERROR_OBJECT (pad, "Can not set target pad");

  g_object_unref (target);
}
/* Kick off an image capture: optionally renegotiate the image caps,
 * then either let the photography interface prepare the capture or
 * reset the video source caps directly.
 *
 * Fixes: the GstPhotography interface object returned (with a ref) by
 * gst_bin_get_by_interface() was leaked, and gst_pad_get_allowed_caps()
 * may return NULL, which must not be passed to gst_caps_unref(). */
static gboolean
start_image_capture (GstWrapperCameraBinSrc * self)
{
  GstBaseCameraSrc *bcamsrc = GST_BASE_CAMERA_SRC (self);
  GstPhotography *photography =
      (GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
      GST_TYPE_PHOTOGRAPHY);
  gboolean ret = FALSE;
  GstCaps *caps;

  GST_DEBUG_OBJECT (self, "Starting image capture");
  gst_element_set_state (self->src_vid_src, GST_STATE_READY);

  if (self->image_renegotiate) {
    /* clean capsfilter caps so they don't interfere here */
    g_object_set (self->src_filter, "caps", NULL, NULL);
    if (self->src_zoom_filter)
      g_object_set (self->src_zoom_filter, "caps", NULL, NULL);

    caps = gst_pad_get_allowed_caps (self->imgsrc);
    gst_caps_replace (&self->image_capture_caps, caps);
    if (caps)
      gst_caps_unref (caps);

    /* FIXME - do we need to update basecamerasrc width/height somehow here?
     * if not, i think we need to do something about _when_ they get updated
     * to be sure that set_element_zoom doesn't use the wrong values */

    /* We caught this event in the src pad event handler and now we want to
     * actually push it upstream */
    gst_pad_send_event (self->outsel_imgpad, gst_event_new_reconfigure ());

    self->image_renegotiate = FALSE;
  }

  if (photography) {
    gst_element_set_state (self->src_vid_src, GST_STATE_PLAYING);
    GST_DEBUG_OBJECT (self, "prepare image capture caps %" GST_PTR_FORMAT,
        self->image_capture_caps);
    ret = gst_photography_prepare_for_capture (photography,
        (GstPhotographyCapturePrepared) img_capture_prepared,
        self->image_capture_caps, self);
    g_object_unref (photography);
  } else {
    g_mutex_unlock (&bcamsrc->capturing_mutex);
    gst_wrapper_camera_bin_reset_video_src_caps (self,
        self->image_capture_caps);
    g_mutex_lock (&bcamsrc->capturing_mutex);
    ret = TRUE;
    gst_element_set_state (self->src_vid_src, GST_STATE_PLAYING);
  }

  return ret;
}
/* Build the dynamic part of the A2DP sink for the negotiated @caps:
 * create the matching RTP payloader (SBC or MPEG), link it to the
 * avdtp sink, optionally forward the pending taglist, and configure
 * the payloader MTU from the link.
 *
 * Returns TRUE on success; FALSE for unknown media types or a failed
 * link.
 *
 * NOTE(review): gst_event_new_tag() is assumed to take ownership of
 * self->taglist here (GStreamer 0.10 semantics), which is why the
 * field is only NULLed and never freed — confirm against the GStreamer
 * version in use. */
static gboolean
gst_a2dp_sink_init_dynamic_elements (GstA2dpSink * self, GstCaps * caps)
{
  GstStructure *structure;
  GstEvent *event;
  gboolean crc;
  gchar *mode = NULL;

  structure = gst_caps_get_structure (caps, 0);

  /* first, we need to create our rtp payloader */
  if (gst_structure_has_name (structure, "audio/x-sbc")) {
    GST_LOG_OBJECT (self, "sbc media received");
    if (!gst_a2dp_sink_init_rtp_sbc_element (self))
      return FALSE;
  } else if (gst_structure_has_name (structure, "audio/mpeg")) {
    GST_LOG_OBJECT (self, "mp3 media received");
    if (!gst_a2dp_sink_init_rtp_mpeg_element (self))
      return FALSE;
  } else {
    GST_ERROR_OBJECT (self, "Unexpected media type");
    return FALSE;
  }

  if (!gst_element_link (GST_ELEMENT (self->rtp), GST_ELEMENT (self->sink))) {
    GST_ERROR_OBJECT (self, "couldn't link rtpsbcpay " "to avdtpsink");
    return FALSE;
  }

  /* check if we should push the taglist FIXME should we push this?
   * we can send the tags directly if needed */
  if (self->taglist != NULL && gst_structure_has_name (structure,
          "audio/mpeg")) {
    event = gst_event_new_tag (self->taglist);

    /* send directly the crc */
    if (gst_tag_list_get_boolean (self->taglist, "has-crc", &crc))
      gst_avdtp_sink_set_crc (self->sink, crc);

    if (gst_tag_list_get_string (self->taglist, "channel-mode", &mode))
      gst_avdtp_sink_set_channel_mode (self->sink, mode);

    gst_pad_send_event (self->ghostpad, event);
    self->taglist = NULL;
    g_free (mode);
  }

  g_object_set (self->rtp, "mtu",
      gst_avdtp_sink_get_link_mtu (self->sink), NULL);

  return TRUE;
}
int main(int argc, char *argv[]) { CustomData data; GstBus *bus; GstMessage *msg; GstStateChangeReturn ret; GstEvent *event; guint ii = 0; data.terminate = FALSE; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the elements */ data.pipeline = gst_parse_launch ("v4l2src name=src ! video/x-raw,framerate=24/1,width=640 ! queue ! videoconvert ! x264enc threads=0 bitrate=400 tune=zerolatency ! avimux ! multifilesink name=sink next-file=3 location=out%d.avi", NULL); data.sink = gst_bin_get_by_name( GST_BIN( data.pipeline), "sink"); if (!data.pipeline) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Start playing */ ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data.pipeline); return -1; } /* Listen to the bus */ bus = gst_element_get_bus (data.pipeline); do { msg = gst_bus_timed_pop_filtered (bus, 10 * GST_SECOND, GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Parse message */ if (msg != NULL) { handle_message (&data, msg); } else { g_print ("DO IT!!\n"); event = event_new_downstream_force_key_unit (GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, TRUE, ii++); gst_pad_send_event( gst_element_get_static_pad (data.sink, "sink"), event); } } while (!data.terminate); /* Free resources */ gst_object_unref (bus); gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0; }
/* GStreamer 0.10 variant: prime @element's sink pad with newsegment
 * events describing @segment.  Any accumulated base time is replayed
 * first as its own segment so downstream running time matches. */
static void
distribute_running_time (GstElement * element, const GstSegment * segment)
{
  GstPad *sinkpad = gst_element_get_static_pad (element, "sink");
  GstEvent *event;

  if (segment->accum) {
    event = gst_event_new_new_segment_full (FALSE, segment->rate,
        segment->applied_rate, segment->format, 0, segment->accum, 0);
    gst_pad_send_event (sinkpad, event);
  }

  event = gst_event_new_new_segment_full (FALSE, segment->rate,
      segment->applied_rate, segment->format, segment->start,
      segment->stop, segment->time);
  gst_pad_send_event (sinkpad, event);

  gst_object_unref (sinkpad);
}
/*
 * This function sends a dummy event to force blocked probe to be called
 */
static void
send_dummy_event (GstPad * pad, const gchar * name)
{
  GstElement *parent = gst_pad_get_parent_element (pad);
  GstEventType type;

  if (parent == NULL)
    return;

  /* match the event direction to the pad direction so the pad accepts
   * the custom event (serialized in both cases) */
  if (GST_PAD_IS_SINK (pad))
    type = GST_EVENT_TYPE_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED;
  else
    type = GST_EVENT_TYPE_UPSTREAM | GST_EVENT_TYPE_SERIALIZED;

  gst_pad_send_event (pad,
      gst_event_new_custom (type, gst_structure_new_empty (name)));

  g_object_unref (parent);
}
/* Src pad event handler: forward upstream events to the peer of our
 * video sink pad.
 *
 * Fix: when the video sink pad has no peer, the event was leaked (the
 * handler takes ownership of @event); it is now unreffed.  The TRUE
 * return in that case is kept to preserve the original behavior. */
static gboolean
gst_dvd_spu_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDVDSpu *dvdspu = GST_DVD_SPU (parent);
  GstPad *peer;
  gboolean res = TRUE;

  peer = gst_pad_get_peer (dvdspu->videosinkpad);
  if (peer) {
    res = gst_pad_send_event (peer, event);
    gst_object_unref (peer);
  } else {
    gst_event_unref (event);
  }

  return res;
}
/* Src pad event handler (GStreamer 0.10 signature): rescale incoming
 * TIME-format seeks by the configured speed factor before forwarding
 * them upstream; other events take the default path.
 *
 * Returns TRUE when the (possibly rewritten) event was handled.
 *
 * NOTE(review): the original seek event is unreffed immediately after
 * parsing, then a new one is built — on a non-TIME seek the function
 * breaks out with ret == FALSE and no event to forward, by design. */
static gboolean
speed_src_event (GstPad * pad, GstEvent * event)
{
  GstSpeed *filter;
  gboolean ret = FALSE;

  filter = GST_SPEED (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:{
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;

      gst_event_parse_seek (event, &rate, &format, &flags,
          &start_type, &start, &stop_type, &stop);
      gst_event_unref (event);

      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (filter, "only support seeks in TIME format");
        break;
      }

      /* scale target positions into upstream time (speed != 1.0
       * stretches/compresses the timeline) */
      if (start_type != GST_SEEK_TYPE_NONE && start != -1) {
        start *= filter->speed;
      }

      if (stop_type != GST_SEEK_TYPE_NONE && stop != -1) {
        stop *= filter->speed;
      }

      event = gst_event_new_seek (rate, format, flags, start_type, start,
          stop_type, stop);

      GST_LOG ("sending seek event: %" GST_PTR_FORMAT, event->structure);

      ret = gst_pad_send_event (GST_PAD_PEER (filter->sinkpad), event);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (filter);
  return ret;
}
/* GstPadForwardFunction: forward evdata->event to @pad's peer and fold
 * the outcome into evdata->result.  A failed SEEK on a non-seekable
 * source is forgiven (ret forced back to TRUE); a failure with a
 * pending flush resets the pad's flush bookkeeping.  Always returns
 * FALSE so every pad is visited.
 *
 * Fixes: @peer was unreffed inside the send block and then used again
 * by gst_pad_query() (use-after-unref) — the ref is now held until the
 * end; the seeking query was also leaked and is now unreffed. */
static gboolean
event_forward_func (GstPad * pad, EventData * evdata)
{
  gboolean ret = TRUE;
  GstPad *peer = gst_pad_get_peer (pad);
  GstAggregatorPadPrivate *padpriv = GST_AGGREGATOR_PAD (pad)->priv;

  if (peer) {
    ret = gst_pad_send_event (peer, gst_event_ref (evdata->event));
    GST_DEBUG_OBJECT (pad, "return of event push is %d", ret);
  }

  if (ret == FALSE) {
    if (GST_EVENT_TYPE (evdata->event) == GST_EVENT_SEEK)
      GST_ERROR_OBJECT (pad, "Event %" GST_PTR_FORMAT " failed",
          evdata->event);

    if (GST_EVENT_TYPE (evdata->event) == GST_EVENT_SEEK) {
      GstQuery *seeking = gst_query_new_seeking (GST_FORMAT_TIME);

      /* ret can only be FALSE when peer is non-NULL, so this is safe */
      if (gst_pad_query (peer, seeking)) {
        gboolean seekable;

        gst_query_parse_seeking (seeking, NULL, &seekable, NULL, NULL);
        if (seekable == FALSE) {
          GST_INFO_OBJECT (pad,
              "Source not seekable, We failed but it does not matter!");
          ret = TRUE;
        }
      } else {
        GST_ERROR_OBJECT (pad, "Query seeking FAILED");
      }
      gst_query_unref (seeking);
    }

    if (evdata->flush) {
      padpriv->pending_flush_start = FALSE;
      padpriv->pending_flush_stop = FALSE;
    }
  } else {
    evdata->one_actually_seeked = TRUE;
  }

  if (peer)
    gst_object_unref (peer);

  evdata->result &= ret;

  /* Always send to all pads */
  return FALSE;
}
/* GstNavigation interface: offset pointer coordinates when the video
 * is centered inside a larger window, then forward the navigation
 * event upstream through our sink pad's peer.
 *
 * Fixes: the navigation event was leaked on the "no window" early
 * return and when there is no valid peer pad; both paths now release
 * it (and any non-pad peer object). */
static void
gst_vdp_sink_navigation_send_event (GstNavigation * navigation,
    GstStructure * structure)
{
  VdpSink *vdp_sink = GST_VDP_SINK (navigation);
  GstEvent *event;
  gint x_offset, y_offset;
  gdouble x, y;
  GstPad *pad = NULL;

  event = gst_event_new_navigation (structure);

  /* We are not converting the pointer coordinates as there's no hardware
     scaling done here. The only possible scaling is done by videoscale and
     videoscale will have to catch those events and tranform the coordinates
     to match the applied scaling. So here we just add the offset if the image
     is centered in the window.  */

  /* We take the flow_lock while we look at the window */
  g_mutex_lock (vdp_sink->flow_lock);

  if (!vdp_sink->window) {
    g_mutex_unlock (vdp_sink->flow_lock);
    gst_event_unref (event);
    return;
  }

  x_offset = vdp_sink->window->width - GST_VIDEO_SINK_WIDTH (vdp_sink);
  y_offset = vdp_sink->window->height - GST_VIDEO_SINK_HEIGHT (vdp_sink);

  g_mutex_unlock (vdp_sink->flow_lock);

  if (x_offset > 0 && gst_structure_get_double (structure, "pointer_x", &x)) {
    x -= x_offset / 2;
    gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL);
  }
  if (y_offset > 0 && gst_structure_get_double (structure, "pointer_y", &y)) {
    y -= y_offset / 2;
    gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL);
  }

  pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (vdp_sink));

  if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) {
    gst_pad_send_event (pad, event);
    gst_object_unref (pad);
  } else {
    gst_event_unref (event);
    if (pad)
      gst_object_unref (pad);
  }
}
/* Pad-block completion callback: once the pipeline operation finished,
 * prime the freshly linked pad with a default newsegment and drop the
 * reference we were handed. */
static void
pipeline_op_done (GstPad * pad, gboolean blocked, GstPad * new_pad)
{
  if (new_pad == NULL)
    return;

  /* send a very unimaginative new segment through the new pad */
  gst_pad_send_event (new_pad,
      gst_event_new_new_segment (TRUE, 1.0, GST_FORMAT_DEFAULT, 0,
          GST_CLOCK_TIME_NONE, 0));

  gst_object_unref (new_pad);
}
/* Src pad event handler for the camera wrapper's ghost src pads.
 * RECONFIGURE on the image/video src pad only flags that mode for
 * renegotiation and swallows the event; everything else is routed to
 * the matching output-selector pad upstream. */
static gboolean
gst_wrapper_camera_bin_src_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (parent);
  GstPad *upstream_pad = NULL;
  gboolean ret = TRUE;

  GST_DEBUG_OBJECT (self, "Handling event %p %" GST_PTR_FORMAT, event, event);

  if (GST_EVENT_TYPE (event) == GST_EVENT_RECONFIGURE) {
    if (pad == self->imgsrc) {
      GST_DEBUG_OBJECT (self, "Image mode reconfigure event received");
      self->image_renegotiate = TRUE;
    } else if (pad == self->vidsrc) {
      GST_DEBUG_OBJECT (self, "Video mode reconfigure event received");
      self->video_renegotiate = TRUE;
    }
    if (pad == self->imgsrc || pad == self->vidsrc) {
      gst_event_unref (event);
      return ret;
    }
  }

  if (pad == self->imgsrc)
    upstream_pad = self->outsel_imgpad;
  else if (pad == self->vidsrc)
    upstream_pad = self->outsel_vidpad;

  if (upstream_pad == NULL) {
    GST_WARNING_OBJECT (self,
        "Event caught that doesn't have an upstream pad -"
        "this shouldn't be possible!");
    gst_event_unref (event);
    return FALSE;
  }

  return gst_pad_send_event (upstream_pad, event);
}
/* Called with lock held, drops the lock to send EOS to the
 * pad.
 *
 * Fix: the ctx srcpad may be unlinked, in which case
 * gst_pad_get_peer() returns NULL — previously that NULL was passed to
 * gst_pad_send_event() and gst_object_unref(), and the EOS event
 * leaked; all three are now guarded. */
static void
send_eos (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
{
  GstEvent *eos;
  GstPad *pad;

  eos = gst_event_new_eos ();
  pad = gst_pad_get_peer (ctx->srcpad);

  ctx->out_eos = TRUE;

  GST_INFO_OBJECT (splitmux, "Sending EOS on %" GST_PTR_FORMAT, pad);
  GST_SPLITMUX_UNLOCK (splitmux);
  if (pad) {
    gst_pad_send_event (pad, eos);
    gst_object_unref (pad);
  } else {
    gst_event_unref (eos);
  }
  GST_SPLITMUX_LOCK (splitmux);
}
/* GSourceFunc: install a blocking probe on agnosticbin_2's sink pad
 * and poke the pad with a dummy serialized event so the probe fires
 * immediately.  Returns FALSE to run only once. */
static gboolean
change_input_cb (gpointer pipeline)
{
  GstElement *agnosticbin2;
  GstPad *sink;

  agnosticbin2 = gst_bin_get_by_name (GST_BIN (pipeline), "agnosticbin_2");
  sink = gst_element_get_static_pad (agnosticbin2, "sink");

  gst_pad_add_probe (sink, GST_PAD_PROBE_TYPE_BLOCK, block_agnostic_sink,
      pipeline, NULL);

  // HACK: Sending a dummy event to ensure the block probe is called
  gst_pad_send_event (sink,
      gst_event_new_custom (GST_EVENT_TYPE_DOWNSTREAM,
          gst_structure_new_from_string ("dummy")));

  g_object_unref (sink);
  g_object_unref (agnosticbin2);

  return FALSE;
}
/* GstElement::release_pad vfunc: tear down an "audio_src*"/"video_src*"
 * ghost pad.  The requested target pad is handed back to the matching
 * agnosticbin, the pad (and its peer) are flushed so no thread stays
 * blocked in dataflow, the pad is deactivated when the element is at
 * or heading to PAUSED/PLAYING, and finally the pad is removed.
 *
 * Pads with any other name prefix are ignored. */
static void
kms_element_release_pad (GstElement * element, GstPad * pad)
{
  GstElement *agnosticbin;
  GstPad *target;
  GstPad *peer;

  /* route by pad-name prefix to the owning agnosticbin */
  if (g_str_has_prefix (GST_OBJECT_NAME (pad), "audio_src")) {
    agnosticbin = KMS_ELEMENT (element)->priv->audio_agnosticbin;
  } else if (g_str_has_prefix (GST_OBJECT_NAME (pad), "video_src")) {
    agnosticbin = KMS_ELEMENT (element)->priv->video_agnosticbin;
  } else {
    return;
  }

  // TODO: Remove pad if is a sinkpad

  target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));

  if (target != NULL) {
    if (agnosticbin != NULL) {
      gst_element_release_request_pad (agnosticbin, target);
    }
    g_object_unref (target);
  }

  peer = gst_pad_get_peer (pad);

  /* flush-start downstream before deactivating so nothing blocks */
  gst_pad_push_event (pad, gst_event_new_flush_start ());

  if (GST_STATE (element) >= GST_STATE_PAUSED
      || GST_STATE_PENDING (element) >= GST_STATE_PAUSED) {
    gst_pad_set_active (pad, FALSE);
  }

  if (peer) {
    /* clear the flushing state on the (now detached) peer */
    gst_pad_send_event (peer, gst_event_new_flush_stop (FALSE));
    g_object_unref (peer);
  }

  gst_element_remove_pad (element, pad);
}
/* Route an event into the image bin: downstream events enter through
 * the bin's sink pad, upstream events are handed to the sink element.
 *
 * Returns TRUE when the event was handled.
 *
 * Fix: this function takes ownership of @event; in the "no sink"
 * warning branch the event was previously leaked and is now unreffed. */
gboolean
gst_camerabin_image_send_event (GstElement * element, GstEvent * event)
{
  GstCameraBinImage *bin = GST_CAMERABIN_IMAGE (element);
  gboolean ret = FALSE;

  GST_INFO ("got %s event", GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_IS_DOWNSTREAM (event)) {
    ret = gst_pad_send_event (bin->sinkpad, event);
  } else {
    if (bin->sink) {
      ret = gst_element_send_event (bin->sink, event);
    } else {
      GST_WARNING ("upstream event handling failed");
      gst_event_unref (event);
    }
  }

  return ret;
}