/* Sink-pad event handler (GStreamer 0.10 signature: no parent argument).
 *
 * Serialized events arriving before the stream headers have been pushed
 * are queued so they are emitted after the headers; everything else takes
 * the default path.  Returns TRUE if the event was handled.
 */
static gboolean
vorbis_parse_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret;
  GstVorbisParse *parse;

  parse = GST_VORBIS_PARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      /* Reset on FLUSH_STOP, not FLUSH_START: queued data is dropped and
       * internal state reset when the flush completes, matching the
       * theora/kate parsers in this file and GStreamer flushing design. */
      vorbis_parse_clear_queue (parse);
      parse->prev_granulepos = -1;
      parse->prev_blocksize = -1;
      ret = gst_pad_event_default (pad, event);
      break;
    case GST_EVENT_EOS:
      /* push out whatever is still queued before EOS goes downstream */
      vorbis_parse_drain_queue_prematurely (parse);
      ret = gst_pad_event_default (pad, event);
      break;
    default:
      /* hold back serialized events until the headers have been sent */
      if (!parse->streamheader_sent && GST_EVENT_IS_SERIALIZED (event))
        ret = vorbis_parse_queue_event (parse, event);
      else
        ret = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (parse);

  return ret;
}
/* Handles events on the theora parser's sink pad.
 *
 * While headers still need to go out, serialized events are queued behind
 * them; flushes reset the queue and frame bookkeeping; EOS drains whatever
 * is pending first.  Returns the result of the default handler or of the
 * queueing operation.
 */
static gboolean
theora_parse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstTheoraParse *parse = GST_THEORA_PARSE (parent);
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      /* flush finished: drop queued output and forget frame state */
      theora_parse_clear_queue (parse);
      parse->prev_keyframe = -1;
      parse->prev_frame = -1;
      res = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_EOS:
      /* emit anything still pending before the EOS is forwarded */
      theora_parse_drain_queue_prematurely (parse);
      res = gst_pad_event_default (pad, parent, event);
      break;
    default:
      /* serialized events must not overtake the stream headers */
      if (parse->send_streamheader && GST_EVENT_IS_SERIALIZED (event))
        res = theora_parse_queue_event (parse, event);
      else
        res = gst_pad_event_default (pad, parent, event);
      break;
  }

  return res;
}
/* Sink-pad event handler for the kate parser.
 *
 * Queues serialized events (after CAPS) until the stream headers have been
 * pushed; on EOS, pushes any headers seen so far and drains the queue before
 * forwarding.  Returns TRUE on success.
 */
static gboolean
gst_kate_parse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret;
  GstKateParse *parse;

  parse = GST_KATE_PARSE (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      gst_kate_parse_clear_queue (parse);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_EOS:
      if (!parse->streamheader_sent) {
        GstFlowReturn flow;

        GST_DEBUG_OBJECT (parse, "Got EOS, pushing headers seen so far");
        /* Keep GstFlowReturn and gboolean separate: the previous code stored
         * the flow return in the gboolean 'ret', so a failure (a negative
         * flow value) was returned to the caller as a truthy gboolean, and
         * the EOS event was leaked (neither unref'd nor forwarded). */
        flow = gst_kate_parse_push_headers (parse);
        if (flow != GST_FLOW_OK) {
          gst_event_unref (event);
          ret = FALSE;
          break;
        }
      }
      gst_kate_parse_drain_queue_prematurely (parse);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    default:
      /* hold back serialized events (other than CAPS and earlier) until
       * the headers have gone out */
      if (!parse->streamheader_sent && GST_EVENT_IS_SERIALIZED (event)
          && GST_EVENT_TYPE (event) > GST_EVENT_CAPS)
        ret = gst_kate_parse_queue_event (parse, event);
      else
        ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
/* Blocking probe on the sink proxy pad.  Once the pad is blocked, inspect
 * the upstream caps and reconfigure the bin to either the raw conversion
 * chain or the passthrough (identity) chain, then remove the probe. */
static GstPadProbeReturn
pad_blocked_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstPlaySinkConvertBin *self = user_data;
  GstPad *upstream_peer;
  GstCaps *upstream_caps;
  gboolean is_raw;

  /* non-serialized events (e.g. flushes) must not be held by the block */
  if (GST_IS_EVENT (info->data) && !GST_EVENT_IS_SERIALIZED (info->data)) {
    GST_DEBUG_OBJECT (self, "Letting non-serialized event %s pass",
        GST_EVENT_TYPE_NAME (info->data));
    return GST_PAD_PROBE_PASS;
  }

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  GST_DEBUG_OBJECT (self, "Pad blocked");

  /* There must be a peer at this point */
  upstream_peer = gst_pad_get_peer (self->sinkpad);
  upstream_caps = gst_pad_get_current_caps (upstream_peer);
  if (!upstream_caps)
    upstream_caps = gst_pad_query_caps (upstream_peer, NULL);
  gst_object_unref (upstream_peer);

  is_raw = is_raw_caps (upstream_caps, self->audio);
  GST_DEBUG_OBJECT (self, "Caps %" GST_PTR_FORMAT " are raw: %d",
      upstream_caps, is_raw);
  gst_caps_unref (upstream_caps);

  /* nothing to do if the raw-ness did not change */
  if (is_raw == self->raw)
    goto unblock;
  self->raw = is_raw;

  /* detach both ghost pads before retargeting */
  gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
  gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

  if (is_raw) {
    GST_DEBUG_OBJECT (self, "Switching to raw conversion pipeline");
    if (self->conversion_elements)
      g_list_foreach (self->conversion_elements,
          (GFunc) gst_play_sink_convert_bin_on_element_added, self);
  } else {
    GST_DEBUG_OBJECT (self, "Switch to passthrough pipeline");
    gst_play_sink_convert_bin_on_element_added (self->identity, self);
  }

  gst_play_sink_convert_bin_set_targets (self, !is_raw);

unblock:
  self->sink_proxypad_block_id = 0;
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  return GST_PAD_PROBE_REMOVE;
}
/* Sink-pad event handler for the base video encoder (0.10-style signature).
 *
 * The subclass's ::event vfunc gets first refusal, then the base class
 * handler.  Unhandled events are either pushed downstream immediately or
 * stored on the current-frame event list, depending on their type.
 */
static gboolean
gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoEncoder *enc;
  GstBaseVideoEncoderClass *klass;
  gboolean handled = FALSE;
  gboolean ret = TRUE;

  enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (enc);

  GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
      GST_EVENT_TYPE_NAME (event));

  /* subclass first, then the default base-class handling */
  if (klass->event)
    handled = klass->event (enc, event);
  if (!handled)
    handled = gst_base_video_encoder_sink_eventfunc (enc, event);

  if (!handled) {
    /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
     * For EOS this is required because no buffer or serialized event
     * will come after EOS and nothing could trigger another
     * _finish_frame() call.
     *
     * If the subclass handles sending of EOS manually it can return
     * _DROPPED from ::finish() and all other subclasses should have
     * decoded/flushed all remaining data before this
     *
     * For FLUSH_STOP this is required because it is expected
     * to be forwarded immediately and no buffers are queued anyway. */
    gboolean forward_now = !GST_EVENT_IS_SERIALIZED (event)
        || GST_EVENT_TYPE (event) == GST_EVENT_EOS
        || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP;

    if (forward_now) {
      ret = gst_pad_push_event (enc->base_video_codec.srcpad, event);
    } else {
      /* serialized events travel with the frame currently being encoded */
      GST_BASE_VIDEO_CODEC_STREAM_LOCK (enc);
      enc->current_frame_events =
          g_list_prepend (enc->current_frame_events, event);
      GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (enc);
    }
  }

  GST_DEBUG_OBJECT (enc, "event handled");

  gst_object_unref (enc);
  return ret;
}
/**
 * progress_buffer_sink_event()
 *
 * Receives events on the sink pad (currently, data from javasource).
 * Flush events are swallowed while the source is seeking; serialized
 * non-flush events are enqueued into the progress buffer (unless EOS has
 * already been recorded); everything else is pushed straight downstream.
 */
static gboolean
progress_buffer_sink_event(GstPad *pad, GstObject *parent, GstEvent *event)
{
    ProgressBuffer *element = PROGRESS_BUFFER(parent);
    GstEventType   type = GST_EVENT_TYPE(event);
    gboolean       result = TRUE;

    // While the source itself is seeking, its flushes must not propagate.
    if (element->is_source_seeking &&
        (type == GST_EVENT_FLUSH_START || type == GST_EVENT_FLUSH_STOP))
    {
        // INLINE - gst_event_unref()
        gst_event_unref(event);
        return TRUE;
    }

    if (GST_EVENT_IS_SERIALIZED(event) && type != GST_EVENT_FLUSH_STOP)
    {
        g_mutex_lock(&element->lock);
        if (element->eos_status.eos)
        {
            // Nothing may be queued once EOS has been seen.
            // INLINE - gst_event_unref()
            gst_event_unref(event);
            result = FALSE;
        }
        else
            progress_buffer_enqueue_item(element, GST_MINI_OBJECT_CAST(event));
        g_mutex_unlock(&element->lock);
    }
    else
        result = gst_pad_push_event(element->srcpad, event);

    return result;
}
/* Sink-pad event handler for the subpicture pad.
 *
 * DVD custom events are either queued (serialized) or handled immediately
 * (OOB); CAPS/SEGMENT/GAP update local state and are consumed; flushes and
 * EOS are deliberately not forwarded downstream (the video stream continues
 * independently of subtitles).  Returns TRUE unless a default-forwarded
 * event fails.
 */
static gboolean
gst_dvd_spu_subpic_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDVDSpu *dvdspu = (GstDVDSpu *) parent;
  gboolean res = TRUE;

  /* Some events on the subpicture sink pad just get ignored, like
   * FLUSH_START */
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      res = gst_dvd_spu_subpic_set_caps (pad, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
    {
      const GstStructure *structure = gst_event_get_structure (event);
      gboolean need_push;

      if (!gst_structure_has_name (structure, "application/x-gst-dvd")) {
        res = gst_pad_event_default (pad, parent, event);
        break;
      }

      DVD_SPU_LOCK (dvdspu);
      if (GST_EVENT_IS_SERIALIZED (event)) {
        /* serialized: keep the event in order with queued SPU packets;
         * ownership of 'event' transfers to the SpuPacket */
        SpuPacket *spu_packet = g_new0 (SpuPacket, 1);
        GST_DEBUG_OBJECT (dvdspu,
            "Enqueueing DVD event on subpicture pad for later");
        spu_packet->event = event;
        g_queue_push_tail (dvdspu->pending_spus, spu_packet);
      } else {
        /* OOB events are acted upon right away */
        gst_dvd_spu_handle_dvd_event (dvdspu, event);
      }

      /* If the handle_dvd_event generated a pending frame, we
       * need to synchronise with the video pad's stream lock and push it.
       * This requires some dancing to preserve locking order and handle
       * flushes correctly */
      need_push = (dvdspu->pending_frame != NULL);
      DVD_SPU_UNLOCK (dvdspu);
      if (need_push) {
        GstBuffer *to_push = NULL;
        gboolean flushing;

        GST_LOG_OBJECT (dvdspu, "Going for stream lock");
        GST_PAD_STREAM_LOCK (dvdspu->videosinkpad);
        GST_LOG_OBJECT (dvdspu, "Got stream lock");

        GST_OBJECT_LOCK (dvdspu->videosinkpad);
        flushing = GST_PAD_IS_FLUSHING (dvdspu->videosinkpad);
        GST_OBJECT_UNLOCK (dvdspu->videosinkpad);

        DVD_SPU_LOCK (dvdspu);
        if (dvdspu->pending_frame == NULL || flushing) {
          /* Got flushed while waiting for the stream lock */
          DVD_SPU_UNLOCK (dvdspu);
        } else {
          to_push = dvdspu->pending_frame;
          dvdspu->pending_frame = NULL;

          DVD_SPU_UNLOCK (dvdspu);
          gst_pad_push (dvdspu->srcpad, to_push);
        }
        GST_LOG_OBJECT (dvdspu, "Dropping stream lock");
        GST_PAD_STREAM_UNLOCK (dvdspu->videosinkpad);
      }
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment seg;

      gst_event_copy_segment (event, &seg);

      /* Only print updates if they have an end time (don't print start_time
       * updates */
      GST_DEBUG_OBJECT (dvdspu, "subpic pad Segment: %" GST_SEGMENT_FORMAT,
          &seg);

      DVD_SPU_LOCK (dvdspu);
      dvdspu->subp_seg = seg;
      GST_LOG_OBJECT (dvdspu, "Subpicture segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_GAP:
    {
      GstClockTime timestamp, duration;

      /* Fixed: the previous code had a garbled token here ("×tamp",
       * mojibake of "&timestamp"), which did not compile. */
      gst_event_parse_gap (event, &timestamp, &duration);
      if (GST_CLOCK_TIME_IS_VALID (duration))
        timestamp += duration;

      DVD_SPU_LOCK (dvdspu);
      dvdspu->subp_seg.position = timestamp;
      GST_LOG_OBJECT (dvdspu, "Received GAP. Segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_event_unref (event);
      goto done;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (dvdspu, "Have flush-stop event on SPU pad");
      DVD_SPU_LOCK (dvdspu);
      gst_segment_init (&dvdspu->subp_seg, GST_FORMAT_UNDEFINED);
      gst_dvd_spu_flush_spu_info (dvdspu, TRUE);
      DVD_SPU_UNLOCK (dvdspu);

      /* We don't forward flushes on the spu pad */
      gst_event_unref (event);
      goto done;
    case GST_EVENT_EOS:
      /* drop EOS on the subtitle pad, it means there are no more subtitles,
       * video might still continue, though */
      gst_event_unref (event);
      goto done;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

done:
  return res;
}
/* Sink-pad event handler for gdkpixbufdec.
 *
 * CAPS configures the decoder and is consumed; EOS closes and flushes the
 * loader (posting an element error on hard failures); FLUSH_STOP/SEGMENT
 * reset the loader.  While the src pad has no caps yet, serialized events
 * (other than flush-stop/EOS) are held back in pending_events so they go
 * out after the caps.  Returns TRUE on success.
 */
static gboolean
gst_gdk_pixbuf_dec_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstFlowReturn res = GST_FLOW_OK;
  gboolean ret = TRUE, forward = TRUE;
  GstGdkPixbufDec *pixbuf;

  pixbuf = GST_GDK_PIXBUF_DEC (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_gdk_pixbuf_dec_sink_setcaps (pixbuf, caps);
      forward = FALSE;
      break;
    }
    case GST_EVENT_EOS:
      if (pixbuf->pixbuf_loader != NULL) {
        gdk_pixbuf_loader_close (pixbuf->pixbuf_loader, NULL);
        res = gst_gdk_pixbuf_dec_flush (pixbuf);
        g_object_unref (G_OBJECT (pixbuf->pixbuf_loader));
        pixbuf->pixbuf_loader = NULL;
        /* as long as we don't have flow returns for event functions we need
         * to post an error here, or the application might never know that
         * things failed */
        if (res != GST_FLOW_OK && res != GST_FLOW_FLUSHING
            && res != GST_FLOW_EOS && res != GST_FLOW_NOT_LINKED) {
          GST_ELEMENT_ERROR (pixbuf, STREAM, FAILED, (NULL),
              ("Flow: %s", gst_flow_get_name (res)));
          forward = FALSE;
          ret = FALSE;
        }
      }
      break;
    case GST_EVENT_FLUSH_STOP:
      g_list_free_full (pixbuf->pending_events,
          (GDestroyNotify) gst_event_unref);
      pixbuf->pending_events = NULL;
      /* Fall through: a flush also resets the loader below */
    case GST_EVENT_SEGMENT:
      /* Guard the segment parse: this point is also reached via the
       * FLUSH_STOP fall-through, and calling gst_event_parse_segment() on a
       * non-SEGMENT event fails its precondition and leaves 'segment'
       * uninitialized, so the dereference below was undefined behaviour. */
      if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
        const GstSegment *segment;

        gst_event_parse_segment (event, &segment);
        /* byte streams get parsed; any other format is frame-per-buffer */
        if (segment->format == GST_FORMAT_BYTES)
          pixbuf->packetized = FALSE;
        else
          pixbuf->packetized = TRUE;
      }
      if (pixbuf->pixbuf_loader != NULL) {
        gdk_pixbuf_loader_close (pixbuf->pixbuf_loader, NULL);
        g_object_unref (G_OBJECT (pixbuf->pixbuf_loader));
        pixbuf->pixbuf_loader = NULL;
      }
      break;
    default:
      break;
  }

  if (forward) {
    /* hold back serialized events until the src pad has caps, so they are
     * emitted downstream after the caps event */
    if (!gst_pad_has_current_caps (pixbuf->srcpad)
        && GST_EVENT_IS_SERIALIZED (event)
        && GST_EVENT_TYPE (event) > GST_EVENT_CAPS
        && GST_EVENT_TYPE (event) != GST_EVENT_FLUSH_STOP
        && GST_EVENT_TYPE (event) != GST_EVENT_EOS) {
      ret = TRUE;
      pixbuf->pending_events = g_list_prepend (pixbuf->pending_events, event);
    } else {
      ret = gst_pad_event_default (pad, parent, event);
    }
  } else {
    gst_event_unref (event);
  }
  return ret;
}