/**
 * gst_event_new_custom:
 * @type: The type of the new event
 * @structure: (transfer full): the structure for the event. The event will
 * take ownership of the structure.
 *
 * Create a new custom-typed event. This can be used for anything not
 * handled by other event-specific functions to pass an event to another
 * element.
 *
 * Make sure to allocate an event type with the #GST_EVENT_MAKE_TYPE macro,
 * assigning a free number and filling in the correct direction and
 * serialization flags.
 *
 * New custom events can also be created by subclassing the event type if
 * needed.
 *
 * Returns: (transfer full): the new custom event.
 */
GstEvent *
gst_event_new_custom (GstEventType type, GstStructure * structure)
{
  GstEventImpl *event = g_slice_new0 (GstEventImpl);

  GST_CAT_DEBUG (GST_CAT_EVENT, "creating new event %p %s %d", event,
      gst_event_type_get_name (type), type);

  /* Take ownership of the structure; it must not be owned by anything else. */
  if (structure != NULL
      && !gst_structure_set_parent_refcount (structure,
          &event->event.mini_object.refcount)) {
    /* structure already parented: release the half-built event and bail */
    g_slice_free1 (sizeof (GstEventImpl), event);
    g_warning ("structure is already owned by another object");
    return NULL;
  }

  gst_event_init (event, type);
  GST_EVENT_STRUCTURE (event) = structure;

  return GST_EVENT_CAST (event);
}
/* handle_queued_objects:
 * Pop at most one GstMiniObject from the app queue and dispatch on its
 * type: buffers are rendered, queries trap (none expected in this build),
 * events are handled and consumed. Returns FALSE so a one-shot
 * idle/timeout source is not rescheduled.
 */
static gboolean
handle_queued_objects (APP_STATE_T * state)
{
  GstMiniObject *object = NULL;

  g_mutex_lock (&state->queue_lock);
  if (state->flushing) {
    /* flushing: just wake any thread waiting on popped_obj and bail */
    g_cond_broadcast (&state->cond);
    goto beach;
  } else if (g_async_queue_length (state->queue) == 0) {
    goto beach;
  }

  if ((object = g_async_queue_try_pop (state->queue))) {
    if (GST_IS_BUFFER (object)) {
      GstBuffer *buffer = GST_BUFFER_CAST (object);
      update_image (state, buffer);
      render_scene (state);
      gst_buffer_unref (buffer);
      /* when not synchronizing on buffers, don't signal the producer */
      if (!SYNC_BUFFERS) {
        object = NULL;
      }
    } else if (GST_IS_QUERY (object)) {
      GstQuery *query = GST_QUERY_CAST (object);
      GstStructure *s = (GstStructure *) gst_query_get_structure (query);

      /* no custom queries are supported here; both branches trap */
      if (gst_structure_has_name (s, "not-used")) {
        g_assert_not_reached ();
      } else {
        g_assert_not_reached ();
      }
    } else if (GST_IS_EVENT (object)) {
      GstEvent *event = GST_EVENT_CAST (object);
      g_print ("\nevent %p %s\n", event,
          gst_event_type_get_name (GST_EVENT_TYPE (event)));

      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_EOS:
          flush_internal (state);
          break;
        default:
          break;
      }
      gst_event_unref (event);
      object = NULL;            /* events are fully consumed here */
    }
  }

  if (object) {
    /* publish the processed object and wake the thread waiting for it */
    state->popped_obj = object;
    g_cond_broadcast (&state->cond);
  }

beach:
  g_mutex_unlock (&state->queue_lock);

  return FALSE;
}
/* Forward every queued event downstream, in FIFO order, through the
 * default event handler (which takes ownership of each event). */
static void
theora_parse_drain_event_queue (GstTheoraParse * parse)
{
  gpointer item;

  while ((item = g_queue_pop_head (parse->event_queue)) != NULL)
    gst_pad_event_default (parse->sinkpad, GST_OBJECT_CAST (parse),
        GST_EVENT_CAST (item));
}
/* Forward every queued event downstream, in FIFO order, through the
 * default event handler (which takes ownership of each event). */
static void
vorbis_parse_drain_event_queue (GstVorbisParse * parse)
{
  gpointer item;

  while ((item = g_queue_pop_head (parse->event_queue)) != NULL)
    gst_pad_event_default (parse->sinkpad, GST_EVENT_CAST (item));
}
/* on_video_sink_data_flow:
 * Data-flow probe on the video sink pad. Counts rendered vs dropped
 * frames from upstream QOS events (atomically, so the display thread can
 * read them) and refreshes the FPS display once fps_update_interval has
 * elapsed. Always returns TRUE so data flow continues.
 * The #if 0 block is an older buffer-timestamp-based accounting scheme
 * kept for reference. */
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      /* NOTE(review): diff is an integer (GstClockTimeDiff); the 0.0
       * literal forces a float compare but yields the same result as
       * `diff <= 0`. diff <= 0 means the frame was on time. */
      if (diff <= 0.0) {
        g_atomic_int_inc (&self->frames_rendered);
      } else {
        g_atomic_int_inc (&self->frames_dropped);
      }

      ts = gst_util_get_timestamp ();
      if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (self->start_ts))) {
        /* first QOS event seen: initialize all bookkeeping timestamps */
        self->interval_ts = self->last_ts = self->start_ts = ts;
      }
      if (GST_CLOCK_DIFF (self->interval_ts, ts) > self->fps_update_interval) {
        display_current_fps (self);
        self->interval_ts = ts;
      }
    }
  }
  return TRUE;
}
/* vorbis_handle_type_packet:
 * Called once all Vorbis headers have been received: initialize the
 * libvorbis synthesis state (dsp-only for Tremolo, dsp + block otherwise),
 * then flush any events and tags that were queued while the decoder was
 * not yet initialized.
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR if libvorbis initialization
 * fails (an element error is posted). */
static GstFlowReturn
vorbis_handle_type_packet (GstVorbisDec * vd)
{
  GList *walk;
  gint res;

  g_assert (vd->initialized == FALSE);

#ifdef USE_TREMOLO
  if (G_UNLIKELY ((res = vorbis_dsp_init (&vd->vd, &vd->vi))))
    goto synthesis_init_error;
#else
  if (G_UNLIKELY ((res = vorbis_synthesis_init (&vd->vd, &vd->vi))))
    goto synthesis_init_error;

  if (G_UNLIKELY ((res = vorbis_block_init (&vd->vd, &vd->vb))))
    goto block_init_error;
#endif

  vd->initialized = TRUE;

  /* push out events that arrived before we could negotiate */
  if (vd->pendingevents) {
    for (walk = vd->pendingevents; walk; walk = g_list_next (walk))
      gst_pad_push_event (vd->srcpad, GST_EVENT_CAST (walk->data));
    g_list_free (vd->pendingevents);
    vd->pendingevents = NULL;
  }

  if (vd->taglist) {
    /* The tags have already been sent on the bus as messages. */
    gst_pad_push_event (vd->srcpad, gst_event_new_tag (vd->taglist));
    vd->taglist = NULL;
  }
  return GST_FLOW_OK;

  /* ERRORS */
synthesis_init_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
        (NULL), ("couldn't initialize synthesis (%d)", res));
    return GST_FLOW_ERROR;
  }
block_init_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
        (NULL), ("couldn't initialize block (%d)", res));
    return GST_FLOW_ERROR;
  }
}
/* Discard everything queued on the parser: release all pending buffers,
 * then all pending events. */
static void
theora_parse_clear_queue (GstTheoraParse * parse)
{
  gpointer item;

  while ((item = g_queue_pop_head (parse->buffer_queue)) != NULL)
    gst_buffer_unref (GST_BUFFER_CAST (item));

  while ((item = g_queue_pop_head (parse->event_queue)) != NULL)
    gst_event_unref (GST_EVENT_CAST (item));
}
/* Discard everything queued on the parser: release all pending buffers,
 * then all pending events. */
static void
gst_kate_parse_clear_queue (GstKateParse * parse)
{
  gpointer item;

  GST_DEBUG_OBJECT (parse, "Clearing queue");

  while ((item = g_queue_pop_head (parse->buffer_queue)) != NULL)
    gst_buffer_unref (GST_BUFFER_CAST (item));

  while ((item = g_queue_pop_head (parse->event_queue)) != NULL)
    gst_event_unref (GST_EVENT_CAST (item));
}
/* Dispose of a list of pending events on flush. Sticky events (other than
 * EOS and SEGMENT) are re-stored on @pad so they survive the flush; all
 * other events are dropped. The list itself is freed.
 * Returns NULL so callers can reset their list pointer in one statement. */
static GList *
_flush_events (GstPad * pad, GList * events)
{
  GList *l;

  for (l = events; l != NULL; l = l->next) {
    GstEvent *event = GST_EVENT_CAST (l->data);
    GstEventType etype = GST_EVENT_TYPE (event);
    gboolean restore;

    restore = (pad != NULL) && GST_EVENT_IS_STICKY (event)
        && etype != GST_EVENT_EOS && etype != GST_EVENT_SEGMENT;

    if (restore)
      gst_pad_store_sticky_event (pad, event);
    else
      gst_event_unref (event);
  }
  g_list_free (events);

  return NULL;
}
/* dequeue_buffer:
 * Pop objects from the appsink queue until a buffer is found and return
 * it (ownership transferred to the caller). Serialized events encountered
 * on the way update the cached state — CAPS replaces priv->last_caps,
 * SEGMENT is copied into priv->last_segment — and are then dropped.
 * NOTE(review): assumes the queue contains at least one buffer; an empty
 * queue would hand NULL to GST_IS_BUFFER. Callers presumably check
 * num_buffers (or wait) before calling — confirm at call sites.
 * NOTE(review): appears to require the appsink mutex to be held, since it
 * touches priv->queue and priv->num_buffers without locking — confirm. */
static GstBuffer *
dequeue_buffer (GstAppSink * appsink)
{
  GstAppSinkPrivate *priv = appsink->priv;
  GstBuffer *buffer;

  do {
    GstMiniObject *obj;

    obj = g_queue_pop_head (priv->queue);

    if (GST_IS_BUFFER (obj)) {
      buffer = GST_BUFFER_CAST (obj);
      GST_DEBUG_OBJECT (appsink, "dequeued buffer %p", buffer);
      priv->num_buffers--;
      break;
    } else if (GST_IS_EVENT (obj)) {
      GstEvent *event = GST_EVENT_CAST (obj);

      switch (GST_EVENT_TYPE (obj)) {
        case GST_EVENT_CAPS:
        {
          GstCaps *caps;

          gst_event_parse_caps (event, &caps);
          GST_DEBUG_OBJECT (appsink, "activating caps %" GST_PTR_FORMAT, caps);
          gst_caps_replace (&priv->last_caps, caps);
          break;
        }
        case GST_EVENT_SEGMENT:
          gst_event_copy_segment (event, &priv->last_segment);
          GST_DEBUG_OBJECT (appsink, "activated segment %" GST_SEGMENT_FORMAT,
              &priv->last_segment);
          break;
        default:
          break;
      }
      gst_mini_object_unref (obj);
    }
  } while (TRUE);

  return buffer;
}
/* on_video_sink_data_flow:
 * Data-flow probe on the video sink pad. Counts rendered vs dropped
 * frames from upstream QOS events and records the next expected timestamp.
 * Always returns TRUE so data flow continues.
 * The #if 0 block is an older buffer-timestamp-based accounting scheme
 * kept for reference; it consumes next_ts as computed below. */
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      /* remember when the next frame is expected */
      self->next_ts = ts + diff;
      /* NOTE(review): diff is an integer (GstClockTimeDiff); the 0.0
       * literal forces a float compare but yields the same result as
       * `diff <= 0`. diff <= 0 means the frame was on time. */
      if (diff <= 0.0) {
        self->frames_rendered++;
      } else {
        self->frames_dropped++;
      }
    }
  }
  return TRUE;
}
/* gst_rtp_pt_demux_src_event:
 * Upstream event handler for a payload-demuxed src pad. Custom upstream
 * events that do not yet carry a "payload" field are annotated with the
 * payload type of the pad they came from, so upstream elements can route
 * them; the event is made writable before its structure is modified
 * (the local `event` pointer may be replaced by the writable copy).
 * All events are then forwarded via the default handler. */
static gboolean
gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstRtpPtDemux *demux;
  const GstStructure *s;

  demux = GST_RTP_PT_DEMUX (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
    case GST_EVENT_CUSTOM_BOTH_OOB:
      s = gst_event_get_structure (event);
      if (s && !gst_structure_has_field (s, "payload")) {
        GSList *walk;

        /* hold the object lock while walking the srcpads list */
        GST_OBJECT_LOCK (demux);
        for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
          GstRtpPtDemuxPad *dpad = (GstRtpPtDemuxPad *) walk->data;

          if (dpad->pad == pad) {
            GstStructure *ws;

            event =
                GST_EVENT_CAST (gst_mini_object_make_writable
                (GST_MINI_OBJECT_CAST (event)));
            ws = gst_event_writable_structure (event);
            gst_structure_set (ws, "payload", G_TYPE_UINT, dpad->pt, NULL);
            break;
          }
        }
        GST_OBJECT_UNLOCK (demux);
      }
      break;
    default:
      break;
  }

  return gst_pad_event_default (pad, parent, event);
}
/* gst_rtp_ssrc_demux_src_event:
 * Upstream event handler for an SSRC-demuxed src pad (GStreamer 0.10-style
 * API: no parent argument, direct event->structure access). Custom
 * upstream events that do not yet carry an "ssrc" field are annotated with
 * the SSRC of the pad they came from (matching either the RTP or the RTCP
 * pad); the event is made writable before modification. All events are
 * then forwarded via the default handler.
 * NOTE(review): the srcpads list is walked without taking a lock here,
 * unlike the pt-demux counterpart — confirm whether callers serialize
 * access. */
static gboolean
gst_rtp_ssrc_demux_src_event (GstPad * pad, GstEvent * event)
{
  GstRtpSsrcDemux *demux;
  const GstStructure *s;

  demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
    case GST_EVENT_CUSTOM_BOTH_OOB:
      s = gst_event_get_structure (event);
      if (s && !gst_structure_has_field (s, "ssrc")) {
        GSList *walk;

        for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
          GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data;

          if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) {
            event =
                GST_EVENT_CAST (gst_mini_object_make_writable
                (GST_MINI_OBJECT_CAST (event)));
            gst_structure_set (event->structure, "ssrc", G_TYPE_UINT,
                dpad->ssrc, NULL);
            break;
          }
        }
      }
      break;
    default:
      break;
  }
  /* balance the ref taken by gst_pad_get_parent() */
  gst_object_unref (demux);

  return gst_pad_event_default (pad, event);
}
/* _gst_event_copy:
 * Deep-copy an event: a fresh GstEventImpl gets the source's type,
 * timestamp and seqnum, and — when the source carries a structure — a
 * copy of that structure re-parented to the new event's refcount.
 * Returns: (transfer full): the copied event. */
static GstEvent *
_gst_event_copy (GstEvent * event)
{
  GstEventImpl *copy;
  GstStructure *s;

  copy = g_slice_new0 (GstEventImpl);

  gst_event_init (copy, GST_EVENT_TYPE (event));

  GST_EVENT_TIMESTAMP (copy) = GST_EVENT_TIMESTAMP (event);
  GST_EVENT_SEQNUM (copy) = GST_EVENT_SEQNUM (event);

  s = GST_EVENT_STRUCTURE (event);
  if (s) {
    GST_EVENT_STRUCTURE (copy) = gst_structure_copy (s);
    /* Fix: "&copy->..." had been corrupted into the copyright sign
     * (an HTML-entity mangle of "&copy;"), which does not compile.
     * Restore the address-of expression so the copied structure is
     * parented to the new event's refcount. */
    gst_structure_set_parent_refcount (GST_EVENT_STRUCTURE (copy),
        &copy->event.mini_object.refcount);
  } else {
    GST_EVENT_STRUCTURE (copy) = NULL;
  }
  return GST_EVENT_CAST (copy);
}
/* handle_queued_objects:
 * Pop at most one GstMiniObject from the app queue and dispatch on its
 * type: buffers are rendered, the "eglglessink-allocate-eglimage" query
 * is answered by allocating an EGLImage-backed buffer, events are handled
 * and consumed. Returns FALSE so a one-shot idle/timeout source is not
 * rescheduled. (This variant uses the pointer-based GLib mutex/cond API.) */
static gboolean
handle_queued_objects (APP_STATE_T * state)
{
  GstMiniObject *object = NULL;

  g_mutex_lock (state->queue_lock);
  if (state->flushing) {
    /* flushing: just wake any thread waiting on popped_obj and bail */
    g_cond_broadcast (state->cond);
    goto beach;
  } else if (g_async_queue_length (state->queue) == 0) {
    goto beach;
  }

  if ((object = g_async_queue_try_pop (state->queue))) {
    if (GST_IS_BUFFER (object)) {
      GstBuffer *buffer = GST_BUFFER_CAST (object);
      update_image (state, buffer);
      render_scene (state);
      gst_buffer_unref (buffer);
      /* when not synchronizing on buffers, don't signal the producer */
      if (!SYNC_BUFFERS) {
        object = NULL;
      }
    } else if (GST_IS_QUERY (object)) {
      GstQuery *query = GST_QUERY_CAST (object);
      GstStructure *s = (GstStructure *) gst_query_get_structure (query);

      if (gst_structure_has_name (s, "eglglessink-allocate-eglimage")) {
        GstBuffer *buffer;
        GstVideoFormat format;
        gint width, height;
        GValue v = { 0, };

        /* format/width/height are required fields of this query */
        if (!gst_structure_get_enum (s, "format", GST_TYPE_VIDEO_FORMAT,
                (gint *) & format)
            || !gst_structure_get_int (s, "width", &width)
            || !gst_structure_get_int (s, "height", &height)) {
          g_assert_not_reached ();
        }

        buffer =
            gst_egl_allocate_eglimage (state,
            GST_EGL_IMAGE_BUFFER_POOL (state->pool)->allocator, format,
            width, height);
        /* answer the query by attaching the buffer as a pointer value */
        g_value_init (&v, G_TYPE_POINTER);
        g_value_set_pointer (&v, buffer);
        gst_structure_set_value (s, "buffer", &v);
        g_value_unset (&v);
      } else {
        /* no other queries are supported */
        g_assert_not_reached ();
      }
    } else if (GST_IS_EVENT (object)) {
      GstEvent *event = GST_EVENT_CAST (object);
      g_print ("\nevent %p %s\n", event,
          gst_event_type_get_name (GST_EVENT_TYPE (event)));

      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_EOS:
          flush_internal (state);
          break;
        default:
          break;
      }
      gst_event_unref (event);
      object = NULL;            /* events are fully consumed here */
    }
  }

  if (object) {
    /* publish the processed object and wake the thread waiting for it */
    state->popped_obj = object;
    g_cond_broadcast (state->cond);
  }

beach:
  g_mutex_unlock (state->queue_lock);

  return FALSE;
}
/**
 * progress_buffer_enqueue_item()
 *
 * Add an item in the queue. Must be called in the locked context. Item may be event or data.
 *
 * Buffers are written to the backing cache and advance the sink segment
 * position (also feeding the bandwidth estimate); EOS clamps the segment
 * stop and SEGMENT events (BYTES format only) reset or recreate the cache.
 * Other events are dropped. Returns GST_FLOW_ERROR on invalid segments or
 * cache failures, GST_FLOW_OK otherwise.
 * Ownership: buffers are NOT unreffed here (presumably the caller keeps
 * that responsibility — confirm at call sites); events are consumed on
 * every path except SEGMENT, whose ownership passes to
 * progress_buffer_set_pending_event().
 */
static GstFlowReturn progress_buffer_enqueue_item(ProgressBuffer *element, GstMiniObject *item)
{
    gboolean signal = FALSE;

    if (GST_IS_BUFFER (item))
    {
        gdouble elapsed;

        // update sink segment position
        element->sink_segment.position = GST_BUFFER_OFFSET(GST_BUFFER(item))
            + gst_buffer_get_size (GST_BUFFER(item));

        if(element->sink_segment.stop < element->sink_segment.position) // This must never happen.
            return GST_FLOW_ERROR;

        cache_write_buffer(element->cache, GST_BUFFER(item));

        // accumulate bytes over a >1s window to estimate bandwidth
        elapsed = g_timer_elapsed(element->bandwidth_timer, NULL);
        element->subtotal += gst_buffer_get_size (GST_BUFFER(item));

        if (elapsed > 1.0)
        {
            element->bandwidth = element->subtotal/elapsed;
            element->subtotal = 0;
            g_timer_start(element->bandwidth_timer);
        }

        // send buffer progress position up (used to track buffer fill, etc.)
        signal = send_position_message(element, signal);
    }
    else if (GST_IS_EVENT (item))
    {
        GstEvent *event = GST_EVENT_CAST (item);

        switch (GST_EVENT_TYPE (event))
        {
            case GST_EVENT_EOS:
                element->eos_status.eos = TRUE;
                // clamp the segment stop to what was actually received
                if (element->sink_segment.position < element->sink_segment.stop)
                    element->sink_segment.stop = element->sink_segment.position;

                progress_buffer_set_pending_event(element, NULL);
                signal = send_position_message(element, TRUE);
                gst_event_unref(event); // INLINE - gst_event_unref()
                break;

            case GST_EVENT_SEGMENT:
            {
                GstSegment segment;
                element->unexpected = FALSE;
                gst_event_copy_segment (event, &segment);

                // only byte-addressed segments can back a cache
                if (segment.format != GST_FORMAT_BYTES)
                {
                    gst_element_message_full(GST_ELEMENT(element),
                        GST_MESSAGE_ERROR, GST_STREAM_ERROR, GST_STREAM_ERROR_FORMAT,
                        g_strdup("GST_FORMAT_BYTES buffers expected."), NULL,
                        ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                    gst_event_unref(event); // INLINE - gst_event_unref()
                    return GST_FLOW_ERROR;
                }

                // unbounded (live) content is not supported
                if (segment.stop - segment.start <= 0)
                {
                    gst_element_message_full(GST_ELEMENT(element),
                        GST_MESSAGE_ERROR, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
                        g_strdup("Only limited content is supported by progressbuffer."), NULL,
                        ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                    gst_event_unref(event); // INLINE - gst_event_unref()
                    return GST_FLOW_ERROR;
                }

                if ((segment.flags & GST_SEGMENT_FLAG_UPDATE) == GST_SEGMENT_FLAG_UPDATE) // Updating segments create new cache.
                {
                    if (element->cache)
                        destroy_cache(element->cache);
                    element->cache = create_cache();
                    if (!element->cache)
                    {
                        gst_element_message_full(GST_ELEMENT(element),
                            GST_MESSAGE_ERROR, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ_WRITE,
                            g_strdup("Couldn't create backing cache"), NULL,
                            ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                        gst_event_unref(event); // INLINE - gst_event_unref()
                        return GST_FLOW_ERROR;
                    }
                }
                else
                {
                    // non-updating segment: rewind the existing cache
                    cache_set_write_position(element->cache, 0);
                    cache_set_read_position(element->cache, 0);
                    element->cache_read_offset = segment.start;
                }

                gst_segment_copy_into (&segment, &element->sink_segment);
                // event ownership transfers to the pending-event slot here
                progress_buffer_set_pending_event(element, event);
                element->instant_seek = TRUE;

                signal = send_position_message(element, TRUE);
                break;
            }
            default:
                gst_event_unref(event); // INLINE - gst_event_unref()
                break;
        }
    }

    if (signal)
        g_cond_signal(&element->add_cond);

    return GST_FLOW_OK;
}
/* theora_handle_type_packet:
 * Called once all Theora headers are in: derive the pixel-aspect-ratio and
 * output geometry from the stream info (optionally cropping to the visible
 * picture), allocate the libtheora decoder, enable any requested telemetry
 * visualisations, set the src caps, and flush events/tags that were queued
 * while the decoder had no header.
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR on an unknown pixel format.
 * (@packet is unused here; the headers were already consumed.) */
static GstFlowReturn
theora_handle_type_packet (GstTheoraDec * dec, ogg_packet * packet)
{
  GstCaps *caps;
  gint par_num, par_den;
  GstFlowReturn ret = GST_FLOW_OK;
  GList *walk;
  guint32 fourcc;

  GST_DEBUG_OBJECT (dec, "fps %d/%d, PAR %d/%d",
      dec->info.fps_numerator, dec->info.fps_denominator,
      dec->info.aspect_numerator, dec->info.aspect_denominator);

  /* calculate par
   * the info.aspect_* values reflect PAR;
   * 0:x and x:0 are allowed and can be interpreted as 1:1.
   */
  if (dec->have_par) {
    /* we had a par on the sink caps, override the encoded par */
    GST_DEBUG_OBJECT (dec, "overriding with input PAR");
    par_num = dec->par_num;
    par_den = dec->par_den;
  } else {
    /* take encoded par */
    par_num = dec->info.aspect_numerator;
    par_den = dec->info.aspect_denominator;
  }
  if (par_num == 0 || par_den == 0) {
    par_num = par_den = 1;
  }
  /* theora has:
   *
   *  width/height : dimension of the encoded frame
   *  pic_width/pic_height : dimension of the visible part
   *  pic_x/pic_y : offset in encoded frame where visible part starts
   */
  GST_DEBUG_OBJECT (dec, "dimension %dx%d, PAR %d/%d", dec->info.pic_width,
      dec->info.pic_height, par_num, par_den);
  GST_DEBUG_OBJECT (dec, "frame dimension %dx%d, offset %d:%d",
      dec->info.pic_width, dec->info.pic_height,
      dec->info.pic_x, dec->info.pic_y);

  /* map the Theora pixel format to an output fourcc and bpp estimate */
  if (dec->info.pixel_fmt == TH_PF_420) {
    dec->output_bpp = 12;       /* Average bits per pixel. */
    fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
  } else if (dec->info.pixel_fmt == TH_PF_422) {
    dec->output_bpp = 16;
    fourcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
  } else if (dec->info.pixel_fmt == TH_PF_444) {
    dec->output_bpp = 24;
    fourcc = GST_MAKE_FOURCC ('Y', '4', '4', '4');
  } else {
    GST_ERROR_OBJECT (dec, "Invalid pixel format %d", dec->info.pixel_fmt);
    return GST_FLOW_ERROR;
  }

  if (dec->crop) {
    dec->width = dec->info.pic_width;
    dec->height = dec->info.pic_height;
    dec->offset_x = dec->info.pic_x;
    dec->offset_y = dec->info.pic_y;
    /* Ensure correct offsets in chroma for formats that need it
     * by rounding the offset. libtheora will add proper pixels,
     * so no need to handle them ourselves. */
    if (dec->offset_x & 1 && dec->info.pixel_fmt != TH_PF_444) {
      dec->offset_x--;
      dec->width++;
    }
    if (dec->offset_y & 1 && dec->info.pixel_fmt == TH_PF_420) {
      dec->offset_y--;
      dec->height++;
    }
  } else {
    /* no cropping, use the encoded dimensions */
    dec->width = dec->info.frame_width;
    dec->height = dec->info.frame_height;
    dec->offset_x = 0;
    dec->offset_y = 0;
  }

  GST_DEBUG_OBJECT (dec, "after fixup frame dimension %dx%d, offset %d:%d",
      dec->width, dec->height, dec->offset_x, dec->offset_y);

  /* done */
  dec->decoder = th_decode_alloc (&dec->info, dec->setup);

  /* NOTE(review): th_decode_ctl() returns 0 on success and TH_EIMPL when a
   * control is not implemented, so `!= TH_EIMPL` warns even on success —
   * looks inverted; confirm whether `== TH_EIMPL` (or a plain non-zero
   * check) was intended. Applies to all four telemetry calls below. */
  if (th_decode_ctl (dec->decoder, TH_DECCTL_SET_TELEMETRY_MV,
          &dec->telemetry_mv, sizeof (dec->telemetry_mv)) != TH_EIMPL) {
    GST_WARNING_OBJECT (dec, "Could not enable MV visualisation");
  }
  if (th_decode_ctl (dec->decoder, TH_DECCTL_SET_TELEMETRY_MBMODE,
          &dec->telemetry_mbmode, sizeof (dec->telemetry_mbmode)) != TH_EIMPL) {
    GST_WARNING_OBJECT (dec, "Could not enable MB mode visualisation");
  }
  if (th_decode_ctl (dec->decoder, TH_DECCTL_SET_TELEMETRY_QI,
          &dec->telemetry_qi, sizeof (dec->telemetry_qi)) != TH_EIMPL) {
    GST_WARNING_OBJECT (dec, "Could not enable QI mode visualisation");
  }
  if (th_decode_ctl (dec->decoder, TH_DECCTL_SET_TELEMETRY_BITS,
          &dec->telemetry_bits, sizeof (dec->telemetry_bits)) != TH_EIMPL) {
    GST_WARNING_OBJECT (dec, "Could not enable BITS mode visualisation");
  }

  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, fourcc,
      "framerate", GST_TYPE_FRACTION,
      dec->info.fps_numerator, dec->info.fps_denominator,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, par_num, par_den,
      "width", G_TYPE_INT, dec->width, "height", G_TYPE_INT, dec->height,
      "color-matrix", G_TYPE_STRING, "sdtv",
      "chroma-site", G_TYPE_STRING, "jpeg", NULL);
  gst_pad_set_caps (dec->srcpad, caps);
  gst_caps_unref (caps);

  dec->have_header = TRUE;

  /* push out events that were queued while we had no header */
  if (dec->pendingevents) {
    for (walk = dec->pendingevents; walk; walk = g_list_next (walk))
      gst_pad_push_event (dec->srcpad, GST_EVENT_CAST (walk->data));
    g_list_free (dec->pendingevents);
    dec->pendingevents = NULL;
  }

  if (dec->tags) {
    gst_element_found_tags_for_pad (GST_ELEMENT_CAST (dec),
        dec->srcpad, dec->tags);
    dec->tags = NULL;
  }

  return ret;
}