/* GObject finalize vfunc: release every piece of subtitle state the
 * element owns, then chain up to the parent class. */
static void
gst_dvbsub_overlay_finalize (GObject * object)
{
  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (object);
  DVBSubtitles *queued;

  /* Drain and free every subtitle page still waiting to be shown */
  for (queued = g_queue_pop_head (overlay->pending_subtitles); queued != NULL;
      queued = g_queue_pop_head (overlay->pending_subtitles))
    dvb_subtitles_free (queued);
  g_queue_free (overlay->pending_subtitles);

  if (overlay->current_subtitle != NULL)
    dvb_subtitles_free (overlay->current_subtitle);
  overlay->current_subtitle = NULL;

  if (overlay->current_comp != NULL)
    gst_video_overlay_composition_unref (overlay->current_comp);
  overlay->current_comp = NULL;

  if (overlay->dvb_sub != NULL)
    dvb_sub_free (overlay->dvb_sub);

  g_mutex_clear (&overlay->dvbsub_mutex);

  /* Chain up */
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Drop all queued and currently-shown subtitle state and recreate the
 * DVB subtitle decoder from scratch.  Used on flush so that no stale
 * stream state survives across a seek. */
static void
gst_dvbsub_overlay_flush_subtitles (GstDVBSubOverlay * render)
{
  DVBSubtitles *queued;

  g_mutex_lock (&render->dvbsub_mutex);

  /* Throw away every page that has not been displayed yet */
  for (queued = g_queue_pop_head (render->pending_subtitles); queued != NULL;
      queued = g_queue_pop_head (render->pending_subtitles))
    dvb_subtitles_free (queued);

  if (render->current_subtitle != NULL)
    dvb_subtitles_free (render->current_subtitle);
  render->current_subtitle = NULL;

  if (render->current_comp != NULL)
    gst_video_overlay_composition_unref (render->current_comp);
  render->current_comp = NULL;

  /* Recreate the decoder and re-register our page callback on it */
  if (render->dvb_sub != NULL)
    dvb_sub_free (render->dvb_sub);
  render->dvb_sub = dvb_sub_new ();
  {
    DvbSubCallbacks dvbsub_callbacks = { &new_dvb_subtitles_cb, };

    dvb_sub_set_callbacks (render->dvb_sub, &dvbsub_callbacks, render);
  }

  render->last_text_pts = GST_CLOCK_TIME_NONE;
  render->pending_sub = FALSE;

  g_mutex_unlock (&render->dvbsub_mutex);
}
/* Rebuild the cached overlay composition (overlay->comp) from the current
 * overlay image buffer (overlay->pixels) and the position/size/alpha
 * properties.  Leaves overlay->comp as NULL when the overlay is fully
 * transparent or no image is set. */
static void
gst_gdk_pixbuf_overlay_update_composition (GstGdkPixbufOverlay * overlay)
{
  GstVideoOverlayComposition *comp;
  GstVideoOverlayRectangle *rect;
  GstVideoMeta *overlay_meta;
  gint x, y, width, height;
  gint video_width =
      GST_VIDEO_INFO_WIDTH (&GST_VIDEO_FILTER (overlay)->in_info);
  gint video_height =
      GST_VIDEO_INFO_HEIGHT (&GST_VIDEO_FILTER (overlay)->in_info);

  /* Drop the stale composition; it is rebuilt below if needed */
  if (overlay->comp) {
    gst_video_overlay_composition_unref (overlay->comp);
    overlay->comp = NULL;
  }

  /* Fully transparent or no overlay image configured: nothing to render */
  if (overlay->alpha == 0.0 || overlay->pixels == NULL)
    return;

  overlay_meta = gst_buffer_get_video_meta (overlay->pixels);

  /* The overlay buffer is expected to carry a video meta with its
   * dimensions; bail out instead of dereferencing NULL if it doesn't */
  if (overlay_meta == NULL) {
    GST_WARNING_OBJECT (overlay, "overlay buffer has no video meta");
    return;
  }

  /* Negative offsets position the overlay from the right/bottom edge */
  x = overlay->offset_x < 0 ?
      video_width + overlay->offset_x - overlay_meta->width +
      (overlay->relative_x * overlay_meta->width) :
      overlay->offset_x + (overlay->relative_x * overlay_meta->width);

  y = overlay->offset_y < 0 ?
      video_height + overlay->offset_y - overlay_meta->height +
      (overlay->relative_y * overlay_meta->height) :
      overlay->offset_y + (overlay->relative_y * overlay_meta->height);

  /* A configured width/height of 0 means "use the image's natural size" */
  width = overlay->overlay_width;
  if (width == 0)
    width = overlay_meta->width;

  height = overlay->overlay_height;
  if (height == 0)
    height = overlay_meta->height;

  GST_DEBUG_OBJECT (overlay, "overlay image dimensions: %d x %d, alpha=%.2f",
      overlay_meta->width, overlay_meta->height, overlay->alpha);
  /* Fixed: width and height were previously passed in swapped order,
   * making the "WxH" debug output print HxW */
  GST_DEBUG_OBJECT (overlay, "properties: x,y: %d,%d (%g%%,%g%%) - WxH: %dx%d",
      overlay->offset_x, overlay->offset_y, overlay->relative_x * 100.0,
      overlay->relative_y * 100.0, overlay->overlay_width,
      overlay->overlay_height);
  GST_DEBUG_OBJECT (overlay, "overlay rendered: %d x %d @ %d,%d (onto %d x %d)",
      width, height, x, y, video_width, video_height);

  rect = gst_video_overlay_rectangle_new_raw (overlay->pixels,
      x, y, width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);

  if (overlay->alpha != 1.0)
    gst_video_overlay_rectangle_set_global_alpha (rect, overlay->alpha);

  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  overlay->comp = comp;
}
/* Rebuild the cached overlay composition (overlay->comp) from the raw
 * ARGB pixel buffer and the position/size/alpha properties.  Leaves
 * overlay->comp as NULL when the overlay is fully transparent. */
static void
gst_gdk_pixbuf_overlay_update_composition (GstGdkPixbufOverlay * overlay)
{
  GstVideoOverlayComposition *comp;
  GstVideoOverlayRectangle *rect;
  gint x, y, width, height;

  /* Drop the stale composition; it is rebuilt below if needed */
  if (overlay->comp) {
    gst_video_overlay_composition_unref (overlay->comp);
    overlay->comp = NULL;
  }

  /* Fully transparent overlay: nothing to render */
  if (overlay->alpha == 0.0)
    return;

  x = overlay->offset_x + (overlay->relative_x * overlay->pixels_width);
  y = overlay->offset_y + (overlay->relative_y * overlay->pixels_height);

  /* FIXME: this should work, but seems to crash */
  if (x < 0)
    x = 0;
  if (y < 0)
    y = 0;

  /* A configured width/height of 0 means "use the image's natural size" */
  width = overlay->overlay_width;
  if (width == 0)
    width = overlay->pixels_width;

  height = overlay->overlay_height;
  if (height == 0)
    height = overlay->pixels_height;

  GST_DEBUG_OBJECT (overlay, "overlay image dimensions: %d x %d, alpha=%.2f",
      overlay->pixels_width, overlay->pixels_height, overlay->alpha);
  /* Fixed: width and height were previously passed in swapped order,
   * making the "WxH" debug output print HxW */
  GST_DEBUG_OBJECT (overlay, "properties: x,y: %d,%d (%g%%,%g%%) - WxH: %dx%d",
      overlay->offset_x, overlay->offset_y, overlay->relative_x * 100.0,
      overlay->relative_y * 100.0, overlay->overlay_width,
      overlay->overlay_height);
  GST_DEBUG_OBJECT (overlay, "overlay rendered: %d x %d @ %d,%d (onto %d x %d)",
      width, height, x, y, overlay->width, overlay->height);

  rect = gst_video_overlay_rectangle_new_argb (overlay->pixels,
      overlay->pixels_width, overlay->pixels_height, overlay->pixels_stride,
      x, y, width, height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);

  if (overlay->alpha != 1.0)
    gst_video_overlay_rectangle_set_global_alpha (rect, overlay->alpha);

  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  overlay->comp = comp;
}
/* GstBaseTransform::stop vfunc — release the cached composition and the
 * overlay image buffer.  Always succeeds. */
static gboolean
gst_gdk_pixbuf_overlay_stop (GstBaseTransform * trans)
{
  GstGdkPixbufOverlay *overlay = GST_GDK_PIXBUF_OVERLAY (trans);

  /* Drop our reference to the overlay image (no-op if already NULL) */
  gst_buffer_replace (&overlay->pixels, NULL);

  if (overlay->comp == NULL)
    return TRUE;

  gst_video_overlay_composition_unref (overlay->comp);
  overlay->comp = NULL;

  return TRUE;
}
/* Pad probe callback: blends the (file-global) logo buffer onto each
 * passing video buffer at a position derived from a running frame count.
 * Always lets the buffer through, even when the overlay cannot be drawn. */
static GstPadProbeReturn
buffer_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstVideoOverlayRectangle *rect;
  GstVideoOverlayComposition *comp;
  GstVideoFrame frame;
  GstVideoMeta *vmeta;
  GstVideoInfo vinfo;
  GstCaps *caps;
  gint x, y;

  /* Fixed: gst_pad_get_current_caps() can return NULL and
   * gst_video_info_from_caps() can fail; both were unchecked before */
  caps = gst_pad_get_current_caps (pad);
  if (caps == NULL) {
    g_warning ("no caps on pad yet, passing buffer through unmodified");
    return GST_PAD_PROBE_OK;
  }
  if (!gst_video_info_from_caps (&vinfo, caps)) {
    g_warning ("could not parse video info from caps");
    gst_caps_unref (caps);
    return GST_PAD_PROBE_OK;
  }
  gst_caps_unref (caps);

  /* We blend in place, so the buffer must be writable */
  info->data = gst_buffer_make_writable (info->data);

  vmeta = gst_buffer_get_video_meta (logo_buf);
  /* The logo buffer is created by us with a video meta attached */
  g_assert (vmeta != NULL);

  calculate_position (&x, &y, vmeta->width, vmeta->height, ++count);

  GST_LOG ("%3d, %3d", x, y);

  rect = gst_video_overlay_rectangle_new_raw (logo_buf, x, y,
      vmeta->width, vmeta->height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  /* Fixed: gst_video_frame_map() can fail; blending into an unmapped
   * frame was previously possible */
  if (!gst_video_frame_map (&frame, &vinfo, info->data, GST_MAP_READWRITE)) {
    g_warning ("failed to map video buffer");
    gst_video_overlay_composition_unref (comp);
    return GST_PAD_PROBE_OK;
  }

  if (!gst_video_overlay_composition_blend (comp, &frame))
    g_warning ("Error blending overlay at position (%d,%d)", x, y);

  gst_video_frame_unmap (&frame);
  gst_video_overlay_composition_unref (comp);

  return GST_PAD_PROBE_OK;
}
/* Video sink pad chain function.
 *
 * Clips the incoming video buffer against the video segment, then walks
 * the queue of decoded subtitle pages to pick the page (if any) that
 * should be visible for this frame's running time, rebuilds the overlay
 * composition when the page changes, expires the current page when its
 * page_time_out has passed, and finally either attaches the composition
 * as a meta or blends it into the frame before pushing downstream.
 *
 * Returns the flow return of the downstream push; drops out-of-segment
 * or untimestamped buffers with GST_FLOW_OK. */
static GstFlowReturn
gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 start, stop;
  guint64 cstart, cstop;
  gboolean in_seg;
  GstClockTime vid_running_time, vid_running_time_end;

  /* Cannot process video before caps have been negotiated */
  if (GST_VIDEO_INFO_FORMAT (&overlay->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return GST_FLOW_NOT_NEGOTIATED;

  /* Subtitle scheduling is timestamp-driven; untimestamped frames are
   * useless here */
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto missing_timestamp;

  start = GST_BUFFER_TIMESTAMP (buffer);

  GST_LOG_OBJECT (overlay,
      "Video segment: %" GST_SEGMENT_FORMAT " --- Subtitle position: %"
      GST_TIME_FORMAT " --- BUFFER: ts=%" GST_TIME_FORMAT,
      &overlay->video_segment,
      GST_TIME_ARGS (overlay->subtitle_segment.position),
      GST_TIME_ARGS (start));

  /* ignore buffers that are outside of the current segment */
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
  } else {
    stop = start + GST_BUFFER_DURATION (buffer);
  }

  in_seg = gst_segment_clip (&overlay->video_segment, GST_FORMAT_TIME,
      start, stop, &cstart, &cstop);
  if (!in_seg) {
    GST_DEBUG_OBJECT (overlay, "Buffer outside configured segment -- dropping");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  /* Rewrite the buffer timestamps to the clipped values; requires a
   * writable buffer */
  buffer = gst_buffer_make_writable (buffer);
  GST_BUFFER_TIMESTAMP (buffer) = cstart;
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    GST_BUFFER_DURATION (buffer) = cstop - cstart;

  /* Running-time interval covered by this frame; pages are scheduled in
   * running time */
  vid_running_time =
      gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
      cstart);
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    vid_running_time_end =
        gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
        cstop);
  else
    vid_running_time_end = vid_running_time;

  GST_DEBUG_OBJECT (overlay, "Video running time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (vid_running_time));

  overlay->video_segment.position = GST_BUFFER_TIMESTAMP (buffer);

  /* Protect the subtitle queue and decoder state from the subtitle
   * streaming thread */
  g_mutex_lock (&overlay->dvbsub_mutex);
  if (!g_queue_is_empty (overlay->pending_subtitles)) {
    DVBSubtitles *tmp, *candidate = NULL;

    /* Scan queued pages in PTS order, keeping the newest page that is
     * both due (pts <= frame end) and not yet timed out as 'candidate'.
     * A page with zero regions is a "clear screen" command. */
    while (!g_queue_is_empty (overlay->pending_subtitles)) {
      tmp = g_queue_peek_head (overlay->pending_subtitles);

      if (tmp->pts > vid_running_time_end) {
        /* For a future video frame */
        break;
      } else if (tmp->num_rects == 0) {
        /* Clear screen */
        if (overlay->current_subtitle)
          dvb_subtitles_free (overlay->current_subtitle);
        overlay->current_subtitle = NULL;
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
        dvb_subtitles_free (tmp);
        tmp = NULL;
      } else if (tmp->pts + tmp->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate) >= vid_running_time) {
        /* Still valid: a newer due page supersedes the previous candidate */
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = tmp;
        g_queue_pop_head (overlay->pending_subtitles);
      } else {
        /* Too late */
        dvb_subtitles_free (tmp);
        tmp = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
      }
    }

    if (candidate) {
      GST_DEBUG_OBJECT (overlay,
          "Time to show the next subtitle page (%" GST_TIME_FORMAT " >= %"
          GST_TIME_FORMAT ") - it has %u regions",
          GST_TIME_ARGS (vid_running_time), GST_TIME_ARGS (candidate->pts),
          candidate->num_rects);
      /* Replace the current page and rebuild the overlay composition */
      dvb_subtitles_free (overlay->current_subtitle);
      overlay->current_subtitle = candidate;
      if (overlay->current_comp)
        gst_video_overlay_composition_unref (overlay->current_comp);
      overlay->current_comp =
          gst_dvbsub_overlay_subs_to_comp (overlay, overlay->current_subtitle);
    }
  }

  /* Check that we haven't hit the fallback timeout for current subtitle page */
  if (overlay->current_subtitle
      && vid_running_time >
      (overlay->current_subtitle->pts +
          overlay->current_subtitle->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate))) {
    GST_INFO_OBJECT (overlay,
        "Subtitle page not redefined before fallback page_time_out of %u seconds (missed data?) - deleting current page",
        overlay->current_subtitle->page_time_out);
    dvb_subtitles_free (overlay->current_subtitle);
    overlay->current_subtitle = NULL;
  }

  /* Now render it */
  if (g_atomic_int_get (&overlay->enable) && overlay->current_subtitle) {
    GstVideoFrame frame;

    /* A current subtitle always has a matching composition by now */
    g_assert (overlay->current_comp);
    if (overlay->attach_compo_to_buffer) {
      /* Downstream supports the composition meta: let it do the blending */
      GST_DEBUG_OBJECT (overlay, "Attaching overlay image to video buffer");
      gst_buffer_add_video_overlay_composition_meta (buffer,
          overlay->current_comp);
    } else {
      GST_DEBUG_OBJECT (overlay, "Blending overlay image to video buffer");
      gst_video_frame_map (&frame, &overlay->info, buffer, GST_MAP_READWRITE);
      gst_video_overlay_composition_blend (overlay->current_comp, &frame);
      gst_video_frame_unmap (&frame);
    }
  }
  g_mutex_unlock (&overlay->dvbsub_mutex);

  ret = gst_pad_push (overlay->srcpad, buffer);

  return ret;

missing_timestamp:
  {
    GST_WARNING_OBJECT (overlay, "video buffer without timestamp, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
/* Sink pad chain function.
 *
 * Wraps the incoming buffer in a GstSample and emits the "draw" signal so
 * the application can return a GstVideoOverlayComposition for this frame.
 * The composition is then either merged into an upstream composition meta,
 * attached as a new meta (when downstream supports it), or blended
 * directly into the video frame, before the buffer is pushed downstream.
 *
 * Takes ownership of @buffer.  Returns the downstream flow return, or
 * GST_FLOW_NOT_NEGOTIATED / GST_FLOW_FLUSHING / GST_FLOW_ERROR on
 * failure paths. */
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  /* Renegotiate if needed; distinguish "flushing" from a genuine
   * negotiation failure so the right flow return propagates upstream */
  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  /* Reuse the cached sample where possible; only update its fields */
  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  /* Ask the application for this frame's overlay composition */
  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below as we have one reference in the sample and one in
   * this function.
   *
   * If the sample is not writable itself then the application kept an
   * reference itself.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  /* No composition returned: pass the buffer through untouched */
  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    /* Copy upstream's composition, append our rectangles to the copy,
     * then swap the copy into the existing meta */
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    /* Downstream can blend itself: attach the composition as a meta */
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    /* Fallback: blend the composition into the frame ourselves */
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);
    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}