/* Source pad event handler (GStreamer 1.0 API).
 *
 * QOS events are parsed and fed into the element's QoS bookkeeping
 * before being forwarded; every event (QOS included) is then pushed
 * upstream via the video sink pad.
 */
static gboolean
gst_shape_wipe_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  gboolean ret;

  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      GstQOSType type;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      /* was "&#215;tamp" (mojibake of "&timestamp") — did not compile */
      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);

      gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
    }
      /* fall through */
    default:
      ret = gst_pad_push_event (self->video_sinkpad, event);
      break;
  }

  return ret;
}
/* Source pad event handler (GStreamer 0.10 API variant).
 *
 * Takes a reference on the parent element, updates QoS state from
 * QOS events, forwards all events to the video sink pad, and drops
 * the parent reference before returning.
 */
static gboolean
gst_shape_wipe_src_event (GstPad * pad, GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:{
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      /* was "&#215;tamp" (mojibake of "&timestamp") — did not compile */
      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
    }
      /* fall through */
    default:
      ret = gst_pad_push_event (self->video_sinkpad, event);
      break;
  }

  gst_object_unref (self);

  return ret;
}
/* GObject set_property vfunc: stores the mask position/border floats. */
static void
gst_shape_wipe_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (object);

  switch (prop_id) {
    case PROP_POSITION:{
      gfloat position = g_value_get_float (value);

      GST_LOG_OBJECT (self, "Setting mask position: %f", position);
      self->mask_position = position;
      break;
    }
    case PROP_BORDER:{
      gfloat border = g_value_get_float (value);

      GST_LOG_OBJECT (self, "Setting mask border: %f", border);
      self->mask_border = border;
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Source pad query handler (GStreamer 1.0 API).
 *
 * CAPS queries are answered locally via gst_shape_wipe_src_getcaps();
 * everything else is proxied to the video sink pad's peer.
 */
static gboolean
gst_shape_wipe_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  gboolean res;

  GST_LOG_OBJECT (pad, "Handling query of type '%s'",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));

  if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
    GstCaps *filter = NULL;
    GstCaps *result;

    gst_query_parse_caps (query, &filter);
    result = gst_shape_wipe_src_getcaps (pad, filter);
    gst_query_set_caps_result (query, result);
    gst_caps_unref (result);
    res = TRUE;
  } else {
    res = gst_pad_peer_query (self->video_sinkpad, query);
  }

  return res;
}
/* Mask sink pad event handler.
 *
 * CAPS events configure the mask format, FLUSH_STOP drops any cached
 * mask buffer. All events are consumed here and never forwarded.
 */
static gboolean
gst_shape_wipe_mask_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstEventType type = GST_EVENT_TYPE (event);

  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  if (type == GST_EVENT_CAPS) {
    GstCaps *caps = NULL;

    gst_event_parse_caps (event, &caps);
    gst_shape_wipe_mask_sink_setcaps (self, caps);
  } else if (type == GST_EVENT_FLUSH_STOP) {
    /* Invalidate the stored mask under the lock */
    g_mutex_lock (&self->mask_mutex);
    gst_buffer_replace (&self->mask, NULL);
    g_mutex_unlock (&self->mask_mutex);
  }

  /* Dropping all events here */
  gst_event_unref (event);

  return TRUE;
}
/* GstElement change_state vfunc.
 *
 * Before chaining up: clear the shutdown flag on READY->PAUSED, and on
 * PAUSED->READY set it and signal the mask condition so a video chain
 * function blocked waiting for a mask buffer can exit. After chaining
 * up, PAUSED->READY also resets the element state.
 */
static GstStateChangeReturn
gst_shape_wipe_change_state (GstElement * element, GstStateChange transition)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (element);
  GstStateChangeReturn result = GST_STATE_CHANGE_SUCCESS;

  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
    self->shutdown = FALSE;
  } else if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    /* Unblock video sink chain function */
    g_mutex_lock (&self->mask_mutex);
    self->shutdown = TRUE;
    g_cond_signal (&self->mask_cond);
    g_mutex_unlock (&self->mask_mutex);
  }

  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    result =
        GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    gst_shape_wipe_reset (self);

  return result;
}
/* GObject finalize vfunc (GStreamer 1.0 variant).
 *
 * Resets element state (drops the cached mask buffer), then clears the
 * statically embedded condition variable and mutex before chaining up.
 */
static void
gst_shape_wipe_finalize (GObject * object)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (object);

  gst_shape_wipe_reset (self);

  /* cond/mutex are embedded in the instance struct, so clear (not free) */
  g_cond_clear (&self->mask_cond);
  g_mutex_clear (&self->mask_mutex);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* GObject finalize vfunc (0.10-era variant with heap-allocated
 * GCond/GMutex).
 *
 * Resets element state, frees the condition variable and mutex if they
 * were allocated, NULLs the pointers, then chains up to the parent
 * finalize.
 */
static void
gst_shape_wipe_finalize (GObject * object)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (object);

  gst_shape_wipe_reset (self);

  /* g_cond_free/g_mutex_free do not accept NULL, hence the guards */
  if (self->mask_cond)
    g_cond_free (self->mask_cond);
  self->mask_cond = NULL;

  if (self->mask_mutex)
    g_mutex_free (self->mask_mutex);
  self->mask_mutex = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Mask sink pad chain function.
 *
 * Stores the incoming buffer as the current mask (under the mask lock)
 * and signals the condition so a video chain function waiting for a
 * mask can proceed.
 */
static GstFlowReturn
gst_shape_wipe_mask_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));

  g_mutex_lock (self->mask_mutex);
  GST_DEBUG_OBJECT (self, "Setting new mask buffer: %" GST_PTR_FORMAT, buffer);
  gst_buffer_replace (&self->mask, buffer);
  g_cond_signal (self->mask_cond);
  g_mutex_unlock (self->mask_mutex);

  /* gst_buffer_replace() took its own reference, drop the chain's one */
  gst_buffer_unref (buffer);

  return GST_FLOW_OK;
}
/* Video sink pad setcaps (0.10 API).
 *
 * Parses format, dimensions and framerate from the caps, invalidates
 * the stored mask when the frame size changes, computes the frame
 * duration and forwards the caps to the source pad.
 */
static gboolean
gst_shape_wipe_video_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstVideoFormat format;
  gint width, height;
  gint fps_n, fps_d;
  gboolean res = TRUE;

  GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_video_format_parse_caps (caps, &format, &width, &height) ||
      !gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
    GST_ERROR_OBJECT (pad, "Invalid caps");
    res = FALSE;
    goto out;
  }

  self->fmt = format;

  /* A size change invalidates any previously received mask buffer */
  if (self->width != width || self->height != height) {
    g_mutex_lock (self->mask_mutex);
    self->width = width;
    self->height = height;

    if (self->mask)
      gst_buffer_unref (self->mask);
    self->mask = NULL;
    g_mutex_unlock (self->mask_mutex);
  }

  self->frame_duration = (fps_n != 0) ?
      gst_util_uint64_scale (GST_SECOND, fps_d, fps_n) : 0;

  res = gst_pad_set_caps (self->srcpad, caps);

out:
  gst_object_unref (self);
  return res;
}
/* GObject get_property vfunc: reads back mask position/border. */
static void
gst_shape_wipe_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (object);

  if (prop_id == PROP_POSITION)
    g_value_set_float (value, self->mask_position);
  else if (prop_id == PROP_BORDER)
    g_value_set_float (value, self->mask_border);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* Video sink pad bufferalloc (0.10 API): proxies the allocation
 * request downstream through the source pad.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_bufferalloc (GstPad * pad, guint64 offset,
    guint size, GstCaps * caps, GstBuffer ** buf)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstFlowReturn res;

  GST_DEBUG_OBJECT (pad,
      "Allocating buffer with offset 0x%" G_GINT64_MODIFIER
      "x and size %u with caps: %" GST_PTR_FORMAT, offset, size, caps);

  *buf = NULL;

  res = gst_pad_alloc_buffer (self->srcpad, offset, size, caps, buf);

  gst_object_unref (self);
  return res;
}
/* Video sink pad event handler (GStreamer 1.0 API).
 *
 * CAPS events are consumed and applied locally. SEGMENT events update
 * the stored segment (TIME format is copied, anything else resets it
 * to an empty TIME segment). SEGMENT and FLUSH_STOP both reset the QoS
 * state; all events except CAPS are forwarded to the source pad.
 */
static gboolean
gst_shape_wipe_video_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstEventType type = GST_EVENT_TYPE (event);

  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  if (type == GST_EVENT_CAPS) {
    GstCaps *caps = NULL;
    gboolean res;

    gst_event_parse_caps (event, &caps);
    res = gst_shape_wipe_video_sink_setcaps (self, caps);
    gst_event_unref (event);
    return res;
  }

  if (type == GST_EVENT_SEGMENT) {
    GstSegment seg;

    gst_event_copy_segment (event, &seg);
    if (seg.format == GST_FORMAT_TIME) {
      GST_DEBUG_OBJECT (pad,
          "Got SEGMENT event in GST_FORMAT_TIME %" GST_PTR_FORMAT, &seg);
      self->segment = seg;
    } else {
      gst_segment_init (&self->segment, GST_FORMAT_TIME);
    }
  }

  /* Mirrors the original fall-through: SEGMENT and FLUSH_STOP reset QoS */
  if (type == GST_EVENT_SEGMENT || type == GST_EVENT_FLUSH_STOP)
    gst_shape_wipe_reset_qos (self);

  return gst_pad_push_event (self->srcpad, event);
}
/* Source pad query handler (0.10 API): forwards every query to the
 * video sink pad's peer, failing when no peer is linked yet.
 */
static gboolean
gst_shape_wipe_src_query (GstPad * pad, GstQuery * query)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstPad *peer;
  gboolean res = FALSE;

  GST_DEBUG_OBJECT (pad, "Handling query of type '%s'",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));

  peer = gst_pad_get_peer (self->video_sinkpad);
  if (peer == NULL) {
    GST_INFO_OBJECT (pad, "No peer yet");
  } else {
    res = gst_pad_query (peer, query);
    gst_object_unref (peer);
  }

  gst_object_unref (self);
  return res;
}
/* Video sink pad event handler (0.10 API).
 *
 * NEWSEGMENT in TIME format is applied to the stored segment, any
 * other format resets the segment. NEWSEGMENT and FLUSH_STOP both
 * reset the QoS state; all events are forwarded to the source pad.
 */
static gboolean
gst_shape_wipe_video_sink_event (GstPad * pad, GstEvent * event)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstEventType type = GST_EVENT_TYPE (event);
  gboolean res;

  GST_DEBUG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  if (type == GST_EVENT_NEWSEGMENT) {
    GstFormat fmt;
    gboolean is_update;
    gint64 start, end, base;
    gdouble rate;

    gst_event_parse_new_segment (event, &is_update, &rate, &fmt, &start,
        &end, &base);
    if (fmt == GST_FORMAT_TIME) {
      GST_DEBUG_OBJECT (pad,
          "Got NEWSEGMENT event in GST_FORMAT_TIME, passing on (%"
          GST_TIME_FORMAT " - %" GST_TIME_FORMAT ")", GST_TIME_ARGS (start),
          GST_TIME_ARGS (end));
      gst_segment_set_newsegment (&self->segment, is_update, rate, fmt,
          start, end, base);
    } else {
      gst_segment_init (&self->segment, GST_FORMAT_TIME);
    }
  }

  /* Mirrors the original fall-through: NEWSEGMENT and FLUSH_STOP reset QoS */
  if (type == GST_EVENT_NEWSEGMENT || type == GST_EVENT_FLUSH_STOP)
    gst_shape_wipe_reset_qos (self);

  res = gst_pad_push_event (self->srcpad, event);

  gst_object_unref (self);
  return res;
}
/* Mask sink pad setcaps (0.10 API).
 *
 * Requires width/height/bpp in the caps; the mask dimensions must
 * match the already-configured video dimensions (if any).
 */
static gboolean
gst_shape_wipe_mask_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstStructure *structure;
  gint width, height, bpp;
  gboolean res = FALSE;

  GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height) ||
      !gst_structure_get_int (structure, "bpp", &bpp))
    goto out;

  if ((self->width != width || self->height != height) &&
      self->width > 0 && self->height > 0) {
    GST_ERROR_OBJECT (pad, "Mask caps must have the same width/height "
        "as the video caps");
    goto out;
  }

  self->width = width;
  self->height = height;
  self->mask_bpp = bpp;
  res = TRUE;

out:
  gst_object_unref (self);
  return res;
}
/* Video sink pad chain function (GStreamer 1.0 API).
 *
 * Waits (under mask_mutex) for a mask buffer from the mask pad, then
 * blends the video frame against it according to mask_position and
 * pushes the result downstream. Blending is done in place when the
 * input buffer is writable, otherwise into a freshly allocated buffer.
 *
 * FIX: both `goto shutdown` jumps are taken while mask_mutex is held,
 * but the shutdown path returned without unlocking — the next chain
 * call (or state change) would deadlock on g_mutex_lock(). The label
 * now releases the mutex before returning.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;
  GstVideoFrame inframe, outframe, maskframe;

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  /* Apply controller-driven property changes for this timestamp */
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Block until a mask buffer is available or we are shutting down */
  g_mutex_lock (&self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (&self->mask_cond, &self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (&self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    outbuf = gst_buffer_new_allocate (NULL, gst_buffer_get_size (buffer), NULL);
    gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  gst_video_frame_map (&inframe, &self->vinfo, buffer,
      new_outbuf ? GST_MAP_READ : GST_MAP_READWRITE);
  gst_video_frame_map (&outframe, &self->vinfo, outbuf,
      new_outbuf ? GST_MAP_WRITE : GST_MAP_READWRITE);
  gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);

  switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);
  gst_video_frame_unmap (&maskframe);

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "No valid caps yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
shutdown:
  {
    GST_DEBUG_OBJECT (self, "Shutting down");
    /* Reached only via gotos taken with mask_mutex held */
    g_mutex_unlock (&self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_FLUSHING;
  }
qos:
  {
    GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }
push_failed:
  {
    GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
        gst_flow_get_name (ret));
    return ret;
  }
}
/* Compute the caps the source pad can produce (GStreamer 1.0 API).
 *
 * Intersects the video sink template with its peer caps, the source
 * peer caps, and (restricted to raw video without format/framerate)
 * the mask peer caps, pinning width/height once known.
 *
 * FIX: the two early returns leaked the element reference taken by
 * gst_pad_get_parent() above; they now unref before returning.
 *
 * NOTE(review): the `filter` caps are currently not applied to the
 * result — confirm whether that is intended.
 */
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *templ, *ret, *tmp;

  if (gst_pad_has_current_caps (pad)) {
    ret = gst_pad_get_current_caps (pad);
    gst_object_unref (self);
    return ret;
  }
  if (gst_pad_has_current_caps (self->video_sinkpad)) {
    ret = gst_pad_get_current_caps (self->video_sinkpad);
    gst_object_unref (self);
    return ret;
  }

  templ = gst_pad_get_pad_template_caps (self->video_sinkpad);
  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = templ;
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  /* Once the video size is known, force it on every structure */
  if (self->vinfo.height && self->vinfo.width) {
    guint i, n;

    ret = gst_caps_make_writable (ret);
    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);
    }
  }

  tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
  GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);
    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);

    /* Mask caps constrain size only: drop format/framerate and rename
     * the media type back to raw video before intersecting */
    tmp = gst_caps_make_writable (intersection);
    n = gst_caps_get_size (tmp);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);

      gst_structure_remove_fields (s, "format", "framerate", NULL);
      gst_structure_set_name (s, "video/x-raw");
    }

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Compute the caps acceptable on the mask sink pad (GStreamer 1.0 API).
 *
 * Starts from the video sink template intersected with its peer and
 * the source peer, then rewrites each structure into GRAY16/GRAY8
 * variants with framerate 0/1 (the mask is a single still image),
 * finally intersecting with the mask pad's own peer caps.
 *
 * FIXES: the early return leaked the element reference taken by
 * gst_pad_get_parent(); the first intersect branch leaked the caps
 * reference returned by gst_pad_get_pad_template_caps().
 */
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  if (gst_pad_has_current_caps (pad)) {
    ret = gst_pad_get_current_caps (pad);
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    GstCaps *templ = gst_pad_get_pad_template_caps (self->video_sinkpad);

    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = gst_pad_get_pad_template_caps (self->video_sinkpad);
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
  GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  /* Rewrite each structure into a GRAY16 variant (in place) and a
   * GRAY8 copy; the mask is a single image, hence framerate 0/1 */
  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw");
    gst_structure_remove_fields (s, "format", "framerate", NULL);

    if (self->vinfo.width && self->vinfo.height)
      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16),
        NULL);
    gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Compute the caps acceptable on the mask sink pad (0.10 API variant).
 *
 * Mirrors the 1.0 version using 0.10 caps negotiation: builds gray
 * 16-bpp and 8-bpp variants of the video caps with framerate 0/1.
 *
 * FIX: the early return leaked the element reference taken by
 * gst_pad_get_parent(); it now unrefs before returning.
 */
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  if (GST_PAD_CAPS (pad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (pad));
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (self->srcpad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  /* Rewrite each structure into a 16-bpp gray variant (in place) and
   * an 8-bpp copy; the mask is a single image, hence framerate 0/1 */
  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw-gray");
    gst_structure_remove_fields (s, "format", "framerate", "bpp", "depth",
        "endianness", "framerate", "red_mask", "green_mask", "blue_mask",
        "alpha_mask", NULL);

    if (self->width && self->height)
      gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
          G_TYPE_INT, self->height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "bpp", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
        "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
    gst_structure_set (t, "bpp", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Compute the caps the source pad can produce (0.10 API variant).
 *
 * Intersects the video sink template with its peer, the source peer,
 * and a stripped-down version of the mask peer caps, pinning the
 * width/height once known.
 *
 * FIX: the two early returns leaked the element reference taken by
 * gst_pad_get_parent(); they now unref before returning.
 */
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;

  if (GST_PAD_CAPS (pad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (pad));
    gst_object_unref (self);
    return ret;
  }
  if (GST_PAD_CAPS (self->video_sinkpad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (self->video_sinkpad));
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  /* Once the video size is known, force it on every structure */
  if (self->height && self->width) {
    guint i, n;

    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
          G_TYPE_INT, self->height, NULL);
    }
  }

  tmp = gst_pad_peer_get_caps (self->mask_sinkpad);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp = gst_caps_make_writable (tmp);
    tmp2 = gst_caps_copy (gst_pad_get_pad_template_caps (self->mask_sinkpad));

    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);
    tmp = intersection;

    /* Mask caps constrain size only: drop the pixel-format fields and
     * duplicate each structure as raw YUV before intersecting */
    n = gst_caps_get_size (tmp);
    tmp2 = gst_caps_new_empty ();
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);
      GstStructure *c;

      gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
          "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
          NULL);
      gst_structure_set_name (s, "video/x-raw-yuv");
      c = gst_structure_copy (s);

      gst_caps_append_structure (tmp2, c);
    }
    gst_caps_append (tmp, tmp2);

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Video sink pad chain function (0.10 API variant).
 *
 * Waits (under mask_mutex) for a mask buffer, blends the video frame
 * against it and pushes the result downstream, allocating a new output
 * buffer when the input is not writable.
 *
 * FIX: both `goto shutdown` jumps are taken while mask_mutex is held,
 * but the shutdown path returned without unlocking — the next chain
 * call would deadlock. The label now releases the mutex first.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  /* Apply controller-driven property changes for this timestamp */
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Block until a mask buffer is available or we are shutting down */
  g_mutex_lock (self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad,
        GST_BUFFER_OFFSET_NONE, GST_BUFFER_SIZE (buffer),
        GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  switch (self->fmt) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
      else
        gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
      else
        gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  GST_ERROR_OBJECT (self, "No valid caps yet");
  gst_buffer_unref (buffer);
  return GST_FLOW_NOT_NEGOTIATED;
shutdown:
  GST_DEBUG_OBJECT (self, "Shutting down");
  /* Reached only via gotos taken with mask_mutex held */
  g_mutex_unlock (self->mask_mutex);
  gst_buffer_unref (buffer);
  return GST_FLOW_WRONG_STATE;
qos:
  GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
  gst_buffer_unref (buffer);
  gst_buffer_unref (mask);
  return GST_FLOW_OK;
alloc_failed:
  GST_ERROR_OBJECT (self, "Buffer allocation from downstream failed: %s",
      gst_flow_get_name (ret));
  gst_buffer_unref (buffer);
  gst_buffer_unref (mask);
  return ret;
push_failed:
  GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
      gst_flow_get_name (ret));
  return ret;
}
/* Video sink pad chain function (oldest 0.10 variant, no shutdown flag).
 *
 * Waits (under mask_mutex) for a mask buffer from the mask pad, blends
 * the frame against it according to mask_position and pushes the
 * result downstream.
 *
 * NOTE(review): this variant has no shutdown flag, so g_cond_wait()
 * can only be interrupted by a new mask or by the condition being
 * signalled externally — confirm how teardown unblocks it.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    return GST_FLOW_NOT_NEGOTIATED;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  /* Apply controller-driven property changes for this timestamp */
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_DEBUG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %lf",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Block until a mask buffer is available */
  g_mutex_lock (self->mask_mutex);
  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL) {
    g_mutex_unlock (self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_UNEXPECTED;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  /* Drop the frame entirely if QoS says we are too late */
  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer))) {
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad,
        GST_BUFFER_OFFSET_NONE, GST_BUFFER_SIZE (buffer),
        GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_buffer_unref (buffer);
      gst_buffer_unref (mask);
      return ret;
    }
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  /* Dispatch on pixel format and mask depth (8 or 16 bpp) */
  if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
    ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
    ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA)
    ret = gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
  else
    g_assert_not_reached ();

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (outbuf);
    return ret;
  }

  ret = gst_pad_push (self->srcpad, outbuf);

  return ret;
}