/* rsn_wrapped_buffer_new:
 * Create a new GstBuffer that aliases the data of @buf_to_wrap.
 *
 * The wrapper takes ownership of @buf_to_wrap (stored in the RsnMetaWrapped
 * meta) and keeps a ref on @owner so the owning element outlives the buffer.
 * Returns NULL if @buf_to_wrap is NULL.
 */
GstBuffer *
rsn_wrapped_buffer_new (GstBuffer * buf_to_wrap, GstElement * owner)
{
  GstBuffer *wrapper;
  RsnMetaWrapped *meta;

  g_return_val_if_fail (buf_to_wrap, NULL);

  wrapper = gst_buffer_new ();

  /* Attach the wrapped-buffer meta: it records the aliased buffer and the
   * owning element (reffed here, released when the meta is freed). */
  meta = RSN_META_WRAPPED_ADD (wrapper);
  meta->wrapped_buffer = buf_to_wrap;
  meta->owner = gst_object_ref (owner);

  /* Alias the payload and mirror all metadata (flags, caps, timestamps). */
  GST_BUFFER_DATA (wrapper) = GST_BUFFER_DATA (buf_to_wrap);
  GST_BUFFER_SIZE (wrapper) = GST_BUFFER_SIZE (buf_to_wrap);
  gst_buffer_copy_metadata (GST_BUFFER (wrapper), buf_to_wrap,
      GST_BUFFER_COPY_ALL);

  /* A read-only wrapped buffer must not become writable through the
   * wrapper, since both share the same memory. */
  if (!gst_buffer_is_writable (buf_to_wrap))
    GST_BUFFER_FLAG_SET (wrapper, GST_BUFFER_FLAG_READONLY);

  return wrapper;
}
/* gst_shape_wipe_video_sink_chain:
 * Chain function for the video sink pad (GStreamer 1.0 API).
 *
 * Blends the incoming video @buffer with the current mask buffer and pushes
 * the result downstream. Takes ownership of @buffer per chain-function
 * contract: every return path must consume exactly one ref on it.
 *
 * FIX: the shutdown error path previously returned while still holding
 * self->mask_mutex (both `goto shutdown` sites sit inside the locked
 * region), deadlocking the next chain call; it now unlocks first.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;
  GstVideoFrame inframe, outframe, maskframe;

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  /* Sync controlled properties to this buffer's stream time */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Wait (under lock) until a mask buffer is available or we shut down */
  g_mutex_lock (&self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (&self->mask_cond, &self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (&self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    outbuf = gst_buffer_new_allocate (NULL, gst_buffer_get_size (buffer), NULL);
    gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  /* NOTE(review): map return values are not checked here (pre-existing);
   * a failed map would make the blend read/write invalid frames. */
  gst_video_frame_map (&inframe, &self->vinfo, buffer,
      new_outbuf ? GST_MAP_READ : GST_MAP_READWRITE);
  gst_video_frame_map (&outframe, &self->vinfo, outbuf,
      new_outbuf ? GST_MAP_WRITE : GST_MAP_READWRITE);
  gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);

  switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);
  gst_video_frame_unmap (&maskframe);

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  /* gst_pad_push takes ownership of outbuf regardless of the result */
  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "No valid caps yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
shutdown:
  {
    GST_DEBUG_OBJECT (self, "Shutting down");
    /* Both jumps here happen while mask_mutex is held; release it before
     * returning or the next chain call deadlocks. */
    g_mutex_unlock (&self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_FLUSHING;
  }
qos:
  {
    GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }
push_failed:
  {
    GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
        gst_flow_get_name (ret));
    return ret;
  }
}
/* Fill
 *
 * GstPushSrc fill vmethod: reads one frame from the Android video capture
 * device (VCD) into @p_buf and timestamps it. Returns GST_FLOW_OK on
 * success (including the no-data case, where a zeroed frame is emitted to
 * keep the streaming thread from blocking) and GST_FLOW_ERROR otherwise.
 *
 * NOTE(review): the static locals (frame_count, timing) make this function
 * non-reentrant and shared across all instances — presumably there is only
 * ever one source instance; confirm before instantiating more.
 *
 * FIX 1: the windowed-framerate division used timeDiffUsec unguarded; with
 * log_interval == 0 two calls in the same microsecond gave timeDiffUsec == 0
 * and a division by zero. Now guarded like the per-second division.
 * FIX 2: gst_buffer_get_size() returns gsize but was printed with %d
 * (format/argument mismatch, undefined behavior); cast to int for the log.
 */
static GstFlowReturn
gst_android_video_source_fill(GstPushSrc * p_pushsrc, GstBuffer * p_buf)
{
    GstAndroidVideoSource *p_src;
    int vcd_ret;
    static struct timeval time_of_day;
    static struct timeval window_time;
    static struct timeval start_time;
    int timeDiffUsec;
    static gint frame_count = 0;
    static gint frame_count_window = 0;
    GstBuffer *p_outbuf;
    GstMapInfo mem_info;
    gboolean ok;

    GA_LOGTRACE("ENTER %s --xx--> thread(%ld)", __FUNCTION__, pthread_self());

    p_src = GST_ANDROIDVIDEOSOURCE(p_pushsrc);

    /* The buffer is allocated by the base class using the negotiated caps;
     * a size mismatch means negotiation went wrong somewhere. */
    if (gst_buffer_get_size(p_buf) != p_src->m_bufSize) {
        GA_LOGWARN("%s: WARNING: gst_buffer_get_size(p_buf)==%d != p_src->m_bufSize==%d", __FUNCTION__, (int) gst_buffer_get_size(p_buf), p_src->m_bufSize);
        goto fill_error_negotiation;
    }

    VCD_checkChangeCamera(p_src->m_devHandle);

    /* Lazily start the capture device on the first fill */
    if (!p_src->vcdStarted) {
        AV_CHECK_ERR(VCD_start(p_src->m_devHandle), fill_error_vcd_start);
        p_src->vcdStarted = TRUE;
    }

    if (!frame_count) { // Only first time
        gettimeofday(&start_time, NULL);
        gettimeofday(&window_time, NULL);
    }
    frame_count++;
    frame_count_window++;
    gettimeofday(&time_of_day, NULL);
    timeDiffUsec = time_diff_usec(&time_of_day, &window_time);

    /* Periodic framerate logging: overall since start, and over the last
     * window. log_interval == 0 means "log every frame". */
    if (timeDiffUsec > p_src->log_interval || !p_src->log_interval) {
        int framerate;
        int framerateWindow;
        int timeDiffSec;
        timeDiffSec = time_diff_sec(&time_of_day, &start_time);
        framerate = frame_count / (timeDiffSec > 0 ? timeDiffSec : 1);
        /* Guard against timeDiffUsec == 0 (possible when log_interval is 0
         * and two frames arrive within the same microsecond). */
        framerateWindow =
            frame_count_window * 1000000 / (timeDiffUsec > 0 ? timeDiffUsec : 1);
        GA_LOGVERB("%s ------> has now been called %d times --Create--> framerate since start: %d fps, framerate last %d usec: %d fps", __FUNCTION__, frame_count, framerate, timeDiffUsec, framerateWindow);
        gettimeofday(&window_time, NULL);
        frame_count_window = 0;
    }

    g_warn_if_fail(gst_buffer_is_writable(p_buf)); /* g_warn_if_fail() used for internal error (exception to our rules for this special "buffer copying case") */
    g_assert(gst_buffer_is_writable(p_buf)); /* this buf should be allocated in the base class and should always be writable */
    p_outbuf = gst_buffer_make_writable(p_buf); /* do this cause we never wanna crash in release even if somebody makes a mistake somewhere... */

    ok = gst_buffer_map(p_outbuf, &mem_info, GST_MAP_WRITE);
    if (!ok) {
        goto fill_error_gst_buffer_map;
    }

    vcd_ret = VCD_read(p_src->m_devHandle, &(mem_info.data), mem_info.size);
    if (vcd_ret == VCD_ERR_NO_DATA) {
        // This should never happen. There should always be more data from the device.
        // In any case, if it happens we don't want to end or lock or anything, we just
        // want to go on as if there actually were data. Specifically, we do not want
        // to block the streaming thread...
        memset(mem_info.data, 0, mem_info.size);
        gst_buffer_unmap(p_outbuf, &mem_info);
        /* No valid timing for a synthesized frame: mark everything as none */
        GST_BUFFER_OFFSET(p_outbuf) = GST_BUFFER_OFFSET_NONE;
        GST_BUFFER_OFFSET_END(p_outbuf) = GST_BUFFER_OFFSET_NONE;
        GST_BUFFER_PTS(p_outbuf) = GST_CLOCK_TIME_NONE;
        GST_BUFFER_DTS(p_outbuf) = GST_CLOCK_TIME_NONE;
        GST_BUFFER_DURATION(p_outbuf) = GST_CLOCK_TIME_NONE;
        GA_LOGWARN("%s: WARNING: Returning GST_FLOW_OK with a buffer with zeros...", __FUNCTION__);
        return GST_FLOW_OK;
    }
    if (vcd_ret != VCD_NO_ERROR) {
        gst_buffer_unmap(p_outbuf, &mem_info);
        goto fill_error_read;
    }

    gst_buffer_unmap(p_outbuf, &mem_info);
    set_gstbuf_time_and_offset(p_src, p_outbuf);

    GA_LOGTRACE("EXIT %s", __FUNCTION__);
    return GST_FLOW_OK;

    /*
     * propagate unhandled errors
     */
fill_error_read:
    {
        GA_LOGERROR("%s: Error when reading data from device!", __FUNCTION__);
        return GST_FLOW_ERROR;
    }
fill_error_negotiation:
    {
        GA_LOGERROR("%s: ERROR: Strange buffer size. Negotiation not done? Disallowed renegotiation done?", __FUNCTION__);
        return GST_FLOW_ERROR;
    }
fill_error_gst_buffer_map:
    {
        GA_LOGERROR("%s: gst_buffer_map() failed!", __FUNCTION__);
        return GST_FLOW_ERROR;
    }
fill_error_vcd_start:
    {
        GA_LOGERROR("%s: FATAL ERROR: Could not start the video device!", __FUNCTION__);
        return GST_FLOW_ERROR;
    }
}
/*
 * Performs the face detection
 *
 * In-place transform for the OpenCV face-detect element: runs the Haar
 * cascade detectors (face, then optionally nose/mouth/eyes within each face
 * rectangle), posts one element message carrying a "faces" GstList of
 * per-face GstStructures, and optionally draws ellipses on @img when the
 * buffer is writable and display is enabled. Always returns GST_FLOW_OK.
 */
static GstFlowReturn gst_face_detect_transform_ip (GstOpencvVideoFilter * base, GstBuffer * buf, IplImage * img) { GstFaceDetect *filter = GST_FACE_DETECT (base); if (filter->cvFaceDetect) { GstMessage *msg = NULL; GstStructure *s; GValue facelist = { 0 }; GValue facedata = { 0 }; CvSeq *faces; CvSeq *mouth = NULL, *nose = NULL, *eyes = NULL; gint i; gboolean do_display = FALSE;
    /* Drawing requires a writable buffer; otherwise detection still runs
     * but the overlay is skipped. */
    if (filter->display) { if (gst_buffer_is_writable (buf)) { do_display = TRUE; } else { GST_LOG_OBJECT (filter, "Buffer is not writable, not drawing faces."); } }
    /* Detectors operate on a grayscale copy of the frame */
    cvCvtColor (img, filter->cvGray, CV_RGB2GRAY); cvClearMemStorage (filter->cvStorage);
    faces = gst_face_detect_run_detector (filter, filter->cvFaceDetect, filter->min_size_width, filter->min_size_height);
    /* Message is posted even when no face was found (empty "faces" list) */
    msg = gst_face_detect_message_new (filter, buf); g_value_init (&facelist, GST_TYPE_LIST);
    for (i = 0; i < (faces ? faces->total : 0); i++) { CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
      /* Feature detectors use 1/8 of the configured face minimum size */
      guint mw = filter->min_size_width / 8; guint mh = filter->min_size_height / 8; guint rnx = 0, rny = 0, rnw, rnh; guint rmx = 0, rmy = 0, rmw, rmh; guint rex = 0, rey = 0, rew, reh; gboolean have_nose, have_mouth, have_eyes;
      /* detect face features */
      /* Nose: searched in the center quarter region of the face rect.
       * ROI is set on cvGray for the sub-detector and reset afterwards. */
      if (filter->cvNoseDetect) { rnx = r->x + r->width / 4; rny = r->y + r->height / 4; rnw = r->width / 2; rnh = r->height / 2; cvSetImageROI (filter->cvGray, cvRect (rnx, rny, rnw, rnh)); nose = gst_face_detect_run_detector (filter, filter->cvNoseDetect, mw, mh); have_nose = (nose && nose->total); cvResetImageROI (filter->cvGray); } else { have_nose = FALSE; }
      /* Mouth: searched in the lower half of the face rect */
      if (filter->cvMouthDetect) { rmx = r->x; rmy = r->y + r->height / 2; rmw = r->width; rmh = r->height / 2; cvSetImageROI (filter->cvGray, cvRect (rmx, rmy, rmw, rmh)); mouth = gst_face_detect_run_detector (filter, filter->cvMouthDetect, mw, mh); have_mouth = (mouth && mouth->total); cvResetImageROI (filter->cvGray); } else { have_mouth = FALSE; }
      /* Eyes: searched in the upper half of the face rect */
      if (filter->cvEyesDetect) { rex = r->x; rey = r->y; rew = r->width; reh = r->height / 2; cvSetImageROI (filter->cvGray, cvRect (rex, rey, rew, reh)); eyes = gst_face_detect_run_detector (filter, filter->cvEyesDetect, mw, mh); have_eyes = (eyes && eyes->total); cvResetImageROI (filter->cvGray); } else { have_eyes = FALSE; }
      GST_LOG_OBJECT (filter, "%2d/%2d: x,y = %4u,%4u: w.h = %4u,%4u : features(e,n,m) = %d,%d,%d", i, faces->total, r->x, r->y, r->width, r->height, have_eyes, have_nose, have_mouth);
      /* Per-face structure; feature coordinates are translated back from
       * ROI-relative to full-frame coordinates (rnx + sr->x etc.). */
      s = gst_structure_new ("face", "x", G_TYPE_UINT, r->x, "y", G_TYPE_UINT, r->y, "width", G_TYPE_UINT, r->width, "height", G_TYPE_UINT, r->height, NULL);
      if (have_nose) { CvRect *sr = (CvRect *) cvGetSeqElem (nose, 0); GST_LOG_OBJECT (filter, "nose/%d: x,y = %4u,%4u: w.h = %4u,%4u", nose->total, rnx + sr->x, rny + sr->y, sr->width, sr->height); gst_structure_set (s, "nose->x", G_TYPE_UINT, rnx + sr->x, "nose->y", G_TYPE_UINT, rny + sr->y, "nose->width", G_TYPE_UINT, sr->width, "nose->height", G_TYPE_UINT, sr->height, NULL); }
      if (have_mouth) { CvRect *sr = (CvRect *) cvGetSeqElem (mouth, 0); GST_LOG_OBJECT (filter, "mouth/%d: x,y = %4u,%4u: w.h = %4u,%4u", mouth->total, rmx + sr->x, rmy + sr->y, sr->width, sr->height); gst_structure_set (s, "mouth->x", G_TYPE_UINT, rmx + sr->x, "mouth->y", G_TYPE_UINT, rmy + sr->y, "mouth->width", G_TYPE_UINT, sr->width, "mouth->height", G_TYPE_UINT, sr->height, NULL); }
      if (have_eyes) { CvRect *sr = (CvRect *) cvGetSeqElem (eyes, 0); GST_LOG_OBJECT (filter, "eyes/%d: x,y = %4u,%4u: w.h = %4u,%4u", eyes->total, rex + sr->x, rey + sr->y, sr->width, sr->height); gst_structure_set (s, "eyes->x", G_TYPE_UINT, rex + sr->x, "eyes->y", G_TYPE_UINT, rey + sr->y, "eyes->width", G_TYPE_UINT, sr->width, "eyes->height", G_TYPE_UINT, sr->height, NULL); }
      /* g_value_take_boxed transfers ownership of s to facedata, which the
       * list append then copies; s must not be used after this. */
      g_value_init (&facedata, GST_TYPE_STRUCTURE); g_value_take_boxed (&facedata, s); gst_value_list_append_value (&facelist, &facedata); g_value_unset (&facedata); s = NULL;
      if (do_display) { CvPoint center; CvSize axes; gdouble w, h;
        /* Per-face color cycling derived from the loop index bits */
        gint cb = 255 - ((i & 3) << 7); gint cg = 255 - ((i & 12) << 5); gint cr = 255 - ((i & 48) << 3);
        w = r->width / 2; h = r->height / 2; center.x = cvRound ((r->x + w)); center.y = cvRound ((r->y + h)); axes.width = w; axes.height = h * 1.25; /* tweak for face form */
        cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb), 3, 8, 0);
        if (have_nose) { CvRect *sr = (CvRect *) cvGetSeqElem (nose, 0); w = sr->width / 2; h = sr->height / 2; center.x = cvRound ((rnx + sr->x + w)); center.y = cvRound ((rny + sr->y + h)); axes.width = w; axes.height = h * 1.25; /* tweak for nose form */
          cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb), 1, 8, 0); }
        if (have_mouth) { CvRect *sr = (CvRect *) cvGetSeqElem (mouth, 0); w = sr->width / 2; h = sr->height / 2; center.x = cvRound ((rmx + sr->x + w)); center.y = cvRound ((rmy + sr->y + h)); axes.width = w * 1.5; /* tweak for mouth form */
          axes.height = h; cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb), 1, 8, 0); }
        if (have_eyes) { CvRect *sr = (CvRect *) cvGetSeqElem (eyes, 0); w = sr->width / 2; h = sr->height / 2; center.x = cvRound ((rex + sr->x + w)); center.y = cvRound ((rey + sr->y + h)); axes.width = w * 1.5; /* tweak for eyes form */
          axes.height = h; cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb), 1, 8, 0); } } }
    /* Attach the accumulated face list to the message and post it */
    gst_structure_set_value ((GstStructure *) gst_message_get_structure (msg), "faces", &facelist); g_value_unset (&facelist); gst_element_post_message (GST_ELEMENT (filter), msg); } return GST_FLOW_OK; }
/*
 * Performs the face detection
 *
 * In-place transform (GStreamer 0.10 era): runs the configured Haar cascade
 * on a grayscale copy of @img, posts one element message per detected face
 * plus an aggregate message carrying a "faces" GstList, and optionally draws
 * circles on the frame when display is enabled and the buffer is writable.
 * Always returns GST_FLOW_OK.
 */
static GstFlowReturn gst_facedetect_transform_ip (GstOpencvVideoFilter * base, GstBuffer * buf, IplImage * img) { Gstfacedetect *filter; CvSeq *faces; int i; filter = GST_FACEDETECT (base);
  /* Detection runs on the grayscale copy; storage is reused per frame */
  cvCvtColor (img, filter->cvGray, CV_RGB2GRAY); cvClearMemStorage (filter->cvStorage);
  if (filter->cvCascade) { GstMessage *msg = NULL; GValue facelist = { 0 };
    /* Newer OpenCV (>= 2.2) adds a max-size argument to the detector */
    faces = cvHaarDetectObjects (filter->cvGray, filter->cvCascade, filter->cvStorage, filter->scale_factor, filter->min_neighbors, filter->flags, cvSize (filter->min_size_width, filter->min_size_height)
#if (CV_MAJOR_VERSION >= 2) && (CV_MINOR_VERSION >= 2)
      , cvSize (filter->min_size_width + 2, filter->min_size_height + 2)
#endif
      );
    /* The aggregate message is only created when at least one face exists */
    if (faces && faces->total > 0) { msg = gst_facedetect_message_new (filter, buf); g_value_init (&facelist, GST_TYPE_LIST); }
    for (i = 0; i < (faces ? faces->total : 0); i++) { CvRect *r = (CvRect *) cvGetSeqElem (faces, i); GValue value = { 0 };
      GstStructure *s = gst_structure_new ("face", "x", G_TYPE_UINT, r->x, "y", G_TYPE_UINT, r->y, "width", G_TYPE_UINT, r->width, "height", G_TYPE_UINT, r->height, NULL);
      /* gst_message_new_element takes ownership of s; the list entry below
       * stores its own copy via gst_value_set_structure, so reading s here
       * is safe while the message m is still alive. */
      GstMessage *m = gst_message_new_element (GST_OBJECT (filter), s); g_value_init (&value, GST_TYPE_STRUCTURE); gst_value_set_structure (&value, s); gst_value_list_append_value (&facelist, &value); g_value_unset (&value); gst_element_post_message (GST_ELEMENT (filter), m);
      /* Overlay: circle roughly inscribing the detected rectangle */
      if (filter->display) { if (gst_buffer_is_writable (buf)) { CvPoint center; int radius; center.x = cvRound ((r->x + r->width * 0.5)); center.y = cvRound ((r->y + r->height * 0.5)); radius = cvRound ((r->width + r->height) * 0.25); cvCircle (img, center, radius, CV_RGB (255, 32, 32), 3, 8, 0); } else { GST_DEBUG_OBJECT (filter, "Buffer is not writable, not drawing " "circles for faces"); } } }
    /* Post the aggregate message (0.10 API: direct msg->structure access) */
    if (msg) { gst_structure_set_value (msg->structure, "faces", &facelist); g_value_unset (&facelist); gst_element_post_message (GST_ELEMENT (filter), msg); } } return GST_FLOW_OK; }
/* gst_shape_wipe_video_sink_chain:
 * Chain function for the video sink pad (GStreamer 0.10 API).
 *
 * Blends the incoming video @buffer with the current mask and pushes the
 * result downstream. Takes ownership of @buffer per chain-function
 * contract: every return path must consume exactly one ref on it.
 *
 * FIX: the not-negotiated early return previously leaked @buffer (returned
 * without unreffing it); it is now released like on the other error paths.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN)) {
    /* We own the buffer here: drop our ref before bailing out */
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  /* Sync controlled properties to this buffer's stream time */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_DEBUG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %lf",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Wait (under lock) until a mask buffer is available */
  g_mutex_lock (self->mask_mutex);
  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL) {
    g_mutex_unlock (self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_UNEXPECTED;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer))) {
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad,
        GST_BUFFER_OFFSET_NONE, GST_BUFFER_SIZE (buffer),
        GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_buffer_unref (buffer);
      gst_buffer_unref (mask);
      return ret;
    }
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  /* Dispatch on negotiated format and mask depth */
  if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
    ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
    ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA)
    ret = gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
  else
    g_assert_not_reached ();

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* If !new_outbuf, outbuf == buffer, so this releases our only ref */
    gst_buffer_unref (outbuf);
    return ret;
  }

  /* gst_pad_push takes ownership of outbuf */
  ret = gst_pad_push (self->srcpad, outbuf);

  return ret;
}
/* gst_shape_wipe_video_sink_chain:
 * Chain function for the video sink pad (GStreamer 0.10 API, goto-cleanup
 * variant).
 *
 * Blends the incoming video @buffer with the current mask and pushes the
 * result downstream. Takes ownership of @buffer per chain-function
 * contract: every return path must consume exactly one ref on it.
 *
 * FIX: the shutdown error path previously returned while still holding
 * self->mask_mutex (both `goto shutdown` sites sit inside the locked
 * region), deadlocking the next chain call; it now unlocks first.
 */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  /* Sync controlled properties to this buffer's stream time */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* Wait (under lock) until a mask buffer is available or we shut down */
  g_mutex_lock (self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream. */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad,
        GST_BUFFER_OFFSET_NONE, GST_BUFFER_SIZE (buffer),
        GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  switch (self->fmt) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
      else
        gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
      else
        gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  /* gst_pad_push takes ownership of outbuf regardless of the result */
  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  GST_ERROR_OBJECT (self, "No valid caps yet");
  gst_buffer_unref (buffer);
  return GST_FLOW_NOT_NEGOTIATED;
shutdown:
  GST_DEBUG_OBJECT (self, "Shutting down");
  /* Both jumps here happen while mask_mutex is held; release it before
   * returning or the next chain call deadlocks. */
  g_mutex_unlock (self->mask_mutex);
  gst_buffer_unref (buffer);
  return GST_FLOW_WRONG_STATE;
qos:
  GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
  gst_buffer_unref (buffer);
  gst_buffer_unref (mask);
  return GST_FLOW_OK;
alloc_failed:
  GST_ERROR_OBJECT (self, "Buffer allocation from downstream failed: %s",
      gst_flow_get_name (ret));
  gst_buffer_unref (buffer);
  gst_buffer_unref (mask);
  return ret;
push_failed:
  GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
      gst_flow_get_name (ret));
  return ret;
}