/* GstMeta transform function for GstGLSyncMeta.
 *
 * Only the "copy" transform is supported, and only for whole-buffer
 * copies: a region copy does not duplicate all of the data the sync
 * object guards, so no sync meta is attached in that case.
 *
 * Returns FALSE for unsupported transform types or when the sync meta
 * could not be added to @dest, TRUE otherwise.
 */
static gboolean
_gst_gl_sync_meta_transform (GstBuffer * dest, GstMeta * meta,
    GstBuffer * buffer, GQuark type, gpointer data)
{
  GstGLSyncMeta *src_meta = (GstGLSyncMeta *) meta;
  GstMetaTransformCopy *copy_info;
  GstGLSyncMeta *dst_meta;

  /* anything other than a copy is not supported */
  if (!GST_META_TRANSFORM_IS_COPY (type))
    return FALSE;

  copy_info = data;

  /* only copy if the complete data is copied as well */
  if (copy_info->region)
    return TRUE;

  dst_meta = gst_buffer_add_gl_sync_meta (src_meta->context, dest);
  if (dst_meta == NULL)
    return FALSE;

  GST_LOG ("copy sync object %p from meta %p to %p", src_meta->glsync,
      src_meta, dst_meta);

  /* Setting a sync point here relies on GstBuffer copying
   * metas after data */
  gst_gl_sync_meta_set_sync_point (dst_meta, src_meta->context);

  return TRUE;
}
/* Default copy implementation for GstGLSyncMeta.
 *
 * Logs the copy and records a fresh sync point on the *source* meta.
 * @dest and the two buffer parameters are only used for logging /
 * signature compatibility here.
 */
static void
_default_copy (GstGLSyncMeta * src, GstBuffer * sbuffer, GstGLSyncMeta * dest,
    GstBuffer * dbuffer)
{
  GstGLContext *context = src->context;

  GST_LOG ("copy sync object %p from meta %p to %p", src->data, src, dest);

  /* Setting a sync point here relies on GstBuffer copying
   * metas after data */
  gst_gl_sync_meta_set_sync_point (src, context);
}
/* GstVideoAggregator::aggregate_frames implementation.
 *
 * Dispatches to the subclass' buffer-based processing if provided,
 * otherwise to its texture-based processing; buffer processing takes
 * precedence.  If the output buffer carries a GstGLSyncMeta, a sync
 * point is recorded on it after processing.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR when processing failed
 * (or when the subclass implements neither vfunc).
 */
static GstFlowReturn
gst_gl_mixer_aggregate_frames (GstVideoAggregator * vagg, GstBuffer * outbuf)
{
  GstGLMixer *mix = GST_GL_MIXER (vagg);
  GstGLMixerClass *klass = GST_GL_MIXER_GET_CLASS (vagg);
  GstGLSyncMeta *sync_meta;
  gboolean processed = FALSE;

  if (klass->process_buffers != NULL)
    processed = gst_gl_mixer_process_buffers (mix, outbuf);
  else if (klass->process_textures != NULL)
    processed = gst_gl_mixer_process_textures (mix, outbuf);

  /* record a sync point so consumers of @outbuf can wait on our GL work */
  sync_meta = gst_buffer_get_gl_sync_meta (outbuf);
  if (sync_meta != NULL)
    gst_gl_sync_meta_set_sync_point (sync_meta, mix->context);

  return processed ? GST_FLOW_OK : GST_FLOW_ERROR;
}
/* GstPushSrc::fill implementation: renders the test pattern into @buffer
 * on the GL thread and stamps timestamps/offsets.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED when caps were
 * not negotiated (or the frame could not be mapped), GST_FLOW_EOS when a
 * 0/1 framerate source has already produced its single frame.
 */
static GstFlowReturn
gst_gl_test_src_fill (GstPushSrc * psrc, GstBuffer * buffer)
{
  GstGLTestSrc *src = GST_GL_TEST_SRC (psrc);
  GstClockTime next_time;
  GstVideoFrame out_frame;
  GstGLSyncMeta *sync_meta;

  if (G_UNLIKELY (!src->negotiated || !src->context))
    goto not_negotiated;

  /* 0 framerate and we are at the second frame, eos */
  if (G_UNLIKELY (GST_VIDEO_INFO_FPS_N (&src->out_info) == 0
          && src->n_frames == 1))
    goto eos;

  if (!gst_video_frame_map (&out_frame, &src->out_info, buffer,
          GST_MAP_WRITE | GST_MAP_GL)) {
    return GST_FLOW_NOT_NEGOTIATED;
  }

  src->out_tex = (GstGLMemory *) out_frame.map[0].memory;

  /* draw the pattern on the GL thread; _fill_gl stores its success in
   * src->gl_result */
  gst_gl_context_thread_add (src->context, (GstGLContextThreadFunc) _fill_gl,
      src);

  /* unmap unconditionally, then check the GL result once.  The previous
   * code checked src->gl_result both before and after the unmap; nothing
   * in between modifies it, so the second check was dead code. */
  gst_video_frame_unmap (&out_frame);
  if (!src->gl_result)
    goto gl_error;

  sync_meta = gst_buffer_get_gl_sync_meta (buffer);
  if (sync_meta)
    gst_gl_sync_meta_set_sync_point (sync_meta, src->context);

  GST_BUFFER_TIMESTAMP (buffer) = src->timestamp_offset + src->running_time;
  GST_BUFFER_OFFSET (buffer) = src->n_frames;
  src->n_frames++;
  GST_BUFFER_OFFSET_END (buffer) = src->n_frames;

  if (src->out_info.fps_n) {
    next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
        src->out_info.fps_d, src->out_info.fps_n);
    GST_BUFFER_DURATION (buffer) = next_time - src->running_time;
  } else {
    next_time = src->timestamp_offset;
    /* NONE means forever */
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  src->running_time = next_time;

  return GST_FLOW_OK;

gl_error:
  {
    /* NOTE(review): a GL drawing failure arguably should be GST_FLOW_ERROR
     * rather than GST_FLOW_NOT_NEGOTIATED — kept as-is to preserve existing
     * caller-visible behavior.  The "occured" typo lives inside a translated
     * string, so it is also left untouched to keep gettext lookups intact. */
    GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND,
        (_("failed to draw pattern")), (_("A GL error occured")));
    return GST_FLOW_NOT_NEGOTIATED;
  }
not_negotiated:
  {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        (_("format wasn't negotiated before get function")));
    return GST_FLOW_NOT_NEGOTIATED;
  }
eos:
  {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->n_frames);
    return GST_FLOW_EOS;
  }
}