static GstFlowReturn
gst_timecodestamper_transform_ip (GstBaseTransform * vfilter,
    GstBuffer * buffer)
{
  GstTimeCodeStamper *timecodestamper = GST_TIME_CODE_STAMPER (vfilter);
  GstVideoTimeCodeMeta *tc_meta;
  GstVideoTimeCode *tc;

  GST_OBJECT_LOCK (timecodestamper);
  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta && !timecodestamper->override_existing) {
    /* Keep the existing meta untouched, but still post a message for it */
    GST_OBJECT_UNLOCK (timecodestamper);
    tc = gst_video_time_code_copy (&tc_meta->tc);
    goto beach;
  } else if (timecodestamper->override_existing) {
    gst_buffer_foreach_meta (buffer, remove_timecode_meta, NULL);
  }

  gst_buffer_add_video_time_code_meta (buffer, timecodestamper->current_tc);
  tc = gst_video_time_code_copy (timecodestamper->current_tc);
  gst_video_time_code_increment_frame (timecodestamper->current_tc);
  GST_OBJECT_UNLOCK (timecodestamper);

beach:
  if (timecodestamper->post_messages) {
    GstClockTime stream_time, running_time, duration;
    GstStructure *s;
    GstMessage *msg;

    running_time =
        gst_segment_to_running_time (&vfilter->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    stream_time =
        gst_segment_to_stream_time (&vfilter->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    /* One frame lasts fps_d/fps_n seconds */
    duration =
        gst_util_uint64_scale_int (GST_SECOND, timecodestamper->vinfo.fps_d,
        timecodestamper->vinfo.fps_n);

    s = gst_structure_new ("timecodestamper",
        "timestamp", G_TYPE_UINT64, GST_BUFFER_PTS (buffer),
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time,
        "duration", G_TYPE_UINT64, duration,
        "timecode", GST_TYPE_VIDEO_TIME_CODE, tc, NULL);

    msg = gst_message_new_element (GST_OBJECT (timecodestamper), s);
    gst_element_post_message (GST_ELEMENT (timecodestamper), msg);
  }

  gst_video_time_code_free (tc);

  return GST_FLOW_OK;
}
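For context, the element message built above can be consumed from the pipeline bus by an application. A minimal sketch of such a consumer, assuming the element runs in a pipeline whose bus is watched with gst_bus_add_watch (bus, bus_cb, NULL); the callback name and the printed format are illustrative, not part of this element:

/* Illustrative consumer for the "timecodestamper" element messages posted
 * by gst_timecodestamper_transform_ip () above. */
#include <gst/gst.h>
#include <gst/video/video.h>

static gboolean
bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s && gst_structure_has_name (s, "timecodestamper")) {
      GstVideoTimeCode *tc = NULL;
      guint64 running_time = GST_CLOCK_TIME_NONE;

      /* Boxed values returned by gst_structure_get () are copies and must
       * be freed by the caller */
      gst_structure_get (s, "running-time", G_TYPE_UINT64, &running_time,
          "timecode", GST_TYPE_VIDEO_TIME_CODE, &tc, NULL);
      if (tc) {
        gchar *tc_str = gst_video_time_code_to_string (tc);

        g_print ("timecode %s at %" GST_TIME_FORMAT "\n", tc_str,
            GST_TIME_ARGS (running_time));
        g_free (tc_str);
        gst_video_time_code_free (tc);
      }
    }
  }

  return G_SOURCE_CONTINUE;
}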
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d",
          self->caps_format, f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  g_mutex_unlock (&self->lock);

  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  // Wrap the DeckLink frame memory zero-copy; the destroy notify releases
  // the references taken below once the buffer is freed
  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);
  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
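The zero-copy wrapping above hands ownership of the bookkeeping struct to video_frame_free (), which is referenced but not part of this excerpt. A hypothetical sketch of what that destroy notify presumably does, mirroring the AddRef () calls (the VideoFrame layout is inferred from the usage above):

/* Hypothetical sketch only: the real video_frame_free () is not shown in
 * this excerpt. It has to undo the AddRef () calls taken while wrapping
 * the buffer, then free the bookkeeping struct. */
static void
video_frame_free (void *data)
{
  VideoFrame *vf = (VideoFrame *) data;

  vf->frame->Release ();        /* pairs with f->frame->AddRef () */
  vf->input->Release ();        /* pairs with vf->input->AddRef () */
  g_free (vf);
}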
static GstFlowReturn
gst_timecodestamper_transform_ip (GstBaseTransform * vfilter,
    GstBuffer * buffer)
{
  GstTimeCodeStamper *timecodestamper = GST_TIME_CODE_STAMPER (vfilter);
  GstClockTime ref_time;

  GST_OBJECT_LOCK (timecodestamper);
  if (gst_buffer_get_video_time_code_meta (buffer)
      && !timecodestamper->override_existing) {
    GST_OBJECT_UNLOCK (timecodestamper);
    return GST_FLOW_OK;
  } else if (timecodestamper->override_existing) {
    gst_buffer_foreach_meta (buffer, remove_timecode_meta, NULL);
  }

  if (timecodestamper->source_clock != NULL) {
    if (timecodestamper->current_tc->hours == 0
        && timecodestamper->current_tc->minutes == 0
        && timecodestamper->current_tc->seconds == 0
        && timecodestamper->current_tc->frames == 0) {
      guint64 hours, minutes, seconds, frames;

      /* Daily jam time: initialize the timecode from the source clock's
       * time-of-day */
      ref_time = gst_clock_get_time (timecodestamper->source_clock);
      ref_time = ref_time % (24 * 60 * 60 * GST_SECOND);
      hours = ref_time / (GST_SECOND * 60 * 60);
      ref_time -= hours * GST_SECOND * 60 * 60;
      minutes = ref_time / (GST_SECOND * 60);
      ref_time -= minutes * GST_SECOND * 60;
      seconds = ref_time / GST_SECOND;
      ref_time -= seconds * GST_SECOND;
      /* Converting to frames for the whole ref_time might be inaccurate in
       * case we have a drop frame timecode */
      frames = gst_util_uint64_scale (ref_time, timecodestamper->vinfo.fps_n,
          timecodestamper->vinfo.fps_d * GST_SECOND);
      GST_DEBUG_OBJECT (timecodestamper,
          "Initializing with %" G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT ":%"
          G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT "", hours, minutes, seconds,
          frames);
      gst_video_time_code_init (timecodestamper->current_tc,
          timecodestamper->vinfo.fps_n,
          timecodestamper->vinfo.fps_d,
          NULL,
          timecodestamper->vinfo.interlace_mode ==
          GST_VIDEO_INTERLACE_MODE_PROGRESSIVE ? 0 :
          GST_VIDEO_TIME_CODE_FLAGS_INTERLACED, hours, minutes, seconds, 0, 0);
      gst_timecodestamper_set_drop_frame (timecodestamper);
      /* Do not use frames when initializing because maybe we have drop
       * frame */
      gst_video_time_code_add_frames (timecodestamper->current_tc, frames);
    }
  } else {
    GstClockTime timecode_time;

    /* No source clock: sanity-check the running timecode against the
     * buffer's stream time and warn if they drift apart */
    timecode_time =
        gst_video_time_code_nsec_since_daily_jam (timecodestamper->current_tc);
    ref_time =
        gst_segment_to_stream_time (&vfilter->segment, GST_FORMAT_TIME,
        buffer->pts);
    if (timecode_time != GST_CLOCK_TIME_NONE && ref_time != GST_CLOCK_TIME_NONE
        && ((timecode_time > ref_time && timecode_time - ref_time > GST_SECOND)
            || (ref_time > timecode_time
                && ref_time - timecode_time > GST_SECOND))) {
      gchar *tc_str =
          gst_video_time_code_to_string (timecodestamper->current_tc);

      GST_WARNING_OBJECT (timecodestamper,
          "Time code %s (stream time %" GST_TIME_FORMAT
          ") has drifted more than one second from stream time %"
          GST_TIME_FORMAT, tc_str, GST_TIME_ARGS (timecode_time),
          GST_TIME_ARGS (ref_time));
      g_free (tc_str);
    }
  }
  gst_buffer_add_video_time_code_meta (buffer, timecodestamper->current_tc);
  gst_video_time_code_increment_frame (timecodestamper->current_tc);
  GST_OBJECT_UNLOCK (timecodestamper);

  return GST_FLOW_OK;
}
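For a concrete feel of the frame arithmetic above: the remainder left after stripping hours, minutes and seconds is scaled by fps_n / (fps_d * GST_SECOND), truncating toward zero. A standalone sketch, assuming 30000/1001 (29.97) fps and an illustrative half-second remainder:

/* Standalone check of the daily-jam frame arithmetic, assuming
 * 30000/1001 fps; the half-second remainder is illustrative. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  guint64 remainder, frames;

  gst_init (&argc, &argv);

  remainder = GST_SECOND / 2;   /* what is left after removing H:M:S */
  frames = gst_util_uint64_scale (remainder, 30000, 1001 * GST_SECOND);
  g_print ("%" G_GUINT64_FORMAT " frames\n", frames);   /* 0.5 * 29.97 -> 14 */

  return 0;
}

The resulting count is then applied with gst_video_time_code_add_frames () rather than passed to gst_video_time_code_init (), so that drop-frame skipping is honoured.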