Code example #1
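/* timecodestamper's in-place transform: an existing timecode meta is
 * kept (and copied) unless override-existing is set, in which case it
 * is removed and the element's current timecode is stamped instead,
 * then advanced by one frame. Optionally an element message carrying
 * the timecode and the buffer's timestamps is posted on the bus. */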
static GstFlowReturn
gst_timecodestamper_transform_ip (GstBaseTransform * vfilter,
    GstBuffer * buffer)
{
  GstTimeCodeStamper *timecodestamper = GST_TIME_CODE_STAMPER (vfilter);
  GstVideoTimeCodeMeta *tc_meta;
  GstVideoTimeCode *tc;

  GST_OBJECT_LOCK (timecodestamper);
  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta && !timecodestamper->override_existing) {
    GST_OBJECT_UNLOCK (timecodestamper);
    tc = gst_video_time_code_copy (&tc_meta->tc);
    goto beach;
  } else if (timecodestamper->override_existing) {
    gst_buffer_foreach_meta (buffer, remove_timecode_meta, NULL);
  }

  gst_buffer_add_video_time_code_meta (buffer, timecodestamper->current_tc);
  tc = gst_video_time_code_copy (timecodestamper->current_tc);
  gst_video_time_code_increment_frame (timecodestamper->current_tc);
  GST_OBJECT_UNLOCK (timecodestamper);

beach:
  if (timecodestamper->post_messages) {
    GstClockTime stream_time, running_time, duration;
    GstStructure *s;
    GstMessage *msg;

    running_time =
        gst_segment_to_running_time (&vfilter->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    stream_time =
        gst_segment_to_stream_time (&vfilter->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    duration =
        gst_util_uint64_scale_int (GST_SECOND, timecodestamper->vinfo.fps_d,
        timecodestamper->vinfo.fps_n);
    s = gst_structure_new ("timecodestamper", "timestamp", G_TYPE_UINT64,
        GST_BUFFER_PTS (buffer), "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, "duration", G_TYPE_UINT64,
        duration, "timecode", GST_TYPE_VIDEO_TIME_CODE, tc, NULL);
    msg = gst_message_new_element (GST_OBJECT (timecodestamper), s);
    gst_element_post_message (GST_ELEMENT (timecodestamper), msg);
  }
  gst_video_time_code_free (tc);
  return GST_FLOW_OK;
}
Code example #2
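/* timecodewait's video chain function: buffers are dropped while the
 * timecode in their GstVideoTimeCodeMeta is still below the target
 * timecode (self->tc); once the target is reached, the corresponding
 * running time is recorded and the condition variable is signalled so
 * the waiting stream can resume, then the buffer is pushed downstream. */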
static GstFlowReturn
gst_timecodewait_vsink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * inbuf)
{
  GstClockTime timestamp;
  GstTimeCodeWait *self = GST_TIMECODEWAIT (parent);
  GstClockTime duration;
  GstVideoTimeCode *tc = NULL;
  GstVideoTimeCodeMeta *tc_meta;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  if (timestamp == GST_CLOCK_TIME_NONE) {
    gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
  g_mutex_lock (&self->mutex);
  self->vsegment.position = timestamp;
  duration = GST_BUFFER_DURATION (inbuf);
  if (duration != GST_CLOCK_TIME_NONE)
    self->vsegment.position += duration;
  tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
  if (tc_meta)
    tc = &tc_meta->tc;
  if (self->tc != NULL && tc != NULL) {
    if (gst_video_time_code_compare (tc, self->tc) < 0
        && self->running_time_of_timecode == GST_CLOCK_TIME_NONE) {
      GST_DEBUG_OBJECT (self, "Timecode not yet reached, ignoring frame");
      gst_buffer_unref (inbuf);
      inbuf = NULL;
    } else if (self->running_time_of_timecode == GST_CLOCK_TIME_NONE) {
      GST_INFO_OBJECT (self, "Target timecode reached at %" GST_TIME_FORMAT,
          GST_TIME_ARGS (self->vsegment.position));
      self->running_time_of_timecode =
          gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
          self->vsegment.position);
    }
  }
  g_cond_signal (&self->cond);
  g_mutex_unlock (&self->mutex);
  if (inbuf)
    return gst_pad_push (self->vsrcpad, inbuf);
  else
    return GST_FLOW_OK;
}
Code example #3
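/* decklinkvideosink's prepare function: the mapped video frame is
 * copied row by row into a newly created IDeckLinkMutableVideoFrame,
 * any GstVideoTimeCodeMeta on the buffer is translated into the
 * frame's SDI timecode (with drop-frame and field-mark flags), and
 * the frame is scheduled for output at its latency- and
 * offset-adjusted running time. */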
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) tc_meta->tc.config.flags) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Code example #4
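/* Another timecodestamper variant that can jam from a clock: when a
 * source clock is set and the current timecode is still zero, the
 * timecode is initialized from the clock's time of day since the
 * daily jam (frames are added separately so drop-frame timecodes stay
 * correct); without a source clock, a warning is logged if the
 * timecode drifts more than one second from the stream time. Either
 * way, the current timecode is stamped onto the buffer and advanced. */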
static GstFlowReturn
gst_timecodestamper_transform_ip (GstBaseTransform * vfilter,
    GstBuffer * buffer)
{
  GstTimeCodeStamper *timecodestamper = GST_TIME_CODE_STAMPER (vfilter);
  GstClockTime ref_time;

  GST_OBJECT_LOCK (timecodestamper);
  if (gst_buffer_get_video_time_code_meta (buffer)
      && !timecodestamper->override_existing) {
    GST_OBJECT_UNLOCK (timecodestamper);
    return GST_FLOW_OK;
  } else if (timecodestamper->override_existing) {
    gst_buffer_foreach_meta (buffer, remove_timecode_meta, NULL);
  }

  if (timecodestamper->source_clock != NULL) {
    if (timecodestamper->current_tc->hours == 0
        && timecodestamper->current_tc->minutes == 0
        && timecodestamper->current_tc->seconds == 0
        && timecodestamper->current_tc->frames == 0) {
      guint64 hours, minutes, seconds, frames;
      /* Daily jam time */

      ref_time = gst_clock_get_time (timecodestamper->source_clock);
      ref_time = ref_time % (24 * 60 * 60 * GST_SECOND);
      hours = ref_time / (GST_SECOND * 60 * 60);
      ref_time -= hours * GST_SECOND * 60 * 60;
      minutes = ref_time / (GST_SECOND * 60);
      ref_time -= minutes * GST_SECOND * 60;
      seconds = ref_time / GST_SECOND;
      ref_time -= seconds * GST_SECOND;
      /* Converting to frames for the whole ref_time might be inaccurate in case
       * we have a drop frame timecode */
      frames = gst_util_uint64_scale (ref_time, timecodestamper->vinfo.fps_n,
          timecodestamper->vinfo.fps_d * GST_SECOND);

      GST_DEBUG_OBJECT (timecodestamper,
          "Initializing with %" G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT ":%"
          G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT "", hours, minutes, seconds,
          frames);
      gst_video_time_code_init (timecodestamper->current_tc,
          timecodestamper->vinfo.fps_n,
          timecodestamper->vinfo.fps_d,
          NULL,
          timecodestamper->vinfo.interlace_mode ==
          GST_VIDEO_INTERLACE_MODE_PROGRESSIVE ? 0 :
          GST_VIDEO_TIME_CODE_FLAGS_INTERLACED, hours, minutes, seconds, 0, 0);
      gst_timecodestamper_set_drop_frame (timecodestamper);
      /* Do not use frames when initializing because maybe we have drop frame */
      gst_video_time_code_add_frames (timecodestamper->current_tc, frames);
    }
  } else {
    GstClockTime timecode_time;

    timecode_time =
        gst_video_time_code_nsec_since_daily_jam (timecodestamper->current_tc);
    ref_time =
        gst_segment_to_stream_time (&vfilter->segment, GST_FORMAT_TIME,
        buffer->pts);
    if (timecode_time != GST_CLOCK_TIME_NONE && ref_time != GST_CLOCK_TIME_NONE
        && ((timecode_time > ref_time && timecode_time - ref_time > GST_SECOND)
            || (ref_time > timecode_time
                && ref_time - timecode_time > GST_SECOND))) {
      gchar *tc_str =
          gst_video_time_code_to_string (timecodestamper->current_tc);
      GST_WARNING_OBJECT (timecodestamper,
          "Time code %s (stream time %" GST_TIME_FORMAT
          ") has drifted more than one second from stream time %"
          GST_TIME_FORMAT, tc_str, GST_TIME_ARGS (timecode_time),
          GST_TIME_ARGS (ref_time));
      g_free (tc_str);
    }
  }
  gst_buffer_add_video_time_code_meta (buffer, timecodestamper->current_tc);
  gst_video_time_code_increment_frame (timecodestamper->current_tc);
  GST_OBJECT_UNLOCK (timecodestamper);
  return GST_FLOW_OK;
}
Code example #5
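/* timeoverlay's text callback: in time-code mode the buffer's
 * GstVideoTimeCodeMeta is rendered as a string (falling back to
 * 00:00:00:00 when absent); otherwise the buffer, stream or running
 * time is rendered, depending on the selected time line. Any
 * configured default text is prefixed to the result. */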
/* Called with lock held */
static gchar *
gst_time_overlay_get_text (GstBaseTextOverlay * overlay,
    GstBuffer * video_frame)
{
  GstTimeOverlayTimeLine time_line;
  gchar *time_str, *txt, *ret;

  overlay->need_render = TRUE;

  time_line = g_atomic_int_get (&GST_TIME_OVERLAY_CAST (overlay)->time_line);
  if (time_line == GST_TIME_OVERLAY_TIME_LINE_TIME_CODE) {
    GstVideoTimeCodeMeta *tc_meta =
        gst_buffer_get_video_time_code_meta (video_frame);
    if (!tc_meta) {
      GST_DEBUG ("buffer without valid timecode");
      return g_strdup ("00:00:00:00");
    }
    time_str = gst_video_time_code_to_string (&tc_meta->tc);
    GST_DEBUG ("buffer with timecode %s", time_str);
  } else {
    GstClockTime ts, ts_buffer;
    GstSegment *segment = &overlay->segment;

    ts_buffer = GST_BUFFER_TIMESTAMP (video_frame);

    if (!GST_CLOCK_TIME_IS_VALID (ts_buffer)) {
      GST_DEBUG ("buffer without valid timestamp");
      return g_strdup ("");
    }

    GST_DEBUG ("buffer with timestamp %" GST_TIME_FORMAT,
        GST_TIME_ARGS (ts_buffer));

    switch (time_line) {
      case GST_TIME_OVERLAY_TIME_LINE_STREAM_TIME:
        ts = gst_segment_to_stream_time (segment, GST_FORMAT_TIME, ts_buffer);
        break;
      case GST_TIME_OVERLAY_TIME_LINE_RUNNING_TIME:
        ts = gst_segment_to_running_time (segment, GST_FORMAT_TIME, ts_buffer);
        break;
      case GST_TIME_OVERLAY_TIME_LINE_BUFFER_TIME:
      default:
        ts = ts_buffer;
        break;
    }

    time_str = gst_time_overlay_render_time (GST_TIME_OVERLAY (overlay), ts);
  }
  txt = g_strdup (overlay->default_text);

  if (txt != NULL && *txt != '\0') {
    ret = g_strdup_printf ("%s %s", txt, time_str);
  } else {
    ret = time_str;
    time_str = NULL;
  }

  g_free (txt);
  g_free (time_str);

  return ret;
}
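
The five examples above all revolve around the same small API: a GstVideoTimeCode attached to a GstBuffer as a GstVideoTimeCodeMeta. Below is a minimal, self-contained sketch of that pattern, assuming GStreamer 1.10 or newer with the gstreamer-video-1.0 library; the 25/1 frame rate and the 10:00:00:00 start timecode are arbitrary values chosen for illustration.

#include <gst/gst.h>
#include <gst/video/video.h>

static void
stamp_and_read_back (void)
{
  GstBuffer *buffer = gst_buffer_new ();
  GstVideoTimeCode *tc;
  GstVideoTimeCodeMeta *meta;

  /* 25 fps progressive, no daily jam, starting at 10:00:00:00 */
  tc = gst_video_time_code_new (25, 1, NULL,
      GST_VIDEO_TIME_CODE_FLAGS_NONE, 10, 0, 0, 0, 0);

  /* The meta stores its own copy, so the caller keeps ownership of tc */
  gst_buffer_add_video_time_code_meta (buffer, tc);

  meta = gst_buffer_get_video_time_code_meta (buffer);
  if (meta) {
    gchar *s = gst_video_time_code_to_string (&meta->tc);
    g_print ("buffer timecode: %s\n", s);       /* 10:00:00:00 */
    g_free (s);
  }

  /* Advance our running timecode by one frame, as timecodestamper
   * does for every buffer it stamps */
  gst_video_time_code_increment_frame (tc);

  gst_video_time_code_free (tc);
  gst_buffer_unref (buffer);
}

int
main (int argc, char **argv)
{
  gst_init (&argc, &argv);
  stamp_and_read_back ();
  return 0;
}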