/* The create() virtual function is responsible for returning the next buffer.
 * We just pop buffers off of the queue and block if necessary.
 */
static GstFlowReturn
shell_recorder_src_create (GstPushSrc  *push_src,
			   GstBuffer  **buffer_out)
{
  ShellRecorderSrc *src = SHELL_RECORDER_SRC (push_src);
  GstBuffer *buffer;

  if (src->closed)
    return GST_FLOW_EOS;

  buffer = g_async_queue_pop (src->queue);

  if (src->last_frame_time == 0)
    src->last_frame_time = gst_clock_get_time (GST_CLOCK (src->clock));

  if (buffer == RECORDER_QUEUE_END)
    {
      /* Returning EOS here will cause an EOS message to be sent */
      src->closed = TRUE;
      return GST_FLOW_EOS;
    }

  shell_recorder_src_update_memory_used (src,
					 - (int)(gst_buffer_get_size(buffer) / 1024));

  *buffer_out = buffer;
  GST_BUFFER_DURATION(*buffer_out) = GST_CLOCK_DIFF (src->last_frame_time, gst_clock_get_time (GST_CLOCK (src->clock)));

  src->last_frame_time = gst_clock_get_time (GST_CLOCK (src->clock));

  return GST_FLOW_OK;
}
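The producer side is not part of the snippet; below is a minimal sketch of how frames might be fed into src->queue, assuming RECORDER_QUEUE_END is a sentinel pointer pushed on shutdown (the function names here are hypothetical).

/* Hypothetical producer-side sketch: hand a frame to the source element.
 * Assumes src->queue is the GAsyncQueue popped in create() above and
 * RECORDER_QUEUE_END is a sentinel pointer recognized there. */
static void
recorder_src_add_buffer (ShellRecorderSrc *src, GstBuffer *buffer)
{
  shell_recorder_src_update_memory_used (src,
                                         (int)(gst_buffer_get_size (buffer) / 1024));
  g_async_queue_push (src->queue, gst_buffer_ref (buffer));
}

static void
recorder_src_close (ShellRecorderSrc *src)
{
  /* wake up a blocked create() so it returns GST_FLOW_EOS */
  g_async_queue_push (src->queue, RECORDER_QUEUE_END);
}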
static void
test_position (InsanityTest * test, GstBuffer * buf)
{
  GstQuery *query;
  GstClockTimeDiff diff;

  if (GST_BUFFER_PTS_IS_VALID (buf) == FALSE)
    return;

  if (GST_CLOCK_TIME_IS_VALID (glob_first_pos_point) == FALSE) {
    glob_first_pos_point = gst_segment_to_stream_time (&glob_last_segment,
        glob_last_segment.format, GST_BUFFER_PTS (buf));
  }

  glob_expected_pos = gst_segment_to_stream_time (&glob_last_segment,
      glob_last_segment.format, GST_BUFFER_PTS (buf));

  diff = ABS (GST_CLOCK_DIFF (glob_expected_pos, glob_first_pos_point));

  if (diff < glob_playback_duration * GST_SECOND)
    return;

  query = gst_query_new_position (GST_FORMAT_TIME);

  if (gst_element_query (glob_pipeline, query)) {
    gint64 pos;
    GstFormat fmt;
    GstClockTimeDiff diff;

    gst_query_parse_position (query, &fmt, &pos);
    diff = ABS (GST_CLOCK_DIFF (glob_expected_pos, pos));

    if (diff <= POSITION_THRESHOLD) {
      insanity_test_validate_checklist_item (test, "position-detection", TRUE,
          NULL);
    } else {
      gchar *validate_msg = g_strdup_printf ("Found position: %" GST_TIME_FORMAT
          " expected: %" GST_TIME_FORMAT, GST_TIME_ARGS (pos),
          GST_TIME_ARGS (glob_expected_pos));

      insanity_test_validate_checklist_item (test, "position-detection",
          FALSE, validate_msg);

      g_free (validate_msg);
    }
  } else {
    LOG (test,
        "%s Does not handle position queries (position-detection \"SKIP\")",
        gst_element_factory_get_metadata (gst_element_get_factory
            (glob_demuxer), GST_ELEMENT_METADATA_LONGNAME));
  }

  next_test (test);
}
Example #3
static gboolean
update_trace_value (GstTraceValues * self, GstClockTime nts,
    GstClockTime nval, GstClockTime * dts, GstClockTime * dval)
{
  GstTraceValue *lv;
  GstClockTimeDiff dt;
  GstClockTime window = self->window;
  GQueue *q = &self->values;
  GList *node = q->tail;
  gboolean ret = FALSE;


  /* search from the tail of the queue for a good GstTraceValue */
  while (node) {
    lv = node->data;
    dt = GST_CLOCK_DIFF (lv->ts, nts);
    if (dt < window) {
      break;
    } else {
      node = g_list_previous (node);
    }
  }

  if (node) {
    /* calculate the windowed value */
    *dts = dt;
    *dval = GST_CLOCK_DIFF (lv->val, nval);

    /* drop all older measurements */
    while (q->tail != node) {
      free_trace_value (g_queue_pop_tail (q));
    }
    ret = TRUE;
  } else {
    *dts = nts;
    *dval = nval;
  }

  /* don't push too many data items */
  lv = q->head ? q->head->data : NULL;
  if (!lv || (GST_CLOCK_DIFF (lv->ts, nts) > (window / WINDOW_SUBDIV))) {
    /* push the new measurement */
    lv = g_slice_new0 (GstTraceValue);
    lv->ts = nts;
    lv->val = nval;
    g_queue_push_head (q, lv);
  }
  return ret;
}
Example #4
/**
 * gst_net_client_clock_new:
 * @name: a name for the clock
 * @remote_address: the address of the remote clock provider
 * @remote_port: the port of the remote clock provider
 * @base_time: initial time of the clock
 *
 * Create a new #GstNetClientClock that will report the time
 * provided by the #GstNetTimeProvider on @remote_address and 
 * @remote_port.
 *
 * Returns: a new #GstClock that receives a time from the remote
 * clock.
 */
GstClock *
gst_net_client_clock_new (gchar * name, const gchar * remote_address,
    gint remote_port, GstClockTime base_time)
{
  /* FIXME: gst_net_client_clock_new() should be a thin wrapper for g_object_new() */
  GstNetClientClock *ret;
  GstClockTime internal;

  g_return_val_if_fail (remote_address != NULL, NULL);
  g_return_val_if_fail (remote_port > 0, NULL);
  g_return_val_if_fail (remote_port <= G_MAXUINT16, NULL);
  g_return_val_if_fail (base_time != GST_CLOCK_TIME_NONE, NULL);

  ret = g_object_new (GST_TYPE_NET_CLIENT_CLOCK, "address", remote_address,
      "port", remote_port, NULL);

  /* gst_clock_get_time() values are guaranteed to be increasing. Because no one
   * has called get_time() on this clock yet, we are free to adjust it to any
   * value without worrying about MAX() issues with the clock's internal time.
   */

  /* update our internal time so get_time() gives something around base_time.
     Assume that the rate is 1 in the beginning. */
  internal = gst_clock_get_internal_time (GST_CLOCK (ret));
  gst_clock_set_calibration (GST_CLOCK (ret), internal, base_time, 1, 1);

  {
    GstClockTime now = gst_clock_get_time (GST_CLOCK (ret));

    if (GST_CLOCK_DIFF (now, base_time) > 0 ||
        GST_CLOCK_DIFF (now, base_time + GST_SECOND) < 0) {
      g_warning ("unable to set the base time, expect sync problems!");
    }
  }

  if (!gst_net_client_clock_start (ret))
    goto failed_start;

  /* all systems go, cap'n */
  return (GstClock *) ret;

failed_start:
  {
    /* already printed a nice error */
    gst_object_unref (ret);
    return NULL;
  }
}
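A minimal usage sketch for the constructor above, assuming a hypothetical provider address and port; the client clock is simply installed as the pipeline clock.

static GstElement *
start_pipeline_with_net_clock (void)
{
  /* address, port and pipeline description are assumptions for illustration */
  GstElement *pipeline = gst_parse_launch ("videotestsrc ! autovideosink", NULL);
  GstClock *net_clock = gst_net_client_clock_new ("net_clock", "192.168.1.10",
      5637, 0);

  gst_pipeline_use_clock (GST_PIPELINE (pipeline), net_clock);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  return pipeline;
}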
/*
 * gst_debug_bin_to_dot_file_with_ts:
 * @bin: the top-level pipeline that should be analyzed
 * @file_name: output base filename (e.g. "myplayer")
 *
 * This works like gst_debug_bin_to_dot_file(), but adds the current timestamp
 * to the filename, so that it can be used to take multiple snapshots.
 */
void
gst_debug_bin_to_dot_file_with_ts (GstBin * bin,
    GstDebugGraphDetails details, const gchar * file_name)
{
  gchar *ts_file_name = NULL;
  GstClockTime elapsed;

  g_return_if_fail (GST_IS_BIN (bin));

  if (!file_name) {
    file_name = g_get_application_name ();
    if (!file_name)
      file_name = "unnamed";
  }

  /* add timestamp */
  elapsed = GST_CLOCK_DIFF (_priv_gst_info_start_time,
      gst_util_get_timestamp ());

  /* we don't use GST_TIME_FORMAT as such filenames would fail on some
   * filesystems like fat */
  ts_file_name =
      g_strdup_printf ("%u.%02u.%02u.%09u-%s", GST_TIME_ARGS (elapsed),
      file_name);

  gst_debug_bin_to_dot_file (bin, details, ts_file_name);
  g_free (ts_file_name);
}
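A usage sketch for the function above; GStreamer only writes the dot file when the GST_DEBUG_DUMP_DOT_DIR environment variable is set, and the base name is arbitrary.

static void
dump_pipeline_snapshot (GstElement * pipeline)
{
  /* "snapshot" is an arbitrary base name; the elapsed timestamp is prepended
   * as shown in the function above */
  gst_debug_bin_to_dot_file_with_ts (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "snapshot");
}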
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
    gboolean include_current_buf, GstVideoFrame ** new_frame)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBaseVideoDecoderClass *klass;

  guint64 frame_end_offset;
  GstClockTime timestamp, duration;
  GstClockTime running_time;
  GstClockTimeDiff deadline;
  GstFlowReturn ret;

  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  if (include_current_buf)
    frame_end_offset = base_video_decoder->current_buf_offset;
  else
    frame_end_offset = base_video_decoder->prev_buf_offset;

  gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
      frame_end_offset, &timestamp, &duration);

  frame->presentation_timestamp = timestamp;
  frame->presentation_duration = duration;

  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT))
    base_video_decoder->distance_from_sync = 0;

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
      GST_FORMAT_TIME, frame->presentation_timestamp);

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
  else
    deadline = G_MAXINT64;

  /* do something with frame */
  ret = klass->handle_frame (base_video_decoder, frame, deadline);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  if (new_frame)
    *new_frame = base_video_decoder->current_frame;

  return ret;
}
Example #7
static void
update_timestamps (GstDvdLpcmDec * dvdlpcmdec, GstBuffer * buf, int samples)
{
  gboolean take_buf_ts = FALSE;

  GST_BUFFER_DURATION (buf) =
      gst_util_uint64_scale (samples, GST_SECOND, dvdlpcmdec->rate);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    if (GST_CLOCK_TIME_IS_VALID (dvdlpcmdec->timestamp)) {
      GstClockTimeDiff one_sample = GST_SECOND / dvdlpcmdec->rate;
      GstClockTimeDiff diff = GST_CLOCK_DIFF (GST_BUFFER_TIMESTAMP (buf),
          dvdlpcmdec->timestamp);

      if (diff > one_sample || diff < -one_sample)
        take_buf_ts = TRUE;
    } else {
      take_buf_ts = TRUE;
    }
  } else if (!GST_CLOCK_TIME_IS_VALID (dvdlpcmdec->timestamp)) {
    dvdlpcmdec->timestamp = 0;
  }

  if (take_buf_ts) {
    /* Take buffer timestamp */
    dvdlpcmdec->timestamp = GST_BUFFER_TIMESTAMP (buf);
  } else {
    GST_BUFFER_TIMESTAMP (buf) = dvdlpcmdec->timestamp;
  }

  dvdlpcmdec->timestamp += GST_BUFFER_DURATION (buf);

  GST_LOG_OBJECT (dvdlpcmdec, "Updated timestamp to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
}
static GstPadProbeReturn
on_video_sink_data_flow (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  GstMiniObject *mini_obj = GST_PAD_PROBE_INFO_DATA (info);
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

  if (GST_IS_BUFFER (mini_obj)) {
    GstClockTime ts;

    /* assume the frame is going to be rendered. If it isn't, we'll get a QoS
     * message and reset ->frames_rendered from there.
     */
    g_atomic_int_inc (&self->frames_rendered);

    ts = gst_util_get_timestamp ();
    if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (self->start_ts))) {
      self->interval_ts = self->last_ts = self->start_ts = ts;
    }
    if (GST_CLOCK_DIFF (self->interval_ts, ts) > self->fps_update_interval) {
      display_current_fps (self);
      self->interval_ts = ts;
    }
  }

  return GST_PAD_PROBE_OK;
}
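A sketch of how such a probe might be installed on the video sink pad; the pad lookup and probe mask are assumptions, not taken from the snippet.

static void
attach_fps_probe (GstFPSDisplaySink * self, GstElement * video_sink)
{
  GstPad *sink_pad = gst_element_get_static_pad (video_sink, "sink");

  /* run on_video_sink_data_flow() for buffers and events on this pad */
  gst_pad_add_probe (sink_pad, GST_PAD_PROBE_TYPE_DATA_BOTH,
      on_video_sink_data_flow, self, NULL);
  gst_object_unref (sink_pad);
}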
Example #9
#ifdef __SYMBIAN32__
EXPORT_C
#endif

void
_gst_debug_bin_to_dot_file_with_ts (GstBin * bin, GstDebugGraphDetails details,
    const gchar * file_name)
{
  gchar *ts_file_name = NULL;
  GstClockTime elapsed;

  g_return_if_fail (GST_IS_BIN (bin));

  if (!file_name) {
    file_name = g_get_application_name ();
    if (!file_name)
      file_name = "unnamed";
  }

  /* add timestamp */
  elapsed = GST_CLOCK_DIFF (_priv_gst_info_start_time,
      gst_util_get_timestamp ());
  ts_file_name =
      g_strdup_printf ("%" GST_TIME_FORMAT "-%s", GST_TIME_ARGS (elapsed),
      file_name);

  _gst_debug_bin_to_dot_file (bin, details, ts_file_name);
  g_free (ts_file_name);
}
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
    GstClockTime duration)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
  GstBuffer *buf = NULL;
  IPin *pPin = NULL;
  HRESULT hres = S_FALSE;
  AM_MEDIA_TYPE *pMediaType = NULL;

  if (!buffer || size == 0 || !src) {
    return FALSE;
  }

  /* create a new buffer and assign the clock time to it as timestamp */
  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_SIZE (buf) = size;

  GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
  GST_BUFFER_TIMESTAMP (buf) =
    GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
  gst_object_unref (clock);

  GST_BUFFER_DURATION (buf) = duration;

  if (src->is_rgb) {
    /* FOR RGB directshow decoder will return bottom-up BITMAP
     * There is probably a way to get top-bottom video frames from
     * the decoder...
     */
    gint line = 0;
    gint stride = size / src->height;

    for (; line < src->height; line++) {
      memcpy (GST_BUFFER_DATA (buf) + (line * stride),
          buffer + (size - ((line + 1) * (stride))), stride);
    }
  } else {
    memcpy (GST_BUFFER_DATA (buf), buffer, size);
  }

  GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (duration));

  /* the negotiate() method already set caps on the source pad */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));

  g_mutex_lock (src->buffer_mutex);
  if (src->buffer != NULL)
    gst_buffer_unref (src->buffer);
  src->buffer = buf;
  g_cond_signal (src->buffer_cond);
  g_mutex_unlock (src->buffer_mutex);

  return TRUE;
}
Example #11
static void
check_log_handler (const gchar * const log_domain,
    const GLogLevelFlags log_level, const gchar * const message,
    gpointer const user_data)
{
  gchar *msg, *level;
  GstClockTime elapsed;

  //-- check message contents
  if (__check_method && (strstr (message, __check_method) != NULL)
      && __check_test && (strstr (message, __check_test) != NULL))
    __check_error_trapped = TRUE;
  else if (__check_method && (strstr (message, __check_method) != NULL)
      && !__check_test)
    __check_error_trapped = TRUE;
  else if (__check_test && (strstr (message, __check_test) != NULL)
      && !__check_method)
    __check_error_trapped = TRUE;

  //-- format
  switch (log_level & G_LOG_LEVEL_MASK) {
    case G_LOG_LEVEL_ERROR:
      level = "ERROR";
      break;
    case G_LOG_LEVEL_CRITICAL:
      level = "CRITICAL";
      break;
    case G_LOG_LEVEL_WARNING:
      level = "WARNING";
      break;
    case G_LOG_LEVEL_MESSAGE:
      level = "MESSAGE";
      break;
    case G_LOG_LEVEL_INFO:
      level = "INFO";
      break;
    case G_LOG_LEVEL_DEBUG:
      level = "DEBUG";
      break;
    default:
      level = "???";
      break;
  }

  elapsed =
      GST_CLOCK_DIFF (_priv_bt_info_start_time, gst_util_get_timestamp ());

  msg = g_alloca (85 + strlen (level) + (log_domain ? strlen (log_domain) : 0)
      + (message ? strlen (message) : 0));
  g_sprintf (msg,
      "%" GST_TIME_FORMAT " %" PID_FMT " %" PTR_FMT " %-7s %20s ::: %s",
      GST_TIME_ARGS (elapsed), getpid (), g_thread_self (), level,
      (log_domain ? log_domain : ""), (message ? message : ""));
  check_print_handler (msg);
}
static void
gst_identity_check_imperfect_timestamp (GstIdentity * identity, GstBuffer * buf)
{
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);

  /* invalid timestamp drops us out of check.  FIXME: maybe warn ? */
  if (timestamp != GST_CLOCK_TIME_NONE) {
    /* check if we had a previous buffer to compare to */
    if (identity->prev_timestamp != GST_CLOCK_TIME_NONE &&
        identity->prev_duration != GST_CLOCK_TIME_NONE) {
      GstClockTime t_expected;
      GstClockTimeDiff dt;

      t_expected = identity->prev_timestamp + identity->prev_duration;
      dt = GST_CLOCK_DIFF (t_expected, timestamp);
      if (dt != 0) {
        /*
         * "imperfect-timestamp" bus message:
         * @identity:        the identity instance
         * @delta:           the GST_CLOCK_DIFF to the prev timestamp
         * @prev-timestamp:  the previous buffer timestamp
         * @prev-duration:   the previous buffer duration
         * @prev-offset:     the previous buffer offset
         * @prev-offset-end: the previous buffer offset end
         * @cur-timestamp:   the current buffer timestamp
         * @cur-duration:    the current buffer duration
         * @cur-offset:      the current buffer offset
         * @cur-offset-end:  the current buffer offset end
         *
         * This bus message gets emitted if the check-imperfect-timestamp
         * property is set and there is a gap in time between the
         * last buffer and the newly received buffer.
         */
        gst_element_post_message (GST_ELEMENT (identity),
            gst_message_new_element (GST_OBJECT (identity),
                gst_structure_new ("imperfect-timestamp",
                    "delta", G_TYPE_INT64, dt,
                    "prev-timestamp", G_TYPE_UINT64,
                    identity->prev_timestamp, "prev-duration", G_TYPE_UINT64,
                    identity->prev_duration, "prev-offset", G_TYPE_UINT64,
                    identity->prev_offset, "prev-offset-end", G_TYPE_UINT64,
                    identity->prev_offset_end, "cur-timestamp", G_TYPE_UINT64,
                    timestamp, "cur-duration", G_TYPE_UINT64,
                    GST_BUFFER_DURATION (buf), "cur-offset", G_TYPE_UINT64,
                    GST_BUFFER_OFFSET (buf), "cur-offset-end", G_TYPE_UINT64,
                    GST_BUFFER_OFFSET_END (buf), NULL)));
      }
    } else {
      GST_DEBUG_OBJECT (identity, "can't check data-contiguity, no "
          "timestamp and/or duration was set on previous buffer");
    }
  }
}
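A sketch of how an application might consume the bus message documented above, assuming a watch installed with gst_bus_add_watch() and check-imperfect-timestamp enabled on the identity element.

static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (gst_structure_has_name (s, "imperfect-timestamp")) {
      gint64 delta = 0;

      /* "delta" is posted as G_TYPE_INT64 in the structure above */
      gst_structure_get_int64 (s, "delta", &delta);
      g_print ("timestamp gap of %" GST_STIME_FORMAT " on %s\n",
          GST_STIME_ARGS (delta), GST_OBJECT_NAME (GST_MESSAGE_SRC (msg)));
    }
  }
  return TRUE;
}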
static void
gst_test_clock_add_entry (GstTestClock * test_clock,
    GstClockEntry * entry, GstClockTimeDiff * jitter)
{
  GstTestClockPrivate *priv = GST_TEST_CLOCK_GET_PRIVATE (test_clock);
  GstClockTime now;
  GstClockEntryContext *ctx;

  now = gst_clock_adjust_unlocked (GST_CLOCK (test_clock), priv->internal_time);

  if (jitter != NULL)
    *jitter = GST_CLOCK_DIFF (GST_CLOCK_ENTRY_TIME (entry), now);

  ctx = g_slice_new (GstClockEntryContext);
  ctx->clock_entry = GST_CLOCK_ENTRY (gst_clock_id_ref (entry));
  ctx->time_diff = GST_CLOCK_DIFF (now, GST_CLOCK_ENTRY_TIME (entry));

  priv->entry_contexts = g_list_insert_sorted (priv->entry_contexts, ctx,
      gst_clock_entry_context_compare_func);

  g_cond_broadcast (&priv->entry_added_cond);
}
Example #14
static void
check_gst_log_handler (GstDebugCategory * category,
    GstDebugLevel level, const gchar * file, const gchar * function, gint line,
    GObject * object, GstDebugMessage * _message, gpointer data)
{
  const gchar *message = gst_debug_message_get (_message);
  gchar *msg, *obj_str;
  const gchar *level_str, *cat_str;
  GstClockTime elapsed;

  //-- check message contents
  if (__check_method && (strstr (function, __check_method) != NULL)
      && __check_test && (strstr (message, __check_test) != NULL))
    __check_error_trapped = TRUE;
  else if (__check_method && (strstr (function, __check_method) != NULL)
      && !__check_test)
    __check_error_trapped = TRUE;
  else if (__check_test && (strstr (message, __check_test) != NULL)
      && !__check_method)
    __check_error_trapped = TRUE;

  if (level > gst_debug_category_get_threshold (category))
    return;

  elapsed =
      GST_CLOCK_DIFF (_priv_bt_info_start_time, gst_util_get_timestamp ());
  level_str = gst_debug_level_get_name (level);
  cat_str = gst_debug_category_get_name (category);
  if (object) {
    if (GST_IS_OBJECT (object)) {
      obj_str = g_strdup_printf ("<%s,%" G_OBJECT_REF_COUNT_FMT ">",
          GST_OBJECT_NAME (object), G_OBJECT_LOG_REF_COUNT (object));
    } else if (G_IS_OBJECT (object)) {
      obj_str = g_strdup_printf ("<%s,%" G_OBJECT_REF_COUNT_FMT ">",
          G_OBJECT_TYPE_NAME (object), G_OBJECT_LOG_REF_COUNT (object));
    } else {
      obj_str = g_strdup_printf ("%p", object);
    }
  } else {
    obj_str = g_strdup ("");
  }

  msg = g_alloca (95 + strlen (cat_str) + strlen (level_str) + strlen (message)
      + strlen (file) + strlen (function) + strlen (obj_str));
  g_sprintf (msg,
      "%" GST_TIME_FORMAT " %" PID_FMT " %" PTR_FMT " %-7s %20s %s:%d:%s:%s %s",
      GST_TIME_ARGS (elapsed), getpid (), g_thread_self (),
      level_str, cat_str, file, line, function, obj_str, message);
  g_free (obj_str);
  check_print_handler (msg);
}
Example #15
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      if (diff <= 0.0) {
        g_atomic_int_inc (&self->frames_rendered);
      } else {
        g_atomic_int_inc (&self->frames_dropped);
      }

      ts = gst_util_get_timestamp ();
      if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (self->start_ts))) {
        self->interval_ts = self->last_ts = self->start_ts = ts;
      }
      if (GST_CLOCK_DIFF (self->interval_ts, ts) > self->fps_update_interval) {
        display_current_fps (self);
        self->interval_ts = ts;
      }
    }
  }
  return TRUE;
}
Example #16
/**
 * @brief Handle buffer data.
 * @return None
 * @param self pointer to GstTensorSink2
 * @param buffer pointer to GstBuffer to be handled
 */
static void
gst_tensor_sink2_render_buffer (GstTensorSink2 * self, GstBuffer * buffer)
{
  GstClockTime now = GST_CLOCK_TIME_NONE;
  guint signal_rate;
  gboolean notify = FALSE;

  g_return_if_fail (GST_IS_TENSOR_SINK2 (self));

  signal_rate = gst_tensor_sink2_get_signal_rate (self);

  if (signal_rate) {
    GstClock *clock;
    GstClockTime render_time;
    GstClockTime last_render_time;

    clock = gst_element_get_clock (GST_ELEMENT (self));

    if (clock) {
      now = gst_clock_get_time (clock);
      last_render_time = gst_tensor_sink2_get_last_render_time (self);

      /** time for next signal */
      render_time = (1000 / signal_rate) * GST_MSECOND + last_render_time;

      if (!GST_CLOCK_TIME_IS_VALID (last_render_time) ||
          GST_CLOCK_DIFF (now, render_time) <= 0) {
        /** send data after render time, or for the first received buffer */
        notify = TRUE;
      }

      gst_object_unref (clock);
    }
  } else {
    /** send data if signal rate is 0 */
    notify = TRUE;
  }

  if (notify) {
    gst_tensor_sink2_set_last_render_time (self, now);

    if (gst_tensor_sink2_get_emit_signal (self)) {
      silent_debug ("Emit signal for new data [%" GST_TIME_FORMAT "] rate [%d]",
          GST_TIME_ARGS (now), signal_rate);

      g_signal_emit (self, _tensor_sink2_signals[SIGNAL_NEW_DATA], 0, buffer);
    }
  }
}
static inline gdouble
_interpolate_trigger (GstTimedValueControlSource * self, GSequenceIter * iter,
    GstClockTime timestamp)
{
  GstControlPoint *cp;
  gint64 tolerance = ((GstTriggerControlSource *) self)->priv->tolerance;
  gboolean found = FALSE;

  cp = g_sequence_get (iter);
  if (GST_CLOCK_DIFF (cp->timestamp, timestamp) <= tolerance) {
    found = TRUE;
  } else {
    if ((iter = g_sequence_iter_next (iter)) && !g_sequence_iter_is_end (iter)) {
      cp = g_sequence_get (iter);
      if (GST_CLOCK_DIFF (timestamp, cp->timestamp) <= tolerance) {
        found = TRUE;
      }
    }
  }
  if (found) {
    return cp->value;
  }
  return NAN;
}
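A usage sketch for a trigger control source like the one interpolated above; the target element and its "volume" property are assumptions for illustration.

static void
attach_trigger_cs (GstElement * element)
{
  GstControlSource *cs = gst_trigger_control_source_new ();

  /* values only fire within 10 ms of their exact timestamps */
  g_object_set (cs, "tolerance", (gint64) (10 * GST_MSECOND), NULL);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0 * GST_SECOND, 1.0);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      5 * GST_SECOND, 0.0);
  gst_object_add_control_binding (GST_OBJECT (element),
      gst_direct_control_binding_new (GST_OBJECT (element), "volume", cs));
}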
Example #18
static GstFlowReturn
gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime running_time;
  GstClockTimeDiff deadline;

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

  frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (frame->sink_buffer);
  frame->presentation_duration = GST_BUFFER_DURATION (frame->sink_buffer);

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  base_video_decoder->frames = g_list_append (base_video_decoder->frames,
      frame);

  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
      GST_FORMAT_TIME, frame->presentation_timestamp);

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
  else
    deadline = G_MAXINT64;

  /* do something with frame */
  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame,
      deadline);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  return ret;
}
Example #19
static gboolean
gst_shm_sink_can_render (GstShmSink * self, GstClockTime time)
{
  ShmBuffer *b;

  if (time == GST_CLOCK_TIME_NONE || self->buffer_time == GST_CLOCK_TIME_NONE)
    return TRUE;

  b = sp_writer_get_pending_buffers (self->pipe);
  for (; b != NULL; b = sp_writer_get_next_buffer (b)) {
    GstBuffer *buf = sp_writer_buf_get_tag (b);
    if (GST_CLOCK_DIFF (time, GST_BUFFER_PTS (buf)) > self->buffer_time)
      return FALSE;
  }

  return TRUE;
}
Example #20
static void source_check (Gstreamill *gstreamill, Job *job)
{
        gint i;
        GstClockTimeDiff time_diff;
        GstClockTime now;

        /* log source timestamp. */
        for (i = 0; i < job->output->source.stream_count; i++) {
                GST_INFO ("%s.source.%s timestamp %" GST_TIME_FORMAT,
                                job->name,
                                job->output->source.streams[i].name,
                                GST_TIME_ARGS (job->output->source.streams[i].current_timestamp));
        }

        /* non live job, don't check heartbeat */
        if (!job->is_live) {
                return;
        }

        /* source heartbeat check */
        for (i = 0; i < job->output->source.stream_count; i++) {
                /* check video and audio */
                if (!g_str_has_prefix (job->output->source.streams[i].name, "video") &&
                    !g_str_has_prefix (job->output->source.streams[i].name, "audio")) {
                        continue;
                }

                now = gst_clock_get_time (gstreamill->system_clock);
                time_diff = GST_CLOCK_DIFF (job->output->source.streams[i].last_heartbeat, now);
                if ((time_diff > HEARTBEAT_THRESHHOLD) && gstreamill->daemon) {
                        GST_ERROR ("%s.source.%s heart beat error %lu, restart job.",
                                        job->name,
                                        job->output->source.streams[i].name,
                                        time_diff);
                        /* restart job. */
                        stop_job (job, SIGKILL);
                        return;

                } else {
                        GST_INFO ("%s.source.%s heartbeat %" GST_TIME_FORMAT,
                                        job->name,
                                        job->output->source.streams[i].name,
                                        GST_TIME_ARGS (job->output->source.streams[i].last_heartbeat));
                }
        }
}
static void
log_latency (const GstStructure * data, GstPad * sink_pad, guint64 sink_ts)
{
  GstPad *src_pad;
  guint64 src_ts;
  gchar *src, *sink;

  gst_structure_id_get (data,
      latency_probe_pad, GST_TYPE_PAD, &src_pad,
      latency_probe_ts, G_TYPE_UINT64, &src_ts, NULL);

  src = g_strdup_printf ("%s_%s", GST_DEBUG_PAD_NAME (src_pad));
  sink = g_strdup_printf ("%s_%s", GST_DEBUG_PAD_NAME (sink_pad));

  gst_tracer_record_log (tr_latency, src, sink,
      GST_CLOCK_DIFF (src_ts, sink_ts));
  g_free (src);
  g_free (sink);
}
Example #22
static void
calculate_latency (GstPad * pad, GstBuffer * buffer)
{
  KmsBufferLatencyMeta *meta;
  GstClockTime now;

  meta = kms_buffer_get_buffer_latency_meta (buffer);

  fail_if (meta == NULL);
  fail_if (!GST_CLOCK_TIME_IS_VALID (meta->ts));

  now = g_get_monotonic_time () * GST_USECOND;

  GST_INFO_OBJECT (pad, "got meta %" GST_TIME_FORMAT " at %" GST_TIME_FORMAT
      " (diff: %" GST_STIME_FORMAT ")",
      GST_TIME_ARGS (GST_TIME_AS_USECONDS (meta->ts)),
      GST_TIME_ARGS (GST_TIME_AS_USECONDS (now)),
      GST_STIME_ARGS (GST_CLOCK_DIFF (meta->ts, now)));
}
gint
main (gint argc, gchar ** argv)
{
  GOptionContext *opt_ctx;
  GstClock *clock;
  GError *err = NULL;

  opt_ctx = g_option_context_new ("- GStreamer PTP clock test app");
  g_option_context_add_main_entries (opt_ctx, opt_entries, NULL);
  g_option_context_add_group (opt_ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (opt_ctx, &argc, &argv, &err))
    g_error ("Error parsing options: %s", err->message);
  g_clear_error (&err);
  g_option_context_free (opt_ctx);

  if (!gst_ptp_init (GST_PTP_CLOCK_ID_NONE, NULL))
    g_error ("failed to init ptp");

  if (stats)
    gst_ptp_statistics_callback_add (stats_cb, NULL, NULL);

  clock = gst_ptp_clock_new ("test-clock", domain);

  gst_clock_wait_for_sync (GST_CLOCK (clock), GST_CLOCK_TIME_NONE);

  while (TRUE) {
    GstClockTime local, remote;
    GstClockTimeDiff diff;

    local = g_get_real_time () * 1000;
    remote = gst_clock_get_time (clock);
    diff = GST_CLOCK_DIFF (local, remote);

    g_print ("local: %" GST_TIME_FORMAT " ptp: %" GST_TIME_FORMAT " diff: %s%"
        GST_TIME_FORMAT "\n", GST_TIME_ARGS (local), GST_TIME_ARGS (remote),
        (diff < 0 ? "-" : " "), GST_TIME_ARGS (ABS (diff)));
    g_usleep (100000);
  }

  return 0;
}
Example #24
/**
 * @brief Push GstBuffer
 */
static void
gst_tensor_reposink_render_buffer (GstTensorRepoSink * self, GstBuffer * buffer)
{
  GstClockTime now = GST_CLOCK_TIME_NONE;
  guint signal_rate;
  gboolean notify = FALSE;
  g_return_if_fail (GST_IS_TENSOR_REPOSINK (self));

  signal_rate = self->signal_rate;

  if (signal_rate) {
    GstClock *clock;
    GstClockTime render_time;

    clock = gst_element_get_clock (GST_ELEMENT (self));

    if (clock) {
      now = gst_clock_get_time (clock);
      render_time = (1000 / signal_rate) * GST_MSECOND + self->last_render_time;

      if (!GST_CLOCK_TIME_IS_VALID (self->last_render_time) ||
          GST_CLOCK_DIFF (now, render_time) <= 0) {
        notify = TRUE;
      }

      gst_object_unref (clock);
    }
  } else {
    notify = TRUE;
  }

  if (notify) {
    self->last_render_time = now;

    if (!gst_tensor_repo_set_buffer (self->myid, self->o_myid, buffer,
            self->in_caps)) {
      GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
          ("Cannot Set buffer into repo [key: %d]", self->myid), NULL);
    }
  }
}
Example #25
static gboolean
buffer_for_each_meta_cb (GstBuffer * buffer, GstMeta ** meta, ProbeData * pdata)
{
  BufferLatencyCallback func = (BufferLatencyCallback) pdata->cb;
  GstPad *pad = GST_PAD (pdata->invoke_data);
  KmsBufferLatencyMeta *blmeta;
  GstClockTimeDiff diff;
  GstClockTime now;

  if ((*meta)->info->api != KMS_BUFFER_LATENCY_META_API_TYPE) {
    /* continue iterating */
    return TRUE;
  }

  blmeta = (KmsBufferLatencyMeta *) * meta;

  if (!blmeta->valid) {
    /* Ignore this meta */
    return TRUE;
  }

  if (func == NULL) {
    return TRUE;
  }

  now = kms_utils_get_time_nsecs ();
  diff = GST_CLOCK_DIFF (blmeta->ts, now);

  if (pdata->locked) {
    KMS_BUFFER_LATENCY_DATA_LOCK (blmeta);
  }

  func (pad, blmeta->type, diff, blmeta->data, pdata->user_data);

  if (pdata->locked) {
    KMS_BUFFER_LATENCY_DATA_UNLOCK (blmeta);
  }

  return TRUE;
}
Example #26
static void sync_check (Gstreamill *gstreamill, Job *job)
{
        gint j;
        GstClockTimeDiff time_diff;
        GstClockTime min, max;

        min = GST_CLOCK_TIME_NONE;
        max = 0;
        for (j = 0; j < job->output->source.stream_count; j++) {
                if (!g_str_has_prefix (job->output->source.streams[j].name, "video") &&
                    !g_str_has_prefix (job->output->source.streams[j].name, "audio")) {
                        continue;
                }

                if (min > job->output->source.streams[j].current_timestamp) {
                        min = job->output->source.streams[j].current_timestamp;
                }

                if (max < job->output->source.streams[j].current_timestamp) {
                        max = job->output->source.streams[j].current_timestamp;
                }
        }
        time_diff = GST_CLOCK_DIFF (min, max);
        if ((time_diff > SYNC_THRESHHOLD) && gstreamill->daemon) {
                GST_ERROR ("%s sync error %" G_GINT64_FORMAT, job->name, time_diff);
                job->output->source.sync_error_times += 1;
                if (job->output->source.sync_error_times == 3) {
                        GST_ERROR ("sync error times %ld, restart %s", job->output->source.sync_error_times, job->name);
                        /* restart job. */
                        stop_job (job, SIGKILL);
                        return;
                }

        } else {
                job->output->source.sync_error_times = 0;
        }
}
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp) > 5 * GST_MSECOND)))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

  if (diff < generated_samples) {
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }
  g_mutex_unlock (&self->lock);

  GST_DEBUG_OBJECT (self,
      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), generated_samples);

  return GST_FLOW_OK;

no_samples:
  {
    g_mutex_unlock (&self->lock);
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
Example #28
bool mmsGstPlay(GstElement *pipelineX) {

	pipeline = pipelineX;


	GstState state, pending;


    caught_error = event_loop (pipeline, FALSE, GST_STATE_PLAYING);

    if (caught_error) {
      fprintf (stderr, "ERROR: pipeline doesn't want to preroll.\n");
    } else {
      GstClockTime tfthen, tfnow;
      GstClockTimeDiff diff;

      fprintf (stderr, "Setting pipeline to PLAYING ...\n");
      if (gst_element_set_state (pipeline,
              GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        GstMessage *err_msg;
        GstBus *bus;

        fprintf (stderr, "ERROR: pipeline doesn't want to play.\n");
        bus = gst_element_get_bus (pipeline);
        if ((err_msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0))) {
          GError *gerror;
          gchar *debug;

          gst_message_parse_error (err_msg, &gerror, &debug);
          gst_object_default_error (GST_MESSAGE_SRC (err_msg), gerror, debug);
          gst_message_unref (err_msg);
          g_error_free (gerror);
          g_free (debug);
        }
        gst_object_unref (bus);

        mmsGstFree();

        return false;
      }

      tfthen = gst_util_get_timestamp ();
      caught_error = event_loop (pipeline, TRUE, GST_STATE_PLAYING);
      tfnow = gst_util_get_timestamp ();

      diff = GST_CLOCK_DIFF (tfthen, tfnow);

      g_print ("Execution ended after %" G_GUINT64_FORMAT " ns.\n", diff);
    }

    /* iterate mainloop to process pending stuff */
    while (g_main_context_iteration (NULL, FALSE));

    fprintf (stderr, "Setting pipeline to PAUSED ...\n");
    gst_element_set_state (pipeline, GST_STATE_PAUSED);
    if (!caught_error)
      gst_element_get_state (pipeline, &state, &pending, GST_CLOCK_TIME_NONE);
    fprintf (stderr, "Setting pipeline to READY ...\n");
    gst_element_set_state (pipeline, GST_STATE_READY);
    gst_element_get_state (pipeline, &state, &pending, GST_CLOCK_TIME_NONE);

    // playback is finished
    return true;
}
Example #29
static void encoders_check (Gstreamill *gstreamill, Job *job)
{
        gint j, k;
        GstClockTimeDiff time_diff;
        GstClockTime now;

        /* log encoder current timestamp. */
        for (j = 0; j < job->output->encoder_count; j++) {
                for (k = 0; k < job->output->encoders[j].stream_count; k++) {
                        GST_INFO ("%s.%s timestamp %" GST_TIME_FORMAT,
                                        job->output->encoders[j].name,
                                        job->output->encoders[j].streams[k].name,
                                        GST_TIME_ARGS (job->output->encoders[j].streams[k].current_timestamp));
                }
        }

        /* non live job, don't check heartbeat */
        if (!job->is_live) {
                return;
        }

        /* encoder heartbeat check */
        for (j = 0; j < job->output->encoder_count; j++) {
                for (k = 0; k < job->output->encoders[j].stream_count; k++) {
                        if (!g_str_has_prefix (job->output->encoders[j].streams[k].name, "video") &&
                            !g_str_has_prefix (job->output->encoders[j].streams[k].name, "audio")) {
                                continue;
                        }

                        now = gst_clock_get_time (gstreamill->system_clock);
                        time_diff = GST_CLOCK_DIFF (job->output->encoders[j].streams[k].last_heartbeat, now);
                        if ((time_diff > HEARTBEAT_THRESHHOLD) && gstreamill->daemon) {
                                GST_ERROR ("%s.%s heartbeat error %lu, restart",
                                                job->output->encoders[j].name,
                                                job->output->encoders[j].streams[k].name,
                                                time_diff);
                                /* restart job. */
                                stop_job (job, SIGKILL);
                                return;

                        } else {
                                GST_INFO ("%s.%s heartbeat %" GST_TIME_FORMAT,
                                                job->output->encoders[j].name,
                                                job->output->encoders[j].streams[k].name,
                                                GST_TIME_ARGS (job->output->encoders[j].streams[k].last_heartbeat));
                        }
                }
        }

        /* encoder job->output heartbeat check. */
        for (j = 0; j < job->output->encoder_count; j++) {
                now = gst_clock_get_time (gstreamill->system_clock);
                time_diff = GST_CLOCK_DIFF (*(job->output->encoders[j].heartbeat), now);
                if ((time_diff > ENCODER_OUTPUT_HEARTBEAT_THRESHHOLD) && gstreamill->daemon) {
                        GST_ERROR ("%s job->output heart beat error %lu, restart",
                                        job->output->encoders[j].name,
                                        time_diff);
                        /* restart job. */
                        stop_job (job, SIGKILL);
                        return;

                } else {
                        GST_INFO ("%s job->output heartbeat %" GST_TIME_FORMAT,
                                        job->output->encoders[j].name,
                                        GST_TIME_ARGS (*(job->output->encoders[j].heartbeat)));
                }
        }
}
Example #30
static gboolean
gst_ks_video_src_timestamp_buffer (GstKsVideoSrc * self, GstBuffer * buf,
    GstClockTime presentation_time)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstClockTime duration;
  GstClock *clock;
  GstClockTime timestamp;

  duration = gst_ks_video_device_get_duration (priv->device);

  GST_OBJECT_LOCK (self);
  clock = GST_ELEMENT_CLOCK (self);
  if (clock != NULL) {
    gst_object_ref (clock);
    timestamp = GST_ELEMENT (self)->base_time;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      if (presentation_time > GST_ELEMENT (self)->base_time)
        presentation_time -= GST_ELEMENT (self)->base_time;
      else
        presentation_time = 0;
    }
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (self);

  if (clock != NULL) {

    /* The time according to the current clock */
    timestamp = gst_clock_get_time (clock) - timestamp;
    if (timestamp > duration)
      timestamp -= duration;
    else
      timestamp = 0;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      /*
       * We don't use this for anything yet, need to ponder how to deal
       * with pins that use an internal clock and timestamp from 0.
       */
      GstClockTimeDiff diff = GST_CLOCK_DIFF (presentation_time, timestamp);
      GST_DEBUG_OBJECT (self, "diff between gst and driver timestamp: %"
          G_GINT64_FORMAT, diff);
    }

    gst_object_unref (clock);
    clock = NULL;

    /* Unless it's the first frame, align the current timestamp on a multiple
     * of duration since the previous */
    if (GST_CLOCK_TIME_IS_VALID (priv->prev_ts)) {
      GstClockTime delta;
      guint delta_remainder, delta_offset;

      /* REVISIT: I've seen this happen with the GstSystemClock on Windows,
       *          scary... */
      if (timestamp < priv->prev_ts) {
        GST_INFO_OBJECT (self, "clock is ticking backwards");
        return FALSE;
      }

      /* Round to a duration boundary */
      delta = timestamp - priv->prev_ts;
      delta_remainder = delta % duration;

      if (delta_remainder < duration / 3)
        timestamp -= delta_remainder;
      else
        timestamp += duration - delta_remainder;

      /* How many frames are we off then? */
      delta = timestamp - priv->prev_ts;
      delta_offset = delta / duration;

      if (delta_offset == 1)    /* perfect */
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      else if (delta_offset > 1) {
        guint lost = delta_offset - 1;
        GST_INFO_OBJECT (self, "lost %d frame%s, setting discont flag",
            lost, (lost > 1) ? "s" : "");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      } else if (delta_offset == 0) {   /* overproduction, skip this frame */
        GST_INFO_OBJECT (self, "skipping frame");
        return FALSE;
      }

      priv->offset += delta_offset;
    }

    priv->prev_ts = timestamp;
  }

  GST_BUFFER_OFFSET (buf) = priv->offset;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return TRUE;
}