Example #1
static void
gst_video_rate_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (object);

  switch (prop_id) {
    case ARG_IN:
      g_value_set_uint64 (value, videorate->in);
      break;
    case ARG_OUT:
      g_value_set_uint64 (value, videorate->out);
      break;
    case ARG_DUP:
      g_value_set_uint64 (value, videorate->dup);
      break;
    case ARG_DROP:
      g_value_set_uint64 (value, videorate->drop);
      break;
    case ARG_SILENT:
      g_value_set_boolean (value, videorate->silent);
      break;
    case ARG_NEW_PREF:
      g_value_set_double (value, videorate->new_pref);
      break;
    case ARG_SKIP_TO_FIRST:
      g_value_set_boolean (value, videorate->skip_to_first);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
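
For context, the properties read back above are normally registered in the element's class_init. The sketch below is not taken from the example set: the nicks, blurbs, ranges and defaults are assumptions, only the GObject calls themselves are standard API.

/* Hypothetical excerpt from gst_video_rate_class_init (); names, ranges and
 * defaults are illustrative only. */
static void
gst_video_rate_class_init (GstVideoRateClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->get_property = gst_video_rate_get_property;

  /* read-only frame counter, matching ARG_IN in the switch above */
  g_object_class_install_property (object_class, ARG_IN,
      g_param_spec_uint64 ("in", "In", "Number of input frames",
          0, G_MAXUINT64, 0, G_PARAM_READABLE));

  /* read/write flags and tuning values */
  g_object_class_install_property (object_class, ARG_SILENT,
      g_param_spec_boolean ("silent", "Silent",
          "Don't notify about dropped and duplicated frames",
          TRUE, G_PARAM_READWRITE));
  g_object_class_install_property (object_class, ARG_NEW_PREF,
      g_param_spec_double ("new-pref", "New Pref",
          "Weight given to new frames (0.0 - 1.0)",
          0.0, 1.0, 1.0, G_PARAM_READWRITE));
}
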
Example #2
static GstStateChangeReturn
gst_video_rate_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstVideoRate *videorate;

  videorate = GST_VIDEO_RATE (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      videorate->discont = TRUE;
      videorate->last_ts = -1;
      break;
    default:
      break;
  }

  ret = parent_class->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_video_rate_reset (videorate);
      break;
    default:
      break;
  }

  return ret;
}
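
The handler above only runs if it is installed as the element's change_state vfunc, and the chain-up through parent_class->change_state relies on the parent class pointer captured at class-init time. A minimal 0.10-style sketch of that wiring, with names and surrounding boilerplate assumed:

/* Sketch only: install the vfunc and capture parent_class for chaining up. */
static GstElementClass *parent_class = NULL;

static void
gst_video_rate_class_init (GstVideoRateClass * klass)
{
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);

  parent_class = g_type_class_peek_parent (klass);

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_video_rate_change_state);
}
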
Example #3
static GstCaps *
gst_video_rate_getcaps (GstPad * pad)
{
  GstVideoRate *videorate;
  GstPad *otherpad;
  GstCaps *caps;

  videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));

  otherpad = (pad == videorate->srcpad) ? videorate->sinkpad :
      videorate->srcpad;

  /* we can do what the peer can */
  caps = gst_pad_peer_get_caps (otherpad);
  if (caps) {
    GstCaps *transform;

    gst_video_rate_transformcaps (otherpad, caps, pad, &transform);
    gst_caps_unref (caps);
    caps = transform;
  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
  }

  return caps;
}
Example #4
static void
gst_video_rate_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (object);

  GST_OBJECT_LOCK (videorate);
  switch (prop_id) {
    case PROP_SILENT:
      videorate->silent = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (videorate);
      break;
    case PROP_NEW_PREF:
      videorate->new_pref = g_value_get_double (value);
      GST_OBJECT_UNLOCK (videorate);
      break;
    case PROP_SKIP_TO_FIRST:
      videorate->skip_to_first = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (videorate);
      break;
    case PROP_DROP_ONLY:
      videorate->drop_only = g_value_get_boolean (value);
      goto reconfigure;
      break;
    case PROP_AVERAGE_PERIOD:
      videorate->average_period_set = g_value_get_uint64 (value);
      GST_OBJECT_UNLOCK (videorate);
      break;
    case PROP_MAX_RATE:
      g_atomic_int_set (&videorate->max_rate, g_value_get_int (value));
      goto reconfigure;
      break;
    case PROP_RATE:
      videorate->rate = g_value_get_double (value);
      GST_OBJECT_UNLOCK (videorate);
      gst_videorate_update_duration (videorate);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      GST_OBJECT_UNLOCK (videorate);
      break;
  }
  return;

reconfigure:
  GST_OBJECT_UNLOCK (videorate);
  gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (videorate));
}
Example #5
static void
gst_video_rate_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (object);
  gboolean latency_changed = FALSE;

  GST_OBJECT_LOCK (videorate);
  switch (prop_id) {
    case PROP_SILENT:
      videorate->silent = g_value_get_boolean (value);
      break;
    case PROP_NEW_PREF:
      videorate->new_pref = g_value_get_double (value);
      break;
    case PROP_SKIP_TO_FIRST:
      videorate->skip_to_first = g_value_get_boolean (value);
      break;
    case PROP_DROP_ONLY:{
      gboolean new_value = g_value_get_boolean (value);

      /* Latency changes if we switch drop-only mode */
      latency_changed = new_value != videorate->drop_only;
      videorate->drop_only = g_value_get_boolean (value);
      goto reconfigure;
    }
    case PROP_AVERAGE_PERIOD:
      videorate->average_period_set = g_value_get_uint64 (value);
      break;
    case PROP_MAX_RATE:
      g_atomic_int_set (&videorate->max_rate, g_value_get_int (value));
      goto reconfigure;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (videorate);
  return;

reconfigure:
  GST_OBJECT_UNLOCK (videorate);
  gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (videorate));

  if (latency_changed) {
    gst_element_post_message (GST_ELEMENT (videorate),
        gst_message_new_latency (GST_OBJECT (videorate)));
  }
}
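
As a usage note, changing drop-only or max-rate from application code takes the reconfigure path above, so the source caps are renegotiated and, for drop-only, a latency message is posted. A minimal sketch using standard GstElement/GObject calls; the property names are assumed to be "drop-only" and "max-rate", matching the PROP_ cases above:

/* Hedged sketch: adjust videorate behaviour at runtime. */
GstElement *rate = gst_element_factory_make ("videorate", NULL);

g_object_set (rate,
    "drop-only", TRUE,          /* only drop, never duplicate frames   */
    "max-rate", 30,             /* cap the output framerate at 30 fps  */
    NULL);
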
Example #6
static void
gst_video_rate_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (object);

  GST_OBJECT_LOCK (videorate);
  switch (prop_id) {
    case PROP_IN:
      g_value_set_uint64 (value, videorate->in);
      break;
    case PROP_OUT:
      g_value_set_uint64 (value, videorate->out);
      break;
    case PROP_DUP:
      g_value_set_uint64 (value, videorate->dup);
      break;
    case PROP_DROP:
      g_value_set_uint64 (value, videorate->drop);
      break;
    case PROP_SILENT:
      g_value_set_boolean (value, videorate->silent);
      break;
    case PROP_NEW_PREF:
      g_value_set_double (value, videorate->new_pref);
      break;
    case PROP_SKIP_TO_FIRST:
      g_value_set_boolean (value, videorate->skip_to_first);
      break;
    case PROP_DROP_ONLY:
      g_value_set_boolean (value, videorate->drop_only);
      break;
    case PROP_AVERAGE_PERIOD:
      g_value_set_uint64 (value, videorate->average_period_set);
      break;
    case PROP_MAX_RATE:
      g_value_set_int (value, g_atomic_int_get (&videorate->max_rate));
      break;
    case PROP_RATE:
      g_value_set_double (value, videorate->rate);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (videorate);
}
Example #7
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
  GstClockTime intime, in_ts, in_dur;
  GstClockTime avg_period;
  gboolean skip = FALSE;

  videorate = GST_VIDEO_RATE (trans);

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  GST_OBJECT_LOCK (videorate);
  avg_period = videorate->average_period_set;
  GST_OBJECT_UNLOCK (videorate);

  /* MT-safe switching between modes */
  if (G_UNLIKELY (avg_period != videorate->average_period)) {
    gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
    videorate->average_period = avg_period;
    videorate->last_ts = GST_CLOCK_TIME_NONE;

    if (switch_mode) {
      if (avg_period) {
        /* enabling average mode */
        videorate->average = 0;
        /* make sure no cached buffers from regular mode are left */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      } else {
        /* enable regular mode */
        videorate->next_ts = GST_CLOCK_TIME_NONE;
        skip = TRUE;
      }

      /* max averaging mode has no latency, normal mode does */
      gst_element_post_message (GST_ELEMENT (videorate),
          gst_message_new_latency (GST_OBJECT (videorate)));
    }
  }

  if (videorate->average_period > 0)
    return gst_video_rate_trans_ip_max_avg (videorate, buffer);

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.base;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first || skip) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts = videorate->segment.start + videorate->segment.base;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {

      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when it's the best */
      if (diff1 <= diff2) {
        GstFlowReturn r;
        count++;

        /* on error the _flush function posted a warning already */
        if ((r = gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }

      /* Do not produce any dups. We can exit loop now */
      if (videorate->drop_only)
        break;
      /* continue while the first one was the best; if they were equal,
       * avoid going into an infinite loop */
    }
    while (diff1 < diff2);

    /* if we outputted the first buffer more than once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }
    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    res = GST_BASE_TRANSFORM_FLOW_DROPPED;
    goto done;
  }
}
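
To make the diff1/diff2 selection concrete, consider 50 fps input converted to 25 fps output, with next_ts starting at 0 and, presumably, advancing by one output frame period (40 ms) each time gst_video_rate_flush_prev () pushes a frame. The trace below is worked out from the loop above, not copied from the sources:

/* input timestamps (ms): 0, 20, 40, 60, ...
 *
 * prev = 0 ms,  new = 20 ms:  diff1 = |0 - 0|   = 0,  diff2 = |20 - 0|  = 20
 *   -> prev is closest: push it at next_ts = 0, next_ts becomes 40 ms,
 *      then diff1 = 40 > diff2 = 20, so the loop ends (count == 1, no dup)
 *
 * prev = 20 ms, new = 40 ms:  diff1 = |20 - 40| = 20, diff2 = |40 - 40| = 0
 *   -> the new buffer is closest and prev is never pushed: count == 0,
 *      so the 20 ms frame is recorded as a drop
 *
 * Net effect: every second input frame is dropped, as expected for 50 -> 25.
 */
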
Example #8
static gboolean
gst_video_rate_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  gboolean res = FALSE;
  GstPad *otherpad;

  otherpad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SINK_PAD (trans) : GST_BASE_TRANSFORM_SRC_PAD (trans);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;
      guint64 latency;
      guint64 avg_period;
      GstPad *peer;

      GST_OBJECT_LOCK (videorate);
      avg_period = videorate->average_period_set;
      GST_OBJECT_UNLOCK (videorate);

      if (avg_period == 0 && (peer = gst_pad_get_peer (otherpad))) {
        if ((res = gst_pad_query (peer, query))) {
          gst_query_parse_latency (query, &live, &min, &max);

          GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          if (videorate->from_rate_numerator != 0) {
            /* add latency. We don't really know since we hold on to the frames
             * until we get a next frame, which can be anything. We assume
             * however that this will take from_rate time. */
            latency = gst_util_uint64_scale (GST_SECOND,
                videorate->from_rate_denominator,
                videorate->from_rate_numerator);
          } else {
            /* no input framerate, we don't know */
            latency = 0;
          }

          GST_DEBUG_OBJECT (videorate, "Our latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (latency));

          min += latency;
          if (max != -1)
            max += latency;

          GST_DEBUG_OBJECT (videorate, "Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          gst_query_set_latency (query, live, min, max);
        }
        gst_object_unref (peer);
        break;
      }
      /* Fall through to the default handling when we are in averaging mode
       * (which adds no latency) or when there is no peer to query yet. */
    }
    default:
      res =
          GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
          query);
      break;
  }

  return res;
}
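
The latency added above is one input frame period. A quick arithmetic check with an assumed 25/1 input framerate:

/* One frame at 25/1 fps lasts GST_SECOND * 1 / 25 = 40 ms. */
GstClockTime frame_period;

frame_period = gst_util_uint64_scale (GST_SECOND, 1, 25);
g_assert (frame_period == 40 * GST_MSECOND);    /* 40 000 000 ns */
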
Example #9
static gboolean
gst_video_rate_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoRate *videorate;

  videorate = GST_VIDEO_RATE (trans);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *segment;

      gst_event_parse_segment (event, &segment);

      if (segment->format != GST_FORMAT_TIME)
        goto format_error;

      GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");

      /* close up the previous segment, if appropriate */
      if (videorate->prevbuf) {
        gint count = 0;
        GstFlowReturn res;

        res = GST_FLOW_OK;
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
                    videorate->next_ts - videorate->segment.base
                    < videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
        if (count > 1) {
          videorate->dup += count - 1;
          if (!videorate->silent)
            gst_video_rate_notify_duplicate (videorate);
        } else if (count == 0) {
          videorate->drop++;
          if (!videorate->silent)
            gst_video_rate_notify_drop (videorate);
        }
        /* clean up for the new one; _chain will resume from the new start */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      }

      videorate->base_ts = 0;
      videorate->out_frame_count = 0;
      videorate->next_ts = GST_CLOCK_TIME_NONE;

      /* We just want to update the accumulated stream_time  */
      gst_segment_copy_into (segment, &videorate->segment);

      GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
          &videorate->segment);
      break;
    }
    case GST_EVENT_EOS:{
      gint count = 0;
      GstFlowReturn res = GST_FLOW_OK;

      GST_DEBUG_OBJECT (videorate, "Got EOS");

      /* If the segment has a stop position, fill the segment */
      if (GST_CLOCK_TIME_IS_VALID (videorate->segment.stop)) {
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((videorate->next_ts - videorate->segment.base <
                    videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
      } else if (videorate->prevbuf) {
        /* Output at least one frame but if the buffer duration is valid, output
         * enough frames to use the complete buffer duration */
        if (GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf)) {
          GstClockTime end_ts =
              videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);

          while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
              ((videorate->next_ts - videorate->segment.base < end_ts)
                  || count < 1)) {
            res = gst_video_rate_flush_prev (videorate, count > 0);
            count++;
          }
        } else {
          res = gst_video_rate_flush_prev (videorate, FALSE);
          count = 1;
        }
      }

      if (count > 1) {
        videorate->dup += count - 1;
        if (!videorate->silent)
          gst_video_rate_notify_duplicate (videorate);
      } else if (count == 0) {
        videorate->drop++;
        if (!videorate->silent)
          gst_video_rate_notify_drop (videorate);
      }

      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* also resets the segment */
      GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
      gst_video_rate_reset (videorate);
      break;
    case GST_EVENT_GAP:
      /* no gaps after videorate, ignore the event */
      gst_event_unref (event);
      return TRUE;
    default:
      break;
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);

  /* ERRORS */
format_error:
  {
    GST_WARNING_OBJECT (videorate,
        "Got segment but doesn't have GST_FORMAT_TIME value");
    return FALSE;
  }
}
Example #10
static gboolean
gst_video_rate_setcaps (GstBaseTransform * trans, GstCaps * in_caps,
    GstCaps * out_caps)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstStructure *structure;
  gboolean ret = TRUE;
  gint rate_numerator, rate_denominator;

  GST_DEBUG_OBJECT (trans, "setcaps called in: %" GST_PTR_FORMAT
      " out: %" GST_PTR_FORMAT, in_caps, out_caps);

  structure = gst_caps_get_structure (in_caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  videorate->from_rate_numerator = rate_numerator;
  videorate->from_rate_denominator = rate_denominator;

  structure = gst_caps_get_structure (out_caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  /* out_frame_count is scaled by the frame rate caps when calculating next_ts.
   * when the frame rate caps change, we must update base_ts and reset
   * out_frame_count */
  if (videorate->to_rate_numerator) {
    videorate->base_ts +=
        gst_util_uint64_scale (videorate->out_frame_count,
        videorate->to_rate_denominator * GST_SECOND,
        videorate->to_rate_numerator);
  }
  videorate->out_frame_count = 0;
  videorate->to_rate_numerator = rate_numerator;
  videorate->to_rate_denominator = rate_denominator;

  if (rate_numerator)
    videorate->wanted_diff = gst_util_uint64_scale_int (GST_SECOND,
        rate_denominator, rate_numerator);
  else
    videorate->wanted_diff = 0;

done:
  /* After a setcaps, our caps may have changed. In that case, we can't use
   * the old buffer, if there was one (it might have different dimensions) */
  GST_DEBUG_OBJECT (videorate, "swapping old buffers");
  gst_video_rate_swap_prev (videorate, NULL, GST_CLOCK_TIME_NONE);
  videorate->last_ts = GST_CLOCK_TIME_NONE;
  videorate->average = 0;

  return ret;

no_framerate:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate specified");
    ret = FALSE;
    goto done;
  }
}
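
The base_ts adjustment above keeps output timestamps continuous across a framerate change. A worked example with assumed numbers: if 100 frames were already produced at an old to-rate of 25/1, base_ts advances by the time those frames covered before out_frame_count is reset for the new rate:

/* out_frame_count = 100, old to-rate = 25/1:
 *   gst_util_uint64_scale (100, 1 * GST_SECOND, 25) == 4 * GST_SECOND
 * so base_ts moves forward by 4 seconds, frame counting restarts at 0,
 * and the first frame at the new rate follows on seamlessly. */
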
Example #11
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstCaps *ret;
  GstStructure *s, *s1, *s2, *s3 = NULL;
  int maxrate = g_atomic_int_get (&videorate->max_rate);
  gint i;

  ret = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    s = gst_caps_get_structure (caps, i);

    s1 = gst_structure_copy (s);
    s2 = gst_structure_copy (s);

    if (videorate->drop_only) {
      gint min_num = 0, min_denom = 1;
      gint max_num = G_MAXINT, max_denom = 1;

      /* Clamp the caps to our maximum rate as the first caps if possible */
      if (!gst_video_max_rate_clamp_structure (s1, maxrate,
              &min_num, &min_denom, &max_num, &max_denom)) {
        min_num = 0;
        min_denom = 1;
        max_num = maxrate;
        max_denom = 1;

        /* clamp wouldn't be a real subset of 1..maxrate, in this case the sink
         * caps should become [1..maxrate], [1..maxint] and the src caps just
         * [1..maxrate].  In case there was a caps incompatibility things will
         * explode later as appropriate :)
         *
         * In case [X..maxrate] == [X..maxint], skip as we'll set it later
         */
        if (direction == GST_PAD_SRC && maxrate != G_MAXINT)
          gst_structure_set (s1, "framerate", GST_TYPE_FRACTION_RANGE,
              min_num, min_denom, maxrate, 1, NULL);
        else {
          gst_structure_free (s1);
          s1 = NULL;
        }
      }

      if (direction == GST_PAD_SRC) {
        /* We can accept anything as long as it's at least the minimal framerate
         * the sink needs */
        gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
            min_num, min_denom, G_MAXINT, 1, NULL);

        /* Also allow unknown framerate, if it isn't already */
        if (min_num != 0 || min_denom != 1) {
          s3 = gst_structure_copy (s);
          gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
        }
      } else if (max_num != 0 || max_denom != 1) {
        /* We can provide everything up to the maximum framerate at the src */
        gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
            0, 1, max_num, max_denom, NULL);
      }
    } else if (direction == GST_PAD_SINK) {
      gint min_num = 0, min_denom = 1;
      gint max_num = G_MAXINT, max_denom = 1;

      if (!gst_video_max_rate_clamp_structure (s1, maxrate,
              &min_num, &min_denom, &max_num, &max_denom)) {
        gst_structure_free (s1);
        s1 = NULL;
      }
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
          maxrate, 1, NULL);
    } else {
      /* set the framerate as a range */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
          G_MAXINT, 1, NULL);
    }
    if (s1 != NULL)
      ret = gst_caps_merge_structure (ret, s1);
    ret = gst_caps_merge_structure (ret, s2);
    if (s3 != NULL)
      ret = gst_caps_merge_structure (ret, s3);
  }
  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = intersection;
  }
  return ret;
}
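
For an end-to-end check of the drop-only/max-rate caps handling above, a pipeline along the following lines exercises the clamped negotiation. This is a sketch using gst_parse_launch; the element and property names are assumed to be the usual videorate ones:

/* Hedged sketch: 30 fps test source reduced to 10 fps by dropping only. */
GError *error = NULL;
GstElement *pipeline = gst_parse_launch (
    "videotestsrc ! video/x-raw,framerate=30/1 "
    "! videorate drop-only=true max-rate=10 "
    "! video/x-raw,framerate=10/1 ! fakesink", &error);

if (pipeline != NULL)
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
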
Example #12
static gboolean
gst_video_rate_stop (GstBaseTransform * trans)
{
  gst_video_rate_reset (GST_VIDEO_RATE (trans));
  return TRUE;
}
Example #13
static gboolean
gst_video_rate_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVideoRate *videorate;
  GstStructure *structure;
  gboolean ret = TRUE;
  GstPad *otherpad, *opeer;
  gint rate_numerator, rate_denominator;

  videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (pad, "setcaps called %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  if (pad == videorate->srcpad) {
    /* out_frame_count is scaled by the frame rate caps when calculating next_ts.
     * when the frame rate caps change, we must update base_ts and reset
     * out_frame_count */
    if (videorate->to_rate_numerator) {
      videorate->base_ts +=
          gst_util_uint64_scale (videorate->out_frame_count,
          videorate->to_rate_denominator * GST_SECOND,
          videorate->to_rate_numerator);
    }
    videorate->out_frame_count = 0;
    videorate->to_rate_numerator = rate_numerator;
    videorate->to_rate_denominator = rate_denominator;
    otherpad = videorate->sinkpad;
  } else {
    videorate->from_rate_numerator = rate_numerator;
    videorate->from_rate_denominator = rate_denominator;
    otherpad = videorate->srcpad;
  }

  /* now try to find something for the peer */
  opeer = gst_pad_get_peer (otherpad);
  if (opeer) {
    if (gst_pad_accept_caps (opeer, caps)) {
      /* the peer accepts the caps as they are */
      gst_pad_set_caps (otherpad, caps);

      ret = TRUE;
    } else {
      GstCaps *peercaps;
      GstCaps *transform = NULL;

      ret = FALSE;

      /* see how we can transform the input caps */
      if (!gst_video_rate_transformcaps (pad, caps, otherpad, &transform))
        goto no_transform;

      /* see what the peer can do */
      peercaps = gst_pad_get_caps (opeer);

      GST_DEBUG_OBJECT (opeer, "icaps %" GST_PTR_FORMAT, peercaps);
      GST_DEBUG_OBJECT (videorate, "transform %" GST_PTR_FORMAT, transform);

      /* filter against our possibilities */
      caps = gst_caps_intersect (peercaps, transform);
      gst_caps_unref (peercaps);
      gst_caps_unref (transform);

      GST_DEBUG_OBJECT (videorate, "intersect %" GST_PTR_FORMAT, caps);

      /* could turn up empty, due to e.g. colorspace etc */
      if (gst_caps_get_size (caps) == 0) {
        gst_caps_unref (caps);
        goto no_transform;
      }

      /* take first possibility */
      gst_caps_truncate (caps);
      structure = gst_caps_get_structure (caps, 0);

      /* and fixate */
      gst_structure_fixate_field_nearest_fraction (structure, "framerate",
          rate_numerator, rate_denominator);

      gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator);

      if (otherpad == videorate->srcpad) {
        videorate->to_rate_numerator = rate_numerator;
        videorate->to_rate_denominator = rate_denominator;
      } else {
        videorate->from_rate_numerator = rate_numerator;
        videorate->from_rate_denominator = rate_denominator;
      }

      if (gst_structure_has_field (structure, "interlaced"))
        gst_structure_fixate_field_boolean (structure, "interlaced", FALSE);
      if (gst_structure_has_field (structure, "color-matrix"))
        gst_structure_fixate_field_string (structure, "color-matrix", "sdtv");
      if (gst_structure_has_field (structure, "chroma-site"))
        gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2");
      if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
        gst_structure_fixate_field_nearest_fraction (structure,
            "pixel-aspect-ratio", 1, 1);

      gst_pad_set_caps (otherpad, caps);
      gst_caps_unref (caps);
      ret = TRUE;
    }
    gst_object_unref (opeer);
  }
done:
  /* After a setcaps, our caps may have changed. In that case, we can't use
   * the old buffer, if there was one (it might have different dimensions) */
  GST_DEBUG_OBJECT (videorate, "swapping old buffers");
  gst_video_rate_swap_prev (videorate, NULL, GST_CLOCK_TIME_NONE);

  gst_object_unref (videorate);
  return ret;

no_framerate:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate specified");
    goto done;
  }
no_transform:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate transform possible");
    ret = FALSE;
    goto done;
  }
}
Example #14
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstCaps *ret;
  GstStructure *s, *s2;
  GstStructure *s3 = NULL;
  int maxrate = g_atomic_int_get (&videorate->max_rate);

  /* Should always be called with simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  ret = gst_caps_copy (caps);

  s = gst_caps_get_structure (ret, 0);
  s2 = gst_structure_copy (s);

  if (videorate->drop_only) {
    gint min_num = 0, min_denom = 1;
    gint max_num = G_MAXINT, max_denom = 1;

    /* Clamp the caps to our maximum rate as the first caps if possible */
    if (!gst_video_max_rate_clamp_structure (s, maxrate,
            &min_num, &min_denom, &max_num, &max_denom)) {
      min_num = 0;
      min_denom = 1;
      max_num = maxrate;
      max_denom = 1;

      /* clamp wouldn't be a real subset of 1..maxrate, in this case the sink
       * caps should become [1..maxrate], [1..maxint] and the src caps just
       * [1..maxrate].  In case there was a caps incompatibility things will
       * explode later as appropriate :)
       *
       * In case [X..maxrate] == [X..maxint], skip as we'll set it later
       */
      if (direction == GST_PAD_SRC && maxrate != G_MAXINT)
        gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE,
            min_num, min_denom, maxrate, 1, NULL);
      else
        gst_caps_remove_structure (ret, 0);
    }

    if (direction == GST_PAD_SRC) {
      /* We can accept anything as long as it's at least the minimal framerate
       * the sink needs */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
          min_num, min_denom, G_MAXINT, 1, NULL);

      /* Also allow unknown framerate, if it isn't already */
      if (min_num != 0 || min_denom != 1) {
        s3 = gst_structure_copy (s);
        gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
      }
    } else if (max_num != 0 || max_denom != 1) {
      /* We can provide everything up to the maximum framerate at the src */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
          0, 1, max_num, max_denom, NULL);
    }
  } else if (direction == GST_PAD_SINK) {
    gint min_num = 0, min_denom = 1;
    gint max_num = G_MAXINT, max_denom = 1;

    if (!gst_video_max_rate_clamp_structure (s, maxrate,
            &min_num, &min_denom, &max_num, &max_denom))
      gst_caps_remove_structure (ret, 0);

    gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
        maxrate, 1, NULL);
  } else {
    /* set the framerate as a range */
    gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
        G_MAXINT, 1, NULL);
  }

  gst_caps_merge_structure (ret, s2);
  if (s3 != NULL)
    gst_caps_merge_structure (ret, s3);

  return ret;
}
Example #15
static gboolean
gst_video_rate_event (GstPad * pad, GstEvent * event)
{
  GstVideoRate *videorate;
  gboolean ret;

  videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gint64 start, stop, time;
      gdouble rate, arate;
      gboolean update;
      GstFormat format;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto format_error;

      GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");

      /* close up the previous segment, if appropriate */
      if (!update && videorate->prevbuf) {
        gint count = 0;
        GstFlowReturn res;

        res = GST_FLOW_OK;
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
                    videorate->next_ts - videorate->segment.accum
                    < videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
        if (count > 1) {
          videorate->dup += count - 1;
          if (!videorate->silent)
            gst_video_rate_notify_duplicate (videorate);
        } else if (count == 0) {
          videorate->drop++;
          if (!videorate->silent)
            gst_video_rate_notify_drop (videorate);
        }
        /* clean up for the new one; _chain will resume from the new start */
        videorate->base_ts = 0;
        videorate->out_frame_count = 0;
        gst_video_rate_swap_prev (videorate, NULL, 0);
        videorate->next_ts = GST_CLOCK_TIME_NONE;
      }

      /* We just want to update the accumulated stream_time  */
      gst_segment_set_newsegment_full (&videorate->segment, update, rate, arate,
          format, start, stop, time);

      GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
          &videorate->segment);
      break;
    }
    case GST_EVENT_EOS:{
      gint count = 0;
      GstFlowReturn res = GST_FLOW_OK;

      GST_DEBUG_OBJECT (videorate, "Got EOS");

      /* If the segment has a stop position, fill the segment */
      if (GST_CLOCK_TIME_IS_VALID (videorate->segment.stop)) {
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((videorate->next_ts - videorate->segment.accum <
                    videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
      } else if (videorate->prevbuf) {
        /* Output at least one frame but if the buffer duration is valid, output
         * enough frames to use the complete buffer duration */
        if (GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf)) {
          GstClockTime end_ts =
              videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);

          while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
              ((videorate->next_ts - videorate->segment.accum < end_ts)
                  || count < 1)) {
            res = gst_video_rate_flush_prev (videorate, count > 0);
            count++;
          }
        } else {
          res = gst_video_rate_flush_prev (videorate, FALSE);
          count = 1;
        }
      }

      if (count > 1) {
        videorate->dup += count - 1;
        if (!videorate->silent)
          gst_video_rate_notify_duplicate (videorate);
      } else if (count == 0) {
        videorate->drop++;
        if (!videorate->silent)
          gst_video_rate_notify_drop (videorate);
      }

      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* also resets the segment */
      GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
      gst_video_rate_reset (videorate);
      break;
    default:
      break;
  }

  ret = gst_pad_push_event (videorate->srcpad, event);

done:
  gst_object_unref (videorate);

  return ret;

  /* ERRORS */
format_error:
  {
    GST_WARNING_OBJECT (videorate,
        "Got segment but doesn't have GST_FORMAT_TIME value");
    gst_event_unref (event);
    ret = FALSE;
    goto done;
  }
}
Example #16
static gboolean
gst_video_rate_query (GstPad * pad, GstQuery * query)
{
  GstVideoRate *videorate;
  gboolean res = FALSE;

  videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;
      guint64 latency;
      GstPad *peer;

      if ((peer = gst_pad_get_peer (videorate->sinkpad))) {
        if ((res = gst_pad_query (peer, query))) {
          gst_query_parse_latency (query, &live, &min, &max);

          GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          if (videorate->from_rate_numerator != 0) {
            /* add latency. We don't really know since we hold on to the frames
             * until we get a next frame, which can be anything. We assume
             * however that this will take from_rate time. */
            latency = gst_util_uint64_scale (GST_SECOND,
                videorate->from_rate_denominator,
                videorate->from_rate_numerator);
          } else {
            /* no input framerate, we don't know */
            latency = 0;
          }

          GST_DEBUG_OBJECT (videorate, "Our latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (latency));

          min += latency;
          if (max != -1)
            max += latency;

          GST_DEBUG_OBJECT (videorate, "Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          gst_query_set_latency (query, live, min, max);
        }
        gst_object_unref (peer);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
  gst_object_unref (videorate);

  return res;
}
Example #17
static GstFlowReturn
gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime intime, in_ts, in_dur;

  videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.accum;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts =
            videorate->segment.start + videorate->segment.accum;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      gst_buffer_unref (buffer);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {

      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when it's the best */
      if (diff1 <= diff2) {
        count++;

        /* on error the _flush function posted a warning already */
        if ((res =
                gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          gst_buffer_unref (buffer);
          goto done;
        }
      }
      /* continue while the first one was the best; if they were equal,
       * avoid going into an infinite loop */
    }
    while (diff1 < diff2);

    /* if we outputted the first buffer more than once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }
    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    gst_buffer_unref (buffer);
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    gst_buffer_unref (buffer);
    goto done;
  }
}