Example #1
static gboolean
gst_base_video_codec_src_query (GstPad * pad, GstQuery * query)
{
  GstBaseVideoCodec *base_codec;
  gboolean res = FALSE;

  base_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 time;
      gint64 value;

      gst_query_parse_position (query, &format, NULL);

      /* frame count -> stream time: frames * fps_d / fps_n, in nanoseconds */
      time = gst_util_uint64_scale (base_codec->system_frame_number,
          GST_SECOND * base_codec->state.fps_d, base_codec->state.fps_n);
      time += base_codec->state.segment.time;
      GST_DEBUG ("query position %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
      res = gst_base_video_encoded_video_convert (&base_codec->state,
          GST_FORMAT_TIME, time, &format, &value);
      if (!res)
        goto error;

      gst_query_set_position (query, format, value);
      break;
    }
    case GST_QUERY_DURATION:
      res = gst_pad_query (GST_PAD_PEER (base_codec->sinkpad), query);
      if (!res)
        goto error;
      break;
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      GST_DEBUG ("query convert");

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      res = gst_base_video_encoded_video_convert (&base_codec->state,
          src_fmt, src_val, &dest_fmt, &dest_val);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
done:
  gst_object_unref (base_codec);

  return res;
error:
  GST_DEBUG_OBJECT (base_codec, "query failed");
  goto done;
}
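
For orientation, a handler like the one above would be installed on the source pad when the element instance is created. Below is a minimal sketch of that wiring, assuming a GStreamer 0.10-style instance-init function and a pad template named "src" (both assumptions, not shown in this listing):

static void
gst_base_video_codec_init (GstBaseVideoCodec * base_codec,
    GstBaseVideoCodecClass * klass)
{
  GstPadTemplate *pad_template;

  /* the template name "src" is an assumption */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");

  base_codec->srcpad = gst_pad_new_from_template (pad_template, "src");
  /* route source-pad queries into the handler shown above */
  gst_pad_set_query_function (base_codec->srcpad,
      gst_base_video_codec_src_query);
  gst_element_add_pad (GST_ELEMENT (base_codec), base_codec->srcpad);
}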
Example #2
int
gst_base_video_encoder_get_width (GstBaseVideoEncoder * base_video_encoder)
{
  return GST_BASE_VIDEO_CODEC (base_video_encoder)->state.width;
}
Example #3
const GstVideoState *
gst_base_video_encoder_get_state (GstBaseVideoEncoder * base_video_encoder)
{
  return &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
}
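
These accessors are meant for subclasses; note that get_state returns a pointer owned by the base class, so the caller must not free it. A minimal usage sketch, with a hypothetical my_enc_log_state helper (not part of this listing):

static void
my_enc_log_state (GstBaseVideoEncoder * base_video_encoder)
{
  /* read the negotiated input state via the accessor above;
   * the returned pointer is owned by the base class */
  const GstVideoState *state =
      gst_base_video_encoder_get_state (base_video_encoder);

  GST_DEBUG_OBJECT (base_video_encoder, "configured %dx%d at %d/%d fps",
      state->width, state->height, state->fps_n, state->fps_d);
}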
Example #4
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret;
  GstBaseVideoEncoderClass *base_video_encoder_class;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  frame->system_frame_number =
      GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number;
  GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number++;

  if (frame->is_sync_point) {
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  if (!base_video_encoder->set_output_caps) {
    if (base_video_encoder_class->get_caps) {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          base_video_encoder_class->get_caps (base_video_encoder);
    } else {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          gst_caps_new_simple ("video/unknown", NULL);
    }
    gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    base_video_encoder->set_output_caps = TRUE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstStructure *s;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* FIXME this should send the event that we got on the sink pad
       instead of creating a new one */
    s = gst_structure_new ("GstForceKeyUnit",
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
  }

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }

  gst_base_video_codec_free_frame (frame);

  return ret;
}
Example #5
int
gst_base_video_encoder_get_height (GstBaseVideoEncoder * base_video_encoder)
{
  return GST_BASE_VIDEO_CODEC (base_video_encoder)->state.height;
}
Example #6
static GstFlowReturn
gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  GstBuffer *buf;
  const GstVideoState *state;
  GstFlowReturn ret;
  GstVP8EncCoderHook *hook = frame->coder_hook;
  GList *l;
  gint inv_count;

  GST_DEBUG_OBJECT (base_video_encoder, "shape_output");

  encoder = GST_VP8_ENC (base_video_encoder);

  state = gst_base_video_encoder_get_state (base_video_encoder);

  g_assert (hook != NULL);

  for (inv_count = 0, l = hook->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    if (l == hook->invisible && frame->is_sync_point) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1,
        inv_count, encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * state->fps_d, state->fps_n);

    gst_buffer_set_caps (buf, GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->src_buffer;
  frame->src_buffer = NULL;

  if (!hook->invisible && frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  GST_BUFFER_OFFSET_END (buf) =
      _to_granulepos (frame->presentation_frame_number + 1, 0,
      encoder->keyframe_distance);
  GST_BUFFER_OFFSET (buf) =
      gst_util_uint64_scale (frame->presentation_frame_number + 1,
      GST_SECOND * state->fps_d, state->fps_n);

  ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (encoder, "flow error %d", ret);
  }

done:
  if (hook) {
    g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
    g_list_free (hook->invisible);
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
  }

  return ret;
}
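
The _to_granulepos helper used above is not part of this listing. As a plausible sketch, assuming the VP8-in-Ogg granulepos layout (frame number in the upper 32 bits, invisible-frame index in bits 30-31, keyframe distance starting at bit 3), it could look like this:

static guint64
_to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
{
  guint64 granulepos;
  guint inv;

  /* 0x3 marks "no invisible frame"; otherwise store the zero-based index */
  inv = (inv_count == 0) ? 0x3 : inv_count - 1;

  granulepos = (frame_end_number << 32) | (inv << 30) | (keyframe_dist << 3);
  return granulepos;
}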
Example #7
static gboolean
gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  gboolean ret = FALSE;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
    {
      if (base_video_encoder_class->finish) {
        base_video_encoder_class->finish (base_video_encoder);
      }

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
          event);
    }
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      GST_DEBUG ("new segment %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start), GST_TIME_ARGS (position));

      gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
          (base_video_encoder)->segment, update, rate, applied_rate, format,
          start, stop, position);

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
          event);
    }
      break;
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      const GstStructure *s;

      s = gst_event_get_structure (event);

      if (gst_structure_has_name (s, "GstForceKeyUnit")) {
        GST_OBJECT_LOCK (base_video_encoder);
        base_video_encoder->force_keyframe = TRUE;
        GST_OBJECT_UNLOCK (base_video_encoder);
        gst_event_unref (event);
        ret = TRUE;
      } else {
        ret =
            gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD
            (base_video_encoder), event);
      }
      break;
    }
    default:
      /* FIXME this changes the order of events */
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
          event);
      break;
  }

done:
  gst_object_unref (base_video_encoder);
  return ret;

newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment");
    gst_event_unref (event);
    goto done;
  }
}
Example #8
static gboolean
gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoCodec *base_video_codec;
  gboolean res = FALSE;

  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);

      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_codec);
      base_video_codec->proportion = proportion;
      base_video_codec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_codec);

      GST_DEBUG_OBJECT (base_video_codec,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
    }
#endif
    default:
      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
  }
done:
  gst_object_unref (base_video_codec);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
  goto done;
}
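
The SEEK branch above converts whatever format the application chose into TIME before forwarding upstream. For reference, a seek event like the one parsed there would typically originate from an application call such as this sketch (the pipeline variable is hypothetical):

  /* seek to 5 seconds, flushing, leaving the stop position open */
  gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, 5 * GST_SECOND, GST_SEEK_TYPE_NONE, -1);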
Example #9
static gboolean
gst_base_video_codec_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoCodec *base_video_codec;
  gboolean ret = FALSE;

  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      ret = gst_pad_push_event (base_video_codec->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      gst_base_video_codec_reset (base_video_codec);
      ret = gst_pad_push_event (base_video_codec->srcpad, event);
      break;
    case GST_EVENT_EOS:
      if (gst_base_video_codec_push_all (base_video_codec,
              FALSE) == GST_FLOW_ERROR) {
        gst_event_unref (event);
        return FALSE;
      }

      ret = gst_pad_push_event (base_video_codec->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      GstFormat format;
      gdouble rate;
      gint64 start, stop, time;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
          &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (rate <= 0.0)
        goto newseg_wrong_rate;

      GST_DEBUG ("newsegment %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start), GST_TIME_ARGS (time));
      gst_segment_set_newsegment (&base_video_codec->state.segment, update,
          rate, format, start, stop, time);

      ret = gst_pad_push_event (base_video_codec->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_push_event (base_video_codec->srcpad, event);
      break;
  }
done:
  gst_object_unref (base_video_codec);
  return ret;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_codec, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;

newseg_wrong_rate:
  GST_DEBUG_OBJECT (base_video_codec, "negative rates not supported");
  gst_event_unref (event);
  goto done;
}
Example #10
/**
 * gst_base_video_encoder_finish_frame:
 * @base_video_encoder: a #GstBaseVideoEncoder
 * @frame: an encoded #GstVideoFrame
 *
 * @frame must have a valid encoded data buffer, whose metadata fields
 * are then set appropriately from the frame data; alternatively it may
 * have no buffer at all, in which case the frame is dropped.
 * The buffer is subsequently pushed downstream or provided to
 * @shape_output.
 * In any case, the frame is considered finished and released.
 *
 * Returns: a #GstFlowReturn resulting from sending data downstream
 */
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GList *l;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_LOG_OBJECT (base_video_encoder,
      "finish frame fpn %d", frame->presentation_frame_number);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  /* Push all pending events that arrived before this frame */
  for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) {
    GstVideoFrame *tmp = l->data;

    if (tmp->events) {
      GList *k;

      for (k = g_list_last (tmp->events); k; k = k->prev)
        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
            k->data);
      g_list_free (tmp->events);
      tmp->events = NULL;
    }

    if (tmp == frame)
      break;
  }

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstEvent *ev;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* re-use upstream event if any so it also conveys any additional
     * info upstream arranged in there */
    GST_OBJECT_LOCK (base_video_encoder);
    if (base_video_encoder->force_keyunit_event) {
      ev = base_video_encoder->force_keyunit_event;
      base_video_encoder->force_keyunit_event = NULL;
    } else {
      ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
          gst_structure_new ("GstForceKeyUnit", NULL));
    }
    GST_OBJECT_UNLOCK (base_video_encoder);

    gst_structure_set (ev->structure,
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
  }

  /* no buffer data means this frame is skipped/dropped */
  if (!frame->src_buffer) {
    GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
        GST_TIME_ARGS (frame->presentation_timestamp));
    goto done;
  }

  if (frame->is_sync_point) {
    GST_LOG_OBJECT (base_video_encoder, "key frame");
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  /* update rate estimate */
  GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
      GST_BUFFER_SIZE (frame->src_buffer);
  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) {
    GST_BASE_VIDEO_CODEC (base_video_encoder)->time +=
        frame->presentation_duration;
  } else {
    /* better none than nothing valid */
    GST_BASE_VIDEO_CODEC (base_video_encoder)->time = GST_CLOCK_TIME_NONE;
  }

  if (G_UNLIKELY (GST_BASE_VIDEO_CODEC (base_video_encoder)->discont)) {
    GST_LOG_OBJECT (base_video_encoder, "marking discont");
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DISCONT);
    GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }
  frame->src_buffer = NULL;

done:
  /* handed out */
  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  gst_base_video_codec_free_frame (frame);

  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  return ret;
}
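
In practice, finish_frame is the tail of a subclass's handle_frame implementation. A minimal sketch of the calling side, where my_enc_encode and the MY_ENC cast are hypothetical names for the subclass's own encoding step:

static GstFlowReturn
my_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  /* hypothetical encode step producing the encoded output buffer */
  frame->src_buffer = my_enc_encode (MY_ENC (base_video_encoder),
      frame->sink_buffer, &frame->is_sync_point);

  /* a NULL src_buffer means "drop this frame"; either way the frame
   * is considered finished and released by finish_frame */
  return gst_base_video_encoder_finish_frame (base_video_encoder, frame);
}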
Example #11
static GstFlowReturn
gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *klass;
  GstVideoFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  if (!GST_PAD_CAPS (pad)) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  GST_LOG_OBJECT (base_video_encoder,
      "received buffer of size %d with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  if (base_video_encoder->a.at_eos) {
    ret = GST_FLOW_UNEXPECTED;
    goto done;
  }

  if (base_video_encoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (buf);
    gint64 stop = start + GST_BUFFER_DURATION (buf);
    gint64 clip_start;
    gint64 clip_stop;

    if (!gst_segment_clip (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
            GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) {
      GST_DEBUG_OBJECT (base_video_encoder,
          "clipping to segment dropped frame");
      goto done;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_LOG_OBJECT (base_video_encoder, "marked discont");
    GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = TRUE;
  }

  frame =
      gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
      (base_video_encoder));
  frame->events = base_video_encoder->current_frame_events;
  base_video_encoder->current_frame_events = NULL;
  frame->sink_buffer = buf;
  frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
  frame->presentation_duration = GST_BUFFER_DURATION (buf);
  frame->presentation_frame_number =
      base_video_encoder->presentation_frame_number;
  base_video_encoder->presentation_frame_number++;
  frame->force_keyframe = base_video_encoder->force_keyframe;
  base_video_encoder->force_keyframe = FALSE;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  /* new data, more finish needed */
  base_video_encoder->drained = FALSE;

  GST_LOG_OBJECT (base_video_encoder, "passing frame pfn %d to subclass",
      frame->presentation_frame_number);

  ret = klass->handle_frame (base_video_encoder, frame);

done:
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  g_object_unref (base_video_encoder);

  return ret;
}
Example #12
static gboolean
gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query)
{
  GstBaseVideoEncoder *enc;
  gboolean res;
  GstPad *peerpad;

  enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  peerpad = gst_pad_get_peer (GST_BASE_VIDEO_CODEC_SINK_PAD (enc));

  GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONVERT:
    {
      GstBaseVideoCodec *codec = GST_BASE_VIDEO_CODEC (enc);
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      res = gst_base_video_encoded_video_convert (&codec->state,
          codec->bytes, codec->time, src_fmt, src_val, &dest_fmt, &dest_val);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_LATENCY:
    {
      gboolean live;
      GstClockTime min_latency, max_latency;

      res = gst_pad_query (peerpad, query);
      if (res) {
        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
        GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
            GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
            GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

        GST_OBJECT_LOCK (enc);
        min_latency += enc->min_latency;
        if (max_latency != GST_CLOCK_TIME_NONE) {
          max_latency += enc->max_latency;
        }
        GST_OBJECT_UNLOCK (enc);

        gst_query_set_latency (query, live, min_latency, max_latency);
      }
    }
      break;
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
  gst_object_unref (peerpad);
  gst_object_unref (enc);
  return res;

error:
  GST_DEBUG_OBJECT (enc, "query failed");
  gst_object_unref (peerpad);
  gst_object_unref (enc);
  return res;
}
Example #13
static gboolean
gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
    GstEvent * event)
{
  GstBaseVideoEncoderClass *base_video_encoder_class;
  gboolean ret = FALSE;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
    {
      GstFlowReturn flow_ret;

      GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
      base_video_encoder->a.at_eos = TRUE;

      if (base_video_encoder_class->finish) {
        flow_ret = base_video_encoder_class->finish (base_video_encoder);
      } else {
        flow_ret = GST_FLOW_OK;
      }

      ret = (flow_ret == GST_BASE_VIDEO_ENCODER_FLOW_DROPPED);
      GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
      break;
    }
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;

      GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &position);

      GST_DEBUG_OBJECT (base_video_encoder, "newseg rate %g, applied rate %g, "
          "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
          ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
          GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
          GST_TIME_ARGS (position));

      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment");
        GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
        break;
      }

      base_video_encoder->a.at_eos = FALSE;

      gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
          (base_video_encoder)->segment, update, rate, applied_rate, format,
          start, stop, position);
      GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      const GstStructure *s;

      s = gst_event_get_structure (event);

      if (gst_structure_has_name (s, "GstForceKeyUnit")) {
        GST_OBJECT_LOCK (base_video_encoder);
        base_video_encoder->force_keyframe = TRUE;
        if (base_video_encoder->force_keyunit_event)
          gst_event_unref (base_video_encoder->force_keyunit_event);
        base_video_encoder->force_keyunit_event = gst_event_copy (event);
        GST_OBJECT_UNLOCK (base_video_encoder);
        gst_event_unref (event);
        ret = TRUE;
      }
      break;
    }
    default:
      break;
  }

  return ret;
}
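
This eventfunc only consumes the events it fully handles (returning TRUE); everything else is left to the caller. A simplified sketch of how the actual pad event function might drive it, forwarding unhandled events downstream (the real wrapper would also have to keep serialized events ordered with respect to pending frames):

static gboolean
gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoEncoder *enc =
      GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  gboolean handled;

  handled = gst_base_video_encoder_sink_eventfunc (enc, event);
  /* events the eventfunc did not consume travel on downstream */
  if (!handled)
    handled = gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (enc), event);

  gst_object_unref (enc);
  return handled;
}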
Example #14
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state, tmp_state;
  gboolean ret;
  gboolean changed = FALSE;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  /* subclass should do something here ... */
  g_return_val_if_fail (base_video_encoder_class->set_format != NULL, FALSE);

  GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  memset (&tmp_state, 0, sizeof (tmp_state));

  tmp_state.caps = gst_caps_ref (caps);
  structure = gst_caps_get_structure (caps, 0);

  ret =
      gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width,
      &tmp_state.height);
  if (!ret)
    goto exit;

  changed = (tmp_state.format != state->format
      || tmp_state.width != state->width || tmp_state.height != state->height);

  if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n,
          &tmp_state.fps_d)) {
    tmp_state.fps_n = 0;
    tmp_state.fps_d = 1;
  }
  changed = changed || (tmp_state.fps_n != state->fps_n
      || tmp_state.fps_d != state->fps_d);

  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n,
          &tmp_state.par_d)) {
    tmp_state.par_n = 1;
    tmp_state.par_d = 1;
  }
  changed = changed || (tmp_state.par_n != state->par_n
      || tmp_state.par_d != state->par_d);

  tmp_state.have_interlaced =
      gst_structure_get_boolean (structure, "interlaced",
      &tmp_state.interlaced);
  changed = changed || (tmp_state.have_interlaced != state->have_interlaced
      || tmp_state.interlaced != state->interlaced);

  tmp_state.bytes_per_picture =
      gst_video_format_get_size (tmp_state.format, tmp_state.width,
      tmp_state.height);
  tmp_state.clean_width = tmp_state.width;
  tmp_state.clean_height = tmp_state.height;
  tmp_state.clean_offset_left = 0;
  tmp_state.clean_offset_top = 0;

  if (changed) {
    /* arrange draining pending frames */
    gst_base_video_encoder_drain (base_video_encoder);

    /* and subclass should be ready to configure format at any time around */
    if (base_video_encoder_class->set_format)
      ret =
          base_video_encoder_class->set_format (base_video_encoder, &tmp_state);
    if (ret) {
      gst_caps_replace (&state->caps, NULL);
      *state = tmp_state;
    }
  } else {
    /* no need to stir things up */
    GST_DEBUG_OBJECT (base_video_encoder,
        "new video format identical to configured format");
    gst_caps_unref (tmp_state.caps);
    ret = TRUE;
  }

exit:
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  if (!ret) {
    GST_WARNING_OBJECT (base_video_encoder, "rejected caps %" GST_PTR_FORMAT,
        caps);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
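
The set_format vfunc invoked above is the subclass's chance to accept or reject the new input state before it replaces the configured one. A minimal sketch of such an implementation, where MyEnc, MY_ENC and the need_reinit field are hypothetical:

static gboolean
my_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  MyEnc *enc = MY_ENC (base_video_encoder);

  /* reject pixel formats the codec library cannot consume */
  if (state->format != GST_VIDEO_FORMAT_I420)
    return FALSE;

  /* remember to reconfigure the codec library for the new
   * geometry and framerate before encoding the next frame */
  enc->need_reinit = TRUE;

  return TRUE;
}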