static void
gst_base_video_decoder_calculate_timestamps (GstBaseVideoDecoder *
    base_video_decoder, GstVideoFrame * frame,
    GstClockTime * presentation_timestamp, GstClockTime * presentation_duration)
{
  GST_DEBUG ("calculate timestamps sync=%d upstream timestamp: %"
      GST_TIME_FORMAT " parsed timestamp: %" GST_TIME_FORMAT,
      GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT),
      GST_TIME_ARGS (frame->upstream_timestamp),
      GST_TIME_ARGS (frame->parsed_timestamp));

  *presentation_timestamp = GST_CLOCK_TIME_NONE;
  *presentation_duration = GST_CLOCK_TIME_NONE;

  if (gst_base_video_decoder_check_timestamp (base_video_decoder,
          frame->upstream_timestamp)) {
    *presentation_timestamp = frame->upstream_timestamp;
    *presentation_duration = frame->upstream_duration;
  } else if (gst_base_video_decoder_check_timestamp (base_video_decoder,
          frame->parsed_timestamp))
    *presentation_timestamp = frame->parsed_timestamp;

  if (GST_CLOCK_TIME_IS_VALID (*presentation_timestamp)) {
    GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
        GST_TIME_ARGS (*presentation_timestamp),
        GST_TIME_ARGS (*presentation_timestamp -
            base_video_decoder->segment.start));
    base_video_decoder->timestamp_offset = *presentation_timestamp;
    base_video_decoder->field_index = 0;
  } else {
    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }

    *presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        base_video_decoder->field_index);
  }

  if (*presentation_duration == GST_CLOCK_TIME_NONE) {
    *presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  base_video_decoder->field_index += frame->n_fields;
  base_video_decoder->last_timestamp = *presentation_timestamp;
}
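
/* For reference, minimal sketches of the timestamp helpers used above.
 * These are assumptions about their behavior for illustration, not the
 * canonical implementations: a timestamp is considered usable when it is
 * valid and does not run backwards, and field timestamps/durations are
 * extrapolated from timestamp_offset at field rate (two fields per frame)
 * using the negotiated framerate in state.fps_n/state.fps_d. */
static gboolean
gst_base_video_decoder_check_timestamp (GstBaseVideoDecoder *
    base_video_decoder, GstClockTime timestamp)
{
  if (!GST_CLOCK_TIME_IS_VALID (timestamp))
    return FALSE;

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp))
    return timestamp >= base_video_decoder->last_timestamp;

  return TRUE;
}

static GstClockTime
gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder *
    base_video_decoder, gint field_offset)
{
  if (base_video_decoder->state.fps_d == 0 || field_offset < 0)
    return GST_CLOCK_TIME_NONE;

  if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset))
    return GST_CLOCK_TIME_NONE;

  /* field_offset fields after the base timestamp, two fields per frame */
  return base_video_decoder->timestamp_offset +
      gst_util_uint64_scale (field_offset,
      base_video_decoder->state.fps_d * GST_SECOND,
      base_video_decoder->state.fps_n * 2);
}

static GstClockTime
gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder *
    base_video_decoder, gint n_fields)
{
  if (base_video_decoder->state.fps_d == 0 || n_fields < 0)
    return GST_CLOCK_TIME_NONE;

  return gst_util_uint64_scale (n_fields,
      base_video_decoder->state.fps_d * GST_SECOND,
      base_video_decoder->state.fps_n * 2);
}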
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
    gboolean include_current_buf, GstVideoFrame ** new_frame)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBaseVideoDecoderClass *klass;

  guint64 frame_end_offset;
  GstClockTime timestamp, duration;
  GstClockTime running_time;
  GstClockTimeDiff deadline;
  GstFlowReturn ret;

  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  if (include_current_buf)
    frame_end_offset = base_video_decoder->current_buf_offset;
  else
    frame_end_offset = base_video_decoder->prev_buf_offset;

  gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
      frame_end_offset, &timestamp, &duration);

  frame->presentation_timestamp = timestamp;
  frame->presentation_duration = duration;

  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT))
    base_video_decoder->distance_from_sync = 0;

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
      GST_FORMAT_TIME, frame->presentation_timestamp);

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
  else
    deadline = G_MAXINT64;

  /* hand the frame to the subclass for decoding */
  ret = klass->handle_frame (base_video_decoder, frame, deadline);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("handle_frame returned %s", gst_flow_get_name (ret));
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  if (new_frame)
    *new_frame = base_video_decoder->current_frame;

  return ret;
}
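
/* A hedged sketch of the offset-to-timestamp lookup used above, assuming
 * upstream timestamps are queued as (offset, timestamp, duration) records
 * when buffers arrive; the Timestamp struct and the timestamps list field
 * are illustrative assumptions, not the canonical implementation. The last
 * record at or before the requested byte offset wins, and consumed records
 * are dropped. */
typedef struct
{
  guint64 offset;
  GstClockTime timestamp;
  GstClockTime duration;
} Timestamp;

static void
gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder *
    base_video_decoder, guint64 offset, GstClockTime * timestamp,
    GstClockTime * duration)
{
  GList *g;

  *timestamp = GST_CLOCK_TIME_NONE;
  *duration = GST_CLOCK_TIME_NONE;

  g = base_video_decoder->timestamps;
  while (g) {
    Timestamp *ts = g->data;

    if (ts->offset <= offset) {
      /* record applies to this frame; take it and drop it */
      *timestamp = ts->timestamp;
      *duration = ts->duration;
      g = g_list_next (g);
      base_video_decoder->timestamps =
          g_list_remove (base_video_decoder->timestamps, ts);
      g_free (ts);
    } else {
      break;
    }
  }
}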
static GstFlowReturn
gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
  GstBitReader reader;
  GstNalUnit nal_unit;
  guint8 forbidden_zero_bit;

  guint8 *data;
  guint size;
  gint i;

  GstFlowReturn ret = GST_FLOW_OK;

  GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

  gst_bit_reader_init_from_buffer (&reader, buf);

  /* need the nal_length (or sync code) plus the one-byte NAL unit header */
  if (gst_bit_reader_get_remaining (&reader) <
      h264_dec->nal_length_size * 8 + 8)
    goto invalid_packet;

  /* skip the nal_length field (AVC) or the sync code (byte-stream) */
  gst_bit_reader_skip_unchecked (&reader, h264_dec->nal_length_size * 8);

  forbidden_zero_bit = gst_bit_reader_get_bits_uint8_unchecked (&reader, 1);

  if (forbidden_zero_bit != 0) {
    GST_WARNING ("forbidden_zero_bit != 0");
    return GST_FLOW_ERROR;
  }

  nal_unit.ref_idc = gst_bit_reader_get_bits_uint16_unchecked (&reader, 2);
  GST_DEBUG ("nal_ref_idc: %u", nal_unit.ref_idc);

  /* read nal_unit_type */
  nal_unit.type = gst_bit_reader_get_bits_uint16_unchecked (&reader, 5);

  GST_DEBUG ("nal_unit_type: %u", nal_unit.type);
  if (nal_unit.type == 14 || nal_unit.type == 20) {
    if (!gst_bit_reader_skip (&reader, 24))
      goto invalid_packet;
  }
  nal_unit.IdrPicFlag = (nal_unit.type == 5 ? 1 : 0);

  data = GST_BUFFER_DATA (buf) + gst_bit_reader_get_pos (&reader) / 8;
  size = gst_bit_reader_get_remaining (&reader) / 8;

  /* trim trailing zero bytes (trailing_zero_8bits) */
  for (i = size - 1; i >= 0 && data[i] == 0x00; i--)
    size--;

  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
    /* per H.264 7.4.1.2.3 these NAL unit types may only appear before the
     * first VCL NAL unit of an access unit, so they end the current frame */
    if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
        nal_unit.type == GST_NAL_SEI || nal_unit.type == GST_NAL_AU_DELIMITER ||
        (nal_unit.type >= 14 && nal_unit.type <= 18))
      ret =
          gst_base_video_decoder_have_frame (base_video_decoder, FALSE, &frame);
  }

  if (nal_unit.type >= GST_NAL_SLICE && nal_unit.type <= GST_NAL_SLICE_IDR) {
    GstH264Slice slice;

    if (!gst_h264_parser_parse_slice_header (h264_dec->parser, &slice, data,
            size, nal_unit))
      goto invalid_packet;

    if (slice.redundant_pic_cnt == 0) {
      if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
        GstH264Slice *p_slice;
        guint8 pic_order_cnt_type, p_pic_order_cnt_type;
        gboolean finish_frame = FALSE;

        p_slice = &(GST_H264_FRAME_CAST (frame)->slice_hdr);
        pic_order_cnt_type = slice.picture->sequence->pic_order_cnt_type;
        p_pic_order_cnt_type = p_slice->picture->sequence->pic_order_cnt_type;

        /* picture-boundary detection per H.264 7.4.1.2.4: any of these
         * differences between this slice header and the previous one means
         * the slice starts a new primary coded picture */
        if (slice.frame_num != p_slice->frame_num)
          finish_frame = TRUE;
        else if (slice.picture != p_slice->picture)
          finish_frame = TRUE;
        else if (slice.bottom_field_flag != p_slice->bottom_field_flag)
          finish_frame = TRUE;
        else if (nal_unit.ref_idc != p_slice->nal_unit.ref_idc &&
            (nal_unit.ref_idc == 0 || p_slice->nal_unit.ref_idc == 0))
          finish_frame = TRUE;
        else if ((pic_order_cnt_type == 0 && p_pic_order_cnt_type == 0) &&
            (slice.pic_order_cnt_lsb != p_slice->pic_order_cnt_lsb ||
                slice.delta_pic_order_cnt_bottom !=
                p_slice->delta_pic_order_cnt_bottom))
          finish_frame = TRUE;
        else if ((pic_order_cnt_type == 1 && p_pic_order_cnt_type == 1) &&
            (slice.delta_pic_order_cnt[0] != p_slice->delta_pic_order_cnt[0] ||
                slice.delta_pic_order_cnt[1] !=
                p_slice->delta_pic_order_cnt[1]))
          finish_frame = TRUE;

        if (finish_frame)
          ret =
              gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
              &frame);

      }

      if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
        if (GST_H264_IS_I_SLICE (slice.type)
            || GST_H264_IS_SI_SLICE (slice.type))
          GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_KEYFRAME);

        GST_H264_FRAME_CAST (frame)->slice_hdr = slice;
        GST_VIDEO_FRAME_FLAG_SET (frame, GST_H264_FRAME_GOT_PRIMARY);
      }
    }
    gst_h264_frame_add_slice (GST_H264_FRAME_CAST (frame), buf);
  }

  if (nal_unit.type == GST_NAL_SPS) {
    if (!gst_h264_parser_parse_sequence (h264_dec->parser, data, size))
      goto invalid_packet;
  }

  if (nal_unit.type == GST_NAL_PPS) {
    if (!gst_h264_parser_parse_picture (h264_dec->parser, data, size))
      goto invalid_packet;
  }

  gst_buffer_unref (buf);
  return ret;

invalid_packet:
  GST_WARNING ("Invalid packet size!");
  gst_buffer_unref (buf);

  /* drop the malformed packet but keep the stream alive */
  return GST_FLOW_OK;
}
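
/* A hedged sketch of where nal_length_size could come from: the avcC
 * codec_data on the sink caps (ISO/IEC 14496-15) stores lengthSizeMinusOne
 * in the low two bits of byte 4. The helper name is an illustrative
 * assumption; byte-stream input without codec_data would use the start
 * code length instead. */
static gboolean
gst_vdp_h264_dec_parse_codec_data (GstVdpH264Dec * h264_dec,
    GstBuffer * codec_data)
{
  guint8 *data;

  if (GST_BUFFER_SIZE (codec_data) < 7)
    return FALSE;

  data = GST_BUFFER_DATA (codec_data);
  if (data[0] != 1)             /* configurationVersion must be 1 */
    return FALSE;

  h264_dec->nal_length_size = (data[4] & 0x03) + 1;

  return TRUE;
}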
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT,
      GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT),
      GST_TIME_ARGS (frame->presentation_timestamp));

  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
    if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
      GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (frame->presentation_timestamp -
              base_video_decoder->segment.start));
      base_video_decoder->timestamp_offset = frame->presentation_timestamp;
      base_video_decoder->field_index = 0;
    } else {
      /* This case is for one initial timestamp and no others, e.g.,
       * filesrc ! decoder ! xvimagesink */
      GST_WARNING ("sync timestamp didn't change, ignoring");
      frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
    }
  } else {
    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %"
          GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (base_video_decoder->last_timestamp));
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  src_buffer = frame->src_buffer;

  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif

    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_SET (src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_video_frame_unref (frame);
      return GST_FLOW_OK;
    }
  }

  gst_buffer_ref (src_buffer);
  gst_video_frame_unref (frame);

  if (base_video_decoder_class->shape_output)
    return base_video_decoder_class->shape_output (base_video_decoder,
        src_buffer);

  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      src_buffer);
}
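
/* A hedged sketch of a trivial shape_output implementation, assuming a
 * subclass that needs no reordering or buffering: it simply pushes the
 * finished buffer on the source pad. Subclasses that reorder output would
 * queue buffers here instead. The function name is illustrative. */
static GstFlowReturn
gst_example_dec_shape_output (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf)
{
  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      buf);
}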
void
gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;

  GST_DEBUG ("skip frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("skip frame sync=%d pts=%" GST_TIME_FORMAT,
      GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT),
      GST_TIME_ARGS (frame->presentation_timestamp));

  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
    if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
      GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (frame->presentation_timestamp -
              base_video_decoder->segment.start));
      base_video_decoder->timestamp_offset = frame->presentation_timestamp;
      base_video_decoder->field_index = 0;
    } else {
      /* This case is for one initial timestamp and no others, e.g.,
       * filesrc ! decoder ! xvimagesink */
      GST_WARNING ("sync timestamp didn't change, ignoring");
      frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
    }
  } else {
    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_DEBUG ("skipping frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));

  gst_video_frame_unref (frame);
}
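
/* A hedged usage sketch of the skip path, assuming a subclass's
 * handle_frame vmethod: a negative deadline means the frame is already
 * late for QoS purposes, so it can be skipped instead of decoded.
 * gst_example_dec_decode () is a stand-in for the subclass's real
 * decoding; both function names are illustrative assumptions. */
static gboolean gst_example_dec_decode (GstBaseVideoDecoder *
    base_video_decoder, GstVideoFrame * frame);

static GstFlowReturn
gst_example_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  if (deadline < 0) {
    GST_DEBUG ("frame is too late, skipping");
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  if (!gst_example_dec_decode (base_video_decoder, frame))
    return GST_FLOW_ERROR;

  return gst_base_video_decoder_finish_frame (base_video_decoder, frame);
}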
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;

  GstClockTime presentation_timestamp;
  GstClockTime presentation_duration;

  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame,
      &presentation_timestamp, &presentation_duration);

  src_buffer = frame->src_buffer;

  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif

    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_SET (src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (src_buffer) = presentation_timestamp;
  GST_BUFFER_DURATION (src_buffer) = presentation_duration;
  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (presentation_timestamp));

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_video_frame_unref (frame);
      return GST_FLOW_OK;
    }
  }

  gst_buffer_ref (src_buffer);
  gst_video_frame_unref (frame);

  if (base_video_decoder_class->shape_output)
    return base_video_decoder_class->shape_output (base_video_decoder,
        src_buffer);

  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      src_buffer);
}