Example #1
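This revision of gst_base_video_decoder_have_frame() (from the GStreamer 0.10-era GstBaseVideoDecoder base class) looks up the frame's timestamp from the byte offset at which the frame ends, computes a QoS deadline from the segment's running time, and hands the frame to the subclass's handle_frame() vmethod. GST_FLOW_IS_SUCCESS() only gates a debug message; the flow return is propagated to the caller unchanged.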
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
    gboolean include_current_buf, GstVideoFrame ** new_frame)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBaseVideoDecoderClass *klass;

  guint64 frame_end_offset;
  GstClockTime timestamp, duration;
  GstClockTime running_time;
  GstClockTimeDiff deadline;
  GstFlowReturn ret;

  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  if (include_current_buf)
    frame_end_offset = base_video_decoder->current_buf_offset;
  else
    frame_end_offset = base_video_decoder->prev_buf_offset;

  gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
      frame_end_offset, &timestamp, &duration);

  frame->presentation_timestamp = timestamp;
  frame->presentation_duration = duration;

  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT))
    base_video_decoder->distance_from_sync = 0;

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
      GST_FORMAT_TIME, frame->presentation_timestamp);

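  /* earliest_time is updated from downstream QoS events; a negative
   * deadline tells the subclass the frame is already late. */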
  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
  else
    deadline = G_MAXINT64;

  /* do something with frame */
  ret = klass->handle_frame (base_video_decoder, frame, deadline);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  if (new_frame)
    *new_frame = base_video_decoder->current_frame;

  return ret;
}
Example #2
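A simpler variant of the same function: the presentation timestamp and duration come straight from the frame's sink buffer, the frame is appended to the decoder's pending-frame list, and the same deadline/handle_frame() pattern follows.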
static GstFlowReturn
gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime running_time;
  GstClockTimeDiff deadline;

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

  frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (frame->sink_buffer);
  frame->presentation_duration = GST_BUFFER_DURATION (frame->sink_buffer);

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  base_video_decoder->frames = g_list_append (base_video_decoder->frames,
      frame);

  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
      GST_FORMAT_TIME, frame->presentation_timestamp);

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
  else
    deadline = G_MAXINT64;

  /* do something with frame */
  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame,
      deadline);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  return ret;
}
Example #3
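Apparently from Gnash's MediaParserGst: a freshly allocated buffer is filled from the input stream and pushed into the demuxer with gst_pad_push(). A zero-byte or short read distinguishes end-of-stream from a genuine read failure, and a failed push rewinds the stream by the number of bytes just consumed.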
bool
MediaParserGst::pushGstBuffer()
{
    GstBuffer* buffer = gst_buffer_new_and_alloc(PUSHBUF_SIZE);

    std::streamoff ret = _stream->read(GST_BUFFER_DATA(buffer), PUSHBUF_SIZE);

    if (ret == 0) {
        if (!_stream->eof()) {
            log_error(_("MediaParserGst failed to read the stream, but did not "
                      "reach EOF!"));
        } else {
            _parsingComplete = true;
        }
        gst_buffer_unref(buffer);
        return false;
    }

    if (ret < PUSHBUF_SIZE) {
        if (!_stream->eof()) {
            log_error(_("MediaParserGst got a short read from the stream, "
                      "but did not reach EOF!"));
        } else {
            _parsingComplete = true;
        }

        GST_BUFFER_SIZE(buffer) = ret;
    }

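    // gst_pad_push() always takes ownership of the buffer, so it must not
    // be unreffed here, even when the push fails.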
    GstFlowReturn rv = gst_pad_push (_srcpad, buffer);
    if (!GST_FLOW_IS_SUCCESS (rv)) {
        log_error(_("MediaParserGst failed to push more data into the demuxer! "
                    "Seeking back."));
        _stream->seek(_stream->tell() - ret);
        return false;
    }
    
    return true;
}
Example #4
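An earlier revision of the same have_frame() function: parsed bytes are drained from an output adapter into a buffer, keyframe and discontinuity flags are set on that buffer, and the frame is queued on the decoder's frame list before being handed to the subclass. The #if 0 blocks preserve timestamp bookkeeping that was disabled at the time.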
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBuffer *buffer;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  int n_available;

  GST_DEBUG ("have_frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

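  /* Drain whatever has accumulated in the output adapter into a single
   * buffer; fall back to an empty buffer if nothing is pending. */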
  n_available = gst_adapter_available (base_video_decoder->output_adapter);
  if (n_available) {
    buffer = gst_adapter_take_buffer (base_video_decoder->output_adapter,
        n_available);
  } else {
    buffer = gst_buffer_new_and_alloc (0);
  }

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

#if 0
  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number);
    frame->presentation_duration =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number + 1) - frame->presentation_timestamp;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
#endif

#if 0
  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
  if (frame->decode_frame_number < 0) {
    GST_BUFFER_OFFSET (buffer) = 0;
  } else {
    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
  }
  GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE;
#endif

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

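  /* Non-keyframes are marked as delta units; a pending discontinuity is
   * flagged on the first buffer that follows it. */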
  if (frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  frame->sink_buffer = buffer;

  base_video_decoder->frames = g_list_append (base_video_decoder->frames,
      frame);

  /* do something with frame */
  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  return ret;
}
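All four examples test the result with GST_FLOW_IS_SUCCESS() instead of comparing against GST_FLOW_OK directly. In GStreamer 0.10 the macro expands to ((ret) >= GST_FLOW_OK), so the custom success codes above GST_FLOW_OK also count as success; the macro was removed in GStreamer 1.0, where an explicit ret != GST_FLOW_OK check is the idiom. A minimal sketch of the pattern, assuming the 0.10 API (push_and_check is a hypothetical helper, not part of any of the sources above):

#include <gst/gst.h>

/* Hypothetical helper: push a buffer downstream and report whether the
 * flow is still healthy.  gst_pad_push() always takes ownership of the
 * buffer, so no unref is needed whatever the result. */
static gboolean
push_and_check (GstPad * srcpad, GstBuffer * buffer)
{
  GstFlowReturn ret = gst_pad_push (srcpad, buffer);

  /* True for GST_FLOW_OK and any custom success code (>= GST_FLOW_OK). */
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_WARNING ("downstream returned %s", gst_flow_get_name (ret));
    return FALSE;
  }
  return TRUE;
}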