/* Sink-pad chain function: receives buffers from upstream, accumulates them
 * in the input adapter, (re)establishes sync when needed, and repeatedly
 * invokes the subclass's parse_data vfunc until it asks for more data or
 * reports an error.
 *
 * Returns GST_FLOW_OK when parse_data merely ran out of input
 * (GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA); otherwise propagates the
 * subclass's flow return.
 */
static GstFlowReturn
gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoParse *base_video_parse;
  GstBaseVideoParseClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  /* GST_BUFFER_SIZE is a guint, so use %u (was %d) */
  GST_DEBUG ("chain with %u bytes", GST_BUFFER_SIZE (buf));

  base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad));
  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  /* parse_data is called unconditionally in the loop below; a subclass that
   * failed to provide it would crash there, so fail loudly up front.
   * (Resolves the old "FIXME check klass->parse_data".) */
  g_return_val_if_fail (klass->parse_data != NULL, GST_FLOW_ERROR);

  if (!base_video_parse->started) {
    klass->start (base_video_parse);
    base_video_parse->started = TRUE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    /* discontinuity: drop all parsing state and re-acquire sync */
    GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer");
    gst_base_video_parse_reset (base_video_parse);
    base_video_parse->discont = TRUE;
    base_video_parse->have_sync = FALSE;
  }

  /* remember the most recent upstream timestamp for interpolation */
  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }

  /* adapter takes ownership of buf */
  gst_adapter_push (base_video_parse->input_adapter, buf);

  if (!base_video_parse->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_parse->input_adapter);
    m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n);

    /* discard everything before the (possible) sync point */
    gst_adapter_flush (base_video_parse->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_parse->have_sync = TRUE;
    }
    if (!base_video_parse->have_sync) {
      /* whole adapter content flushed; wait for more data */
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead?
   * NOTE(review): this get/unref pair discards its result; it appears to
   * exist only for the adapter side effect (if any) — confirm before
   * removing. */
  buffer = gst_adapter_get_buffer (base_video_parse->input_adapter);
  gst_buffer_unref (buffer);

  /* let the subclass consume adapter data until it needs more or errors */
  do {
    ret = klass->parse_data (base_video_parse, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) {
    /* not an error: subclass just wants more input */
    return GST_FLOW_OK;
  }
  return ret;
}
/**
 * gst_base_video_parse_finish_frame:
 * @base_video_parse: the parser instance
 *
 * Completes the current frame: drains the output adapter into a single
 * buffer, computes PTS/DTS/duration from the frame numbers, sets buffer
 * flags/offsets, hands the frame to the subclass's shape_output vfunc,
 * and starts a fresh current_frame.
 *
 * Returns: the flow return from the subclass's shape_output.
 */
GstFlowReturn
gst_base_video_parse_finish_frame (GstBaseVideoParse * base_video_parse)
{
  GstVideoFrame *frame = base_video_parse->current_frame;
  GstBuffer *buffer;
  GstBaseVideoParseClass *base_video_parse_class;
  GstFlowReturn ret;

  GST_DEBUG ("finish_frame");

  base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  /* everything accumulated for this frame becomes one output buffer */
  buffer = gst_adapter_take_buffer (base_video_parse->output_adapter,
      gst_adapter_available (base_video_parse->output_adapter));

  if (frame->is_sync_point) {
    /* re-anchor timestamp interpolation at the keyframe: offset such that
     * frame N maps to last_timestamp - N * frame_duration */
    base_video_parse->timestamp_offset = base_video_parse->last_timestamp -
        gst_util_uint64_scale (frame->presentation_frame_number,
        base_video_parse->state.fps_d * GST_SECOND,
        base_video_parse->state.fps_n);
    base_video_parse->distance_from_sync = 0;
  }

  frame->distance_from_sync = base_video_parse->distance_from_sync;
  base_video_parse->distance_from_sync++;

  frame->presentation_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number);
  /* duration = timestamp of next frame minus this frame's timestamp */
  frame->presentation_duration =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number + 1) - frame->presentation_timestamp;
  frame->decode_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->decode_frame_number);

  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
  if (frame->decode_frame_number < 0) {
    GST_BUFFER_OFFSET (buffer) = 0;
  } else {
    /* NOTE(review): stores a timestamp in the OFFSET field — presumably a
     * deliberate convention here; confirm downstream expectations */
    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
  }
  /* Fixed: OFFSET_END is a buffer offset, so use GST_BUFFER_OFFSET_NONE
   * rather than GST_CLOCK_TIME_NONE (same (guint64)-1 bit pattern, so
   * runtime behavior is identical — this is the semantically correct macro). */
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  /* only keyframes are independently decodable */
  if (frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  /* frame takes ownership of buffer; subclass decides how to push it */
  frame->src_buffer = buffer;
  ret = base_video_parse_class->shape_output (base_video_parse, frame);

  gst_base_video_parse_free_frame (base_video_parse->current_frame);

  /* create new frame */
  base_video_parse->current_frame =
      gst_base_video_parse_new_frame (base_video_parse);

  return ret;
}