static GstFlowReturn
gst_mpegv_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstTagList *taglist;

  /* tag sending done late enough in hook to ensure pending events
   * have already been sent */

  if (G_UNLIKELY (mpvparse->send_codec_tag)) {
    gchar *codec;

    /* codec tag */
    codec =
        g_strdup_printf ("MPEG %d Video",
        (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1);
    taglist = gst_tag_list_new (GST_TAG_VIDEO_CODEC, codec, NULL);
    g_free (codec);

    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (mpvparse),
        gst_event_new_tag (taglist));

    mpvparse->send_codec_tag = FALSE;
  }

  /* usual clipping applies */
  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;

  return GST_FLOW_OK;
}
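
/* Hedged sketch (not part of the parser): on the application side, the
 * GST_TAG_VIDEO_CODEC tag pushed above surfaces as a TAG message on the
 * pipeline bus and could be read back roughly like this. */
static void
print_video_codec_tag (GstMessage * msg)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_TAG) {
    GstTagList *tags = NULL;
    gchar *codec = NULL;

    gst_message_parse_tag (msg, &tags);
    if (gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &codec)) {
      g_print ("video codec: %s\n", codec);
      g_free (codec);
    }
    gst_tag_list_unref (tags);
  }
}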
/* Example #2 */
static gboolean
gst_mpegv_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstStructure *s;
  const GValue *value;
  GstBuffer *buf;

  GST_DEBUG_OBJECT (parse, "setcaps called with %" GST_PTR_FORMAT, caps);

  s = gst_caps_get_structure (caps, 0);

  if ((value = gst_structure_get_value (s, "codec_data")) != NULL
      && (buf = gst_value_get_buffer (value))) {
    GstMapInfo map;
    gst_buffer_map (buf, &map, GST_MAP_READ);
    /* best possible parse attempt,
     * src caps are based on sink caps so it will end up in there
     * whether successful or not */
    mpvparse->seq_offset = 4;
    gst_mpegv_parse_process_config (mpvparse, &map, gst_buffer_get_size (buf));
    gst_buffer_unmap (buf, &map);
    gst_mpegv_parse_reset_frame (mpvparse);
  }

  /* let's not interfere and accept regardless of config parsing success */
  return TRUE;
}
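
/* Hedged sketch (not from the original source): an upstream element such as a
 * demuxer would hand the parser its config blob through the "codec_data" caps
 * field consumed above. seq_header/seq_header_len are placeholders for a real
 * MPEG sequence header. */
static GstCaps *
make_mpegv_sink_caps (const guint8 * seq_header, gsize seq_header_len)
{
  GstBuffer *codec_data = gst_buffer_new_allocate (NULL, seq_header_len, NULL);
  GstCaps *caps = gst_caps_new_simple ("video/mpeg",
      "mpegversion", G_TYPE_INT, 2,
      "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);

  gst_buffer_fill (codec_data, 0, seq_header, seq_header_len);
  gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
  gst_buffer_unref (codec_data);

  return caps;
}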
static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buffer = frame->buffer;

  gst_mpegv_parse_update_src_caps (mpvparse);

  if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* maybe only sequence in this buffer, though not recommended,
   * so mark it as such and force 0 duration */
  if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
    GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    GST_BUFFER_DURATION (buffer) = 0;
  }

  if (mpvparse->frame_repeat_count
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
    GST_BUFFER_DURATION (buffer) =
        (1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
  }

  if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
    GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
    return GST_BASE_PARSE_FLOW_DROPPED;
  } else
    return GST_FLOW_OK;
}
static gboolean
gst_mpegv_parse_stop (GstBaseParse * parse)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);

  GST_DEBUG_OBJECT (parse, "stop");

  gst_mpegv_parse_reset (mpvparse);

  return TRUE;
}
static gboolean
gst_mpegv_parse_start (GstBaseParse * parse)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);

  GST_DEBUG_OBJECT (parse, "start");

  gst_mpegv_parse_reset (mpvparse);
  /* at least this much for a valid frame */
  gst_base_parse_set_min_frame_size (parse, 6);

  return TRUE;
}
static void
gst_mpegv_parse_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstMpegvParse *parse = GST_MPEGVIDEO_PARSE (object);

  switch (property_id) {
    case PROP_DROP:
      parse->drop = g_value_get_boolean (value);
      break;
    case PROP_GOP_SPLIT:
      parse->gop_split = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
  }
}
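
/* Hedged sketch: assuming the element is registered as "mpegvideoparse" and
 * PROP_DROP / PROP_GOP_SPLIT map to the "drop" and "gop-split" properties,
 * an application would reach the setter above roughly like this. */
static GstElement *
make_configured_parser (void)
{
  GstElement *parser = gst_element_factory_make ("mpegvideoparse", NULL);

  /* routed through gst_mpegv_parse_set_property() */
  g_object_set (parser, "drop", TRUE, "gop-split", FALSE, NULL);

  return parser;
}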
/* Example #7 */
static gboolean
gst_mpegv_parse_sink_query (GstBaseParse * parse, GstQuery * query)
{
  gboolean res;
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);

  res = GST_BASE_PARSE_CLASS (parent_class)->sink_query (parse, query);

  if (res && GST_QUERY_TYPE (query) == GST_QUERY_ALLOCATION) {
    mpvparse->send_mpeg_meta =
        gst_query_find_allocation_meta (query, GST_MPEG_VIDEO_META_API_TYPE,
        NULL);

    GST_DEBUG_OBJECT (parse, "Downstream can handle GstMpegVideo GstMeta : %d",
        mpvparse->send_mpeg_meta);
  }

  return res;
}
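
/* Hedged sketch: for the branch above to trigger, a downstream element has to
 * advertise GstMpegVideoMeta support in its ALLOCATION query handling,
 * roughly like this (vfunc signature simplified for illustration). */
static gboolean
downstream_propose_allocation (GstQuery * query)
{
  gst_query_add_allocation_meta (query, GST_MPEG_VIDEO_META_API_TYPE, NULL);
  return TRUE;
}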
static GstFlowReturn
gst_mpegv_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  gboolean ret = FALSE;
  gint off = 0;
  GstMpegVideoPacket packet;
  guint8 *data;
  gint size;
  GstMapInfo map;

  update_frame_parsing_status (mpvparse, frame);

  gst_buffer_map (buf, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

retry:
  /* at least start code and subsequent byte */
  if (G_UNLIKELY (size < 5 + off))
    goto exit;

  /* if already found a previous start code, e.g. start of frame, go for next */
  if (mpvparse->last_sc >= 0) {
    off = packet.offset = mpvparse->last_sc;
    packet.size = 0;
    goto next;
  }

  if (!gst_mpeg_video_parse (&packet, data, size, off)) {
    /* didn't find anything that looks like a sync word, skip */
    GST_LOG_OBJECT (mpvparse, "no start code found");
    *skipsize = size - 3;
    goto exit;
  }

  off = packet.offset - 4;
  GST_LOG_OBJECT (mpvparse, "possible sync at buffer offset %d", off);

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (G_UNLIKELY (off > 0)) {
    *skipsize = off;
    goto exit;
  }

  /* note: initial start code is assumed at offset 0 by subsequent code */

  /* examine start code, see if it looks like an initial start code */
  if (gst_mpegv_parse_process_sc (mpvparse, buf, 4, packet.type)) {
    /* found sc */
    GST_LOG_OBJECT (mpvparse, "valid start code found");
    mpvparse->last_sc = 0;
  } else {
    off++;
    gst_mpegv_parse_reset_frame (mpvparse);
    GST_LOG_OBJECT (mpvparse, "invalid start code");
    goto retry;
  }

next:
  /* start is fine as of now */
  *skipsize = 0;
  /* terminating start code may have been found in prev scan already */
  if (((gint) packet.size) >= 0) {
    off = packet.offset + packet.size;
    /* so now we have start code at start of data; locate next start code */
    if (!gst_mpeg_video_parse (&packet, data, size, off)) {
      off = -1;
    } else {
      g_assert (packet.offset >= 4);
      off = packet.offset - 4;
    }
  } else {
    off = -1;
  }

  GST_LOG_OBJECT (mpvparse, "next start code at %d", off);
  if (off < 0) {
    /* if draining, take all */
    if (GST_BASE_PARSE_DRAINING (parse)) {
      GST_LOG_OBJECT (mpvparse, "draining, accepting all data");
      off = size;
      ret = TRUE;
    } else {
      GST_LOG_OBJECT (mpvparse, "need more data");
      /* resume scan where we left it */
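      /* back up 3 bytes so a start code split across buffers is still found */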
      mpvparse->last_sc = size - 3;
      /* request best next available */
      off = G_MAXUINT;
      goto exit;
    }
  } else {
    /* decide whether this startcode ends a frame */
    ret = gst_mpegv_parse_process_sc (mpvparse, buf, off + 4, packet.type);
  }

  if (!ret)
    goto next;

exit:
  gst_buffer_unmap (buf, &map);

  if (ret) {
    GstFlowReturn res;

    *skipsize = 0;
    g_assert (off <= map.size);
    res = gst_mpegv_parse_parse_frame (parse, frame);
    if (res == GST_BASE_PARSE_FLOW_DROPPED)
      frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
    return gst_base_parse_finish_frame (parse, frame, off);
  }

  return GST_FLOW_OK;
}
/* Example #9 */
static GstFlowReturn
gst_mpegv_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstTagList *taglist;
  GstMpegVideoMeta *meta;
  GstMpegVideoSequenceHdr *seq_hdr = NULL;
  GstMpegVideoSequenceExt *seq_ext = NULL;
  GstMpegVideoSequenceDisplayExt *disp_ext = NULL;
  GstMpegVideoPictureHdr *pic_hdr = NULL;
  GstMpegVideoPictureExt *pic_ext = NULL;
  GstMpegVideoQuantMatrixExt *quant_ext = NULL;

  /* tag sending done late enough in hook to ensure pending events
   * have already been sent */

  if (G_UNLIKELY (mpvparse->send_codec_tag)) {
    gchar *codec;

    /* codec tag */
    codec =
        g_strdup_printf ("MPEG %d Video",
        (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1);
    taglist = gst_tag_list_new (GST_TAG_VIDEO_CODEC, codec, NULL);
    g_free (codec);

    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (mpvparse),
        gst_event_new_tag (taglist));

    mpvparse->send_codec_tag = FALSE;
  }

  /* usual clipping applies */
  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;

  if (mpvparse->send_mpeg_meta) {
    GstBuffer *buf;

    if (mpvparse->seqhdr_updated)
      seq_hdr = &mpvparse->sequencehdr;
    if (mpvparse->seqext_updated)
      seq_ext = &mpvparse->sequenceext;
    if (mpvparse->seqdispext_updated)
      disp_ext = &mpvparse->sequencedispext;
    if (mpvparse->picext_updated)
      pic_ext = &mpvparse->picext;
    if (mpvparse->quantmatrext_updated)
      quant_ext = &mpvparse->quantmatrext;
    pic_hdr = &mpvparse->pichdr;

    GST_DEBUG_OBJECT (mpvparse,
        "Adding GstMpegVideoMeta (slice_count:%d, slice_offset:%d)",
        mpvparse->slice_count, mpvparse->slice_offset);

    if (frame->out_buffer) {
      buf = frame->out_buffer = gst_buffer_make_writable (frame->out_buffer);
    } else {
      buf = frame->buffer = gst_buffer_make_writable (frame->buffer);
    }

    meta =
        gst_buffer_add_mpeg_video_meta (buf, seq_hdr, seq_ext, disp_ext,
        pic_hdr, pic_ext, quant_ext);
    meta->num_slices = mpvparse->slice_count;
    meta->slice_offset = mpvparse->slice_offset;
  }
  return GST_FLOW_OK;
}
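
/* Hedged sketch of the consumer side: a downstream element that negotiated the
 * meta (see gst_mpegv_parse_sink_query above) could read it back through the
 * accessor from <gst/codecparsers/gstmpegvideometa.h>, roughly like this. */
static void
inspect_mpeg_video_meta (GstBuffer * buffer)
{
  GstMpegVideoMeta *meta = gst_buffer_get_mpeg_video_meta (buffer);

  if (meta) {
    g_print ("%u slices starting at offset %u\n",
        meta->num_slices, (guint) meta->slice_offset);
  }
}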