Example #1
gboolean
mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer * buffer)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);

  /* skip sync word */
  if (!gst_bit_reader_skip (&reader, 8 * 4))
    return FALSE;

  /* temporal sequence number */
  if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->tsn, 10))
    return FALSE;

  /* frame type */
  if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->pic_type, 3))
    return FALSE;

  if (hdr->pic_type == 0 || hdr->pic_type > 4)
    return FALSE;               /* Corrupted picture packet */

  /* VBV delay */
  if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_delay, 16))
    return FALSE;

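  /* forward motion vector fields are only coded for P and B frames */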
  if (hdr->pic_type == P_FRAME || hdr->pic_type == B_FRAME) {

    READ_UINT8 (&reader, hdr->full_pel_forward_vector, 1);

    READ_UINT8 (&reader, hdr->f_code[0][0], 3);
    hdr->f_code[0][1] = hdr->f_code[0][0];
  } else {
    hdr->full_pel_forward_vector = 0;
    hdr->f_code[0][0] = hdr->f_code[0][1] = 0;
  }

  if (hdr->pic_type == B_FRAME) {
    READ_UINT8 (&reader, hdr->full_pel_backward_vector, 1);

    READ_UINT8 (&reader, hdr->f_code[1][0], 3);
    hdr->f_code[1][1] = hdr->f_code[1][0];
  } else {
    hdr->full_pel_backward_vector = 0;
    hdr->f_code[1][0] = hdr->f_code[1][1] = 0;
  }

  return TRUE;

error:
  GST_WARNING ("error parsing \"Picture Header\"");
  return FALSE;
}
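Example #1 relies on a READ_UINT8 convenience macro that is not part of the listing. A plausible definition, assuming the usual GLib G_STMT_START/G_STMT_END idiom and the error label used above (the exact macro in the original source may differ), would be:

/* hypothetical sketch of the READ_UINT8 helper used above: read nbits into
 * val and jump to the caller's error label on a short read */
#define READ_UINT8(reader, val, nbits) G_STMT_START {             \
  if (!gst_bit_reader_get_bits_uint8 (reader, &val, nbits)) {     \
    GST_WARNING ("failed to read uint8, nbits: %d", nbits);       \
    goto error;                                                   \
  }                                                               \
} G_STMT_END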
Example #2
static GstFlowReturn
gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);

  GstVdpMpegFrame *mpeg_frame;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
  guint8 start_code;

  if (gst_bit_reader_get_remaining (&b_reader) < 8 * 3 + 8)
    return GST_FLOW_ERROR;

  /* skip sync_code */
  gst_bit_reader_skip_unchecked (&b_reader, 8 * 3);

  /* start_code */
  start_code = gst_bit_reader_get_bits_uint8_unchecked (&b_reader, 8);

  mpeg_frame = GST_VDP_MPEG_FRAME_CAST (frame);

  if (start_code >= MPEG_PACKET_SLICE_MIN
      && start_code <= MPEG_PACKET_SLICE_MAX) {
    GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");

    gst_vdp_mpeg_frame_add_slice (mpeg_frame, buf);
    goto done;
  }

  switch (start_code) {
    case MPEG_PACKET_SEQUENCE:
      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");

      if (mpeg_dec->prev_packet != -1)
        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
            (GstVideoFrame **) & mpeg_frame);

      mpeg_frame->seq = buf;
      break;

    case MPEG_PACKET_PICTURE:
      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");

      if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE &&
          mpeg_dec->prev_packet != MPEG_PACKET_GOP)
        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
            (GstVideoFrame **) & mpeg_frame);

      mpeg_frame->pic = buf;
      break;

    case MPEG_PACKET_GOP:
      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");

      if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE)
        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
            (GstVideoFrame **) & mpeg_frame);

      mpeg_frame->gop = buf;
      break;

    case MPEG_PACKET_EXTENSION:
    {
      guint8 ext_code;

      /* ext_code */
      if (!gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4)) {
        ret = GST_FLOW_ERROR;
        gst_buffer_unref (buf);
        goto done;
      }

      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION: %d", ext_code);

      switch (ext_code) {
        case MPEG_PACKET_EXT_SEQUENCE:
          GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE");


          mpeg_frame->seq_ext = buf;

          /* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
           * or MPEG_PACKET_GOP after this */
          start_code = MPEG_PACKET_SEQUENCE;
          break;

        case MPEG_PACKET_EXT_SEQUENCE_DISPLAY:
          GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE_DISPLAY");

          /* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE
           * or MPEG_PACKET_GOP after this */
          start_code = MPEG_PACKET_SEQUENCE;
          break;

        case MPEG_PACKET_EXT_PICTURE_CODING:
          GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");

          mpeg_frame->pic_ext = buf;
          break;

        case MPEG_PACKET_EXT_QUANT_MATRIX:
          GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");

          mpeg_frame->qm_ext = buf;
          break;

        default:
          gst_buffer_unref (buf);
      }
      break;
    }

    default:
      gst_buffer_unref (buf);
  }

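  /* at EOS, finish the frame we have already collected slices for */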
  if (at_eos && mpeg_frame->slices)
    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);

done:
  mpeg_dec->prev_packet = start_code;

  return ret;
}
Example #3
static GstFlowReturn
gst_vdp_mpeg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
    GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
    GstVdpMpegPacketizer packetizer;
    GstBuffer *buf;
    GstFlowReturn ret = GST_FLOW_OK;

    if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
        GST_DEBUG_OBJECT (mpeg_dec, "Received discont buffer");
        gst_vdp_mpeg_dec_flush (mpeg_dec);
    }


    gst_vdp_mpeg_packetizer_init (&packetizer, buffer);
    while ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
        GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
        guint32 sync_code;
        guint8 start_code;

        /* skip sync_code */
        gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3);

        /* start_code */
        gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8);

        if (start_code >= MPEG_PACKET_SLICE_MIN
                && start_code <= MPEG_PACKET_SLICE_MAX) {
            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");

            gst_buffer_ref (buf);
            gst_adapter_push (mpeg_dec->adapter, buf);
            mpeg_dec->vdp_info.slice_count++;
        }

        switch (start_code) {
        case MPEG_PACKET_PICTURE:
            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");

            if (!gst_vdp_mpeg_dec_parse_picture (mpeg_dec, buf))
                goto done;

            break;
        case MPEG_PACKET_SEQUENCE:
            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
            gst_vdp_mpeg_dec_parse_sequence (mpeg_dec, buf);
            break;
        case MPEG_PACKET_EXTENSION:
        {
            guint8 ext_code;

            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION");

            /* ext_code */
            gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4);
            switch (ext_code) {
            case MPEG_PACKET_EXT_PICTURE_CODING:
                GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");
                gst_vdp_mpeg_dec_parse_picture_coding (mpeg_dec, buf);
                break;
            case MPEG_PACKET_EXT_QUANT_MATRIX:
                GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
                gst_vdp_mpeg_dec_parse_quant_matrix (mpeg_dec, buf);
                break;
            default:
                break;
            }
            break;
        }
        case MPEG_PACKET_GOP:
            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
            gst_vdp_mpeg_dec_parse_gop (mpeg_dec, buf);
            break;
        default:
            break;
        }

        gst_buffer_unref (buf);
    }

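    /* don't decode until a sequence header (and, if required, a GOP) has been seen */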
    if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE ||
            mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP) {
        gst_adapter_clear (mpeg_dec->adapter);
        goto done;
    }

    if (mpeg_dec->vdp_info.slice_count > 0)
        ret = gst_vdp_mpeg_dec_decode (mpeg_dec, GST_BUFFER_TIMESTAMP (buffer),
                                       GST_BUFFER_SIZE (buffer));

done:
    gst_object_unref (mpeg_dec);

    return ret;
}
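Example #3 is a GStreamer 0.10-style pad chain function. For context, such a function is installed on the element's sink pad during instance initialization; a minimal sketch of that hookup is shown below (the sinkpad field name and the init function are assumptions, not taken from the original source):

static void
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * klass)
{
  /* pad creation elided; "sink" is an assumed field holding the sink pad */
  gst_pad_set_chain_function (mpeg_dec->sink,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_chain));
}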
Example #4
static GstFlowReturn
gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
  GstBitReader reader;
  GstNalUnit nal_unit;
  guint8 forbidden_zero_bit;

  guint8 *data;
  guint size;
  gint i;

  GstVideoFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

  gst_bit_reader_init_from_buffer (&reader, buf);

  /* skip nal_length or sync code */
  gst_bit_reader_skip (&reader, h264_dec->nal_length_size * 8);

  if (!gst_bit_reader_get_bits_uint8 (&reader, &forbidden_zero_bit, 1))
    goto invalid_packet;
  if (forbidden_zero_bit != 0) {
    GST_WARNING ("forbidden_zero_bit != 0");
    return GST_FLOW_ERROR;
  }

  if (!gst_bit_reader_get_bits_uint16 (&reader, &nal_unit.ref_idc, 2))
    goto invalid_packet;
  GST_DEBUG ("nal_ref_idc: %u", nal_unit.ref_idc);

  /* read nal_unit_type */
  if (!gst_bit_reader_get_bits_uint16 (&reader, &nal_unit.type, 5))
    goto invalid_packet;

  GST_DEBUG ("nal_unit_type: %u", nal_unit.type);
  if (nal_unit.type == 14 || nal_unit.type == 20) {
    if (!gst_bit_reader_skip (&reader, 24))
      goto invalid_packet;
  }
  nal_unit.IdrPicFlag = (nal_unit.type == 5 ? 1 : 0);

  data = GST_BUFFER_DATA (buf) + gst_bit_reader_get_pos (&reader) / 8;
  size = gst_bit_reader_get_remaining (&reader) / 8;

  /* strip trailing zero bytes; stop before running off the start of the data */
  i = size - 1;
  while (size > 0 && data[i] == 0x00) {
    size--;
    i--;
  }

  frame = gst_base_video_decoder_get_current_frame (base_video_decoder);

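  /* a non-slice NAL arriving after the primary coded picture starts a new
   * access unit, so finish the current frame first */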
  if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
    if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
        nal_unit.type == GST_NAL_SEI || nal_unit.type == GST_NAL_AU_DELIMITER ||
        (nal_unit.type >= 14 && nal_unit.type <= 18))
      ret =
          gst_base_video_decoder_have_frame (base_video_decoder, FALSE, &frame);
  }

  if (nal_unit.type >= GST_NAL_SLICE && nal_unit.type <= GST_NAL_SLICE_IDR) {
    GstH264Slice slice;

    if (!gst_h264_parser_parse_slice_header (h264_dec->parser, &slice, data,
            size, nal_unit))
      goto invalid_packet;

    if (slice.redundant_pic_cnt == 0) {
      if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
        GstH264Slice *p_slice;
        guint8 pic_order_cnt_type, p_pic_order_cnt_type;
        gboolean finish_frame = FALSE;

        p_slice = &(GST_H264_FRAME_CAST (frame)->slice_hdr);
        pic_order_cnt_type = slice.picture->sequence->pic_order_cnt_type;
        p_pic_order_cnt_type = p_slice->picture->sequence->pic_order_cnt_type;

        if (slice.frame_num != p_slice->frame_num)
          finish_frame = TRUE;

        else if (slice.picture != p_slice->picture)
          finish_frame = TRUE;

        else if (slice.bottom_field_flag != p_slice->bottom_field_flag)
          finish_frame = TRUE;

        else if (nal_unit.ref_idc != p_slice->nal_unit.ref_idc &&
            (nal_unit.ref_idc == 0 || p_slice->nal_unit.ref_idc == 0))
          finish_frame = TRUE;

        else if ((pic_order_cnt_type == 0 && p_pic_order_cnt_type == 0) &&
            (slice.pic_order_cnt_lsb != p_slice->pic_order_cnt_lsb ||
                slice.delta_pic_order_cnt_bottom !=
                p_slice->delta_pic_order_cnt_bottom))
          finish_frame = TRUE;

        else if ((pic_order_cnt_type == 1 && p_pic_order_cnt_type == 1) &&
            (slice.delta_pic_order_cnt[0] != p_slice->delta_pic_order_cnt[0] ||
                slice.delta_pic_order_cnt[1] !=
                p_slice->delta_pic_order_cnt[1]))
          finish_frame = TRUE;

        if (finish_frame)
          ret =
              gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
              &frame);

      }

      if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
        if (GST_H264_IS_I_SLICE (slice.type)
            || GST_H264_IS_SI_SLICE (slice.type))
          GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_KEYFRAME);

        GST_H264_FRAME_CAST (frame)->slice_hdr = slice;
        GST_VIDEO_FRAME_FLAG_SET (frame, GST_H264_FRAME_GOT_PRIMARY);
      }
    }
    gst_h264_frame_add_slice ((GstH264Frame *) frame, buf);
  }

  if (nal_unit.type == GST_NAL_SPS) {
    if (!gst_h264_parser_parse_sequence (h264_dec->parser, data, size))
      goto invalid_packet;
  }

  if (nal_unit.type == GST_NAL_PPS) {
    if (!gst_h264_parser_parse_picture (h264_dec->parser, data, size))
      goto invalid_packet;
  }

  gst_buffer_unref (buf);
  return ret;

invalid_packet:
  GST_WARNING ("Invalid packet size!");
  gst_buffer_unref (buf);

  return GST_FLOW_OK;
}
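Example #4 fills a GstNalUnit structure that is not shown in the listing. A minimal definition inferred from the fields the example touches (the real struct in the original parser may carry additional members) could look like:

/* hypothetical sketch of GstNalUnit, limited to the fields used above */
typedef struct
{
  guint16 ref_idc;              /* nal_ref_idc, 2 bits */
  guint16 type;                 /* nal_unit_type, 5 bits */
  guint8 IdrPicFlag;            /* 1 when nal_unit_type == 5 (IDR slice) */
} GstNalUnit;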
Example #5
static gboolean
gst_rtp_mp4g_pay_parse_audio_config (GstRtpMP4GPay * rtpmp4gpay,
    GstBuffer * buffer)
{
  GstMapInfo map;
  guint8 objectType = 0;
  guint8 samplingIdx = 0;
  guint8 channelCfg = 0;
  GstBitReader br;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  gst_bit_reader_init (&br, map.data, map.size);

  /* any object type is fine, we need to copy it to the profile-level-id field. */
  if (!gst_bit_reader_get_bits_uint8 (&br, &objectType, 5))
    goto too_short;
  if (objectType == 0)
    goto invalid_object;

  if (!gst_bit_reader_get_bits_uint8 (&br, &samplingIdx, 4))
    goto too_short;
  /* only fixed values for now */
  if (samplingIdx > 12 && samplingIdx != 15)
    goto wrong_freq;

  if (!gst_bit_reader_get_bits_uint8 (&br, &channelCfg, 4))
    goto too_short;
  if (channelCfg > 7)
    goto wrong_channels;

  /* rtp rate depends on sampling rate of the audio */
  if (samplingIdx == 15) {
    guint32 rate = 0;

    /* index of 15 means we get the rate in the next 24 bits */
    if (!gst_bit_reader_get_bits_uint32 (&br, &rate, 24))
      goto too_short;

    rtpmp4gpay->rate = rate;
  } else {
    /* else use the rate from the table */
    rtpmp4gpay->rate = sampling_table[samplingIdx];
  }

  rtpmp4gpay->frame_len = 1024;

  switch (objectType) {
    case 1:
    case 2:
    case 3:
    case 4:
    case 6:
    case 7:
    {
      guint8 frameLenFlag = 0;

      if (gst_bit_reader_get_bits_uint8 (&br, &frameLenFlag, 1))
        if (frameLenFlag)
          rtpmp4gpay->frame_len = 960;

      break;
    }
    default:
      break;
  }

  /* extra rtp params contain the number of channels */
  g_free (rtpmp4gpay->params);
  rtpmp4gpay->params = g_strdup_printf ("%d", channelCfg);
  /* audio stream type */
  rtpmp4gpay->streamtype = "5";
  /* mode only high bitrate for now */
  rtpmp4gpay->mode = "AAC-hbr";
  /* profile */
  g_free (rtpmp4gpay->profile);
  rtpmp4gpay->profile = g_strdup_printf ("%d", objectType);

  GST_DEBUG_OBJECT (rtpmp4gpay,
      "objectType: %d, samplingIdx: %d (%d), channelCfg: %d, frame_len %d",
      objectType, samplingIdx, rtpmp4gpay->rate, channelCfg,
      rtpmp4gpay->frame_len);

  gst_buffer_unmap (buffer, &map);
  return TRUE;

  /* ERROR */
too_short:
  {
    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
        (NULL), ("config string too short"));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
invalid_object:
  {
    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
        (NULL), ("invalid object type"));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
wrong_freq:
  {
    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
        (NULL), ("unsupported frequency index %d", samplingIdx));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
wrong_channels:
  {
    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
        (NULL), ("unsupported number of channels %d, must < 8", channelCfg));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
}
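Example #5 indexes a sampling_table that is not included in the listing. Assuming it follows the standard AAC samplingFrequencyIndex table from ISO/IEC 14496-3 (indices 13 and 14 are reserved, 15 is the escape value handled explicitly above), a matching definition would be:

/* sketch of the AAC sampling-rate table assumed by the example; entries 13-15
 * are unused because index 15 carries an explicit 24-bit rate instead */
static const guint sampling_table[16] = {
  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
  16000, 12000, 11025, 8000, 7350, 0, 0, 0
};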