Example No. 1
static gint
gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter)
{
  gint m;

  m = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, 0,
      gst_adapter_available (adapter));
  if (m == -1)
    return gst_adapter_available (adapter) - SYNC_CODE_SIZE;

  return m;
}
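All of the examples on this page are built around gst_adapter_masked_scan_uint32 (), which slides a 32-bit big-endian window over the adapter contents and returns the first offset where (window & mask) == pattern, or -1 when nothing matches. Below is a minimal, self-contained sketch (GStreamer 1.x API, made-up buffer contents) of the same 0xffffff00 / 0x00000100 scan used above to find an MPEG start code 00 00 01 xx. Because the scan only inspects complete 4-byte windows, the callers above keep the last SYNC_CODE_SIZE bytes in the adapter when nothing is found, in case a start code straddles the buffer boundary.

#include <gst/gst.h>
#include <gst/base/gstadapter.h>

int
main (int argc, char **argv)
{
  GstAdapter *adapter;
  GstBuffer *buf;
  /* two junk bytes, then an MPEG start code 00 00 01 B3 (illustrative data) */
  static const guint8 data[] = { 0xab, 0xcd, 0x00, 0x00, 0x01, 0xb3, 0x12, 0x34 };
  gssize offset;

  gst_init (&argc, &argv);

  adapter = gst_adapter_new ();
  buf = gst_buffer_new_allocate (NULL, sizeof (data), NULL);
  gst_buffer_fill (buf, 0, data, sizeof (data));
  gst_adapter_push (adapter, buf);      /* the adapter takes ownership of buf */

  /* same mask/pattern as the scan_for_sync () functions above: match any
   * 32-bit window whose first three bytes are 00 00 01 */
  offset = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      0, gst_adapter_available (adapter));
  g_print ("start code found at offset %" G_GSSIZE_FORMAT "\n", offset);   /* prints 2 */

  g_object_unref (adapter);
  return 0;
}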
Example No. 2
static gint
gst_vdp_h264_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
  gint m;

  if (h264_dec->packetized)
    return 0;

  m = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      0, gst_adapter_available (adapter));
  if (m == -1)
    return gst_adapter_available (adapter) - SYNC_CODE_SIZE;

  return m;
}
Example No. 3
/*
 * gst_jpeg_parse_skip_to_jpeg_header:
 * @parse: the parser
 *
 * Flush everything until the next JPEG header.  The header is considered
 * to be a start marker SOI (0xff 0xd8) followed by any other marker
 * (0xff ...).
 *
 * Returns: TRUE if the header was found, FALSE if more data is needed.
 */
static gboolean
gst_jpeg_parse_skip_to_jpeg_header (GstJpegParse * parse)
{
  guint available, flush;
  gboolean ret = TRUE;

  available = gst_adapter_available (parse->priv->adapter);
  if (available < 4)
    return FALSE;

  flush = gst_adapter_masked_scan_uint32 (parse->priv->adapter, 0xffffff00,
      0xffd8ff00, 0, available);
  if (flush == -1) {
    flush = available - 3;      /* Last 3 bytes + 1 more may match header. */
    ret = FALSE;
  }
  if (flush > 0) {
    GST_LOG_OBJECT (parse, "Skipping %u bytes.", flush);
    gst_adapter_flush (parse->priv->adapter, flush);
  }
  return ret;
}
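For reference, the 0xffffff00 / 0xffd8ff00 pair above encodes exactly the rule stated in the comment: the first three bytes of the 32-bit window must be FF D8 FF (SOI followed by the start of another marker) and the fourth byte is ignored. Here is a tiny stand-alone check of that bit logic; the helper name and test values are hypothetical, for illustration only.

#include <glib.h>

/* Hypothetical reference check mirroring what the masked scan matches:
 * read a big-endian 32-bit window and compare it under the mask. */
static gboolean
window_matches (const guint8 * data, guint32 mask, guint32 pattern)
{
  guint32 w = ((guint32) data[0] << 24) | ((guint32) data[1] << 16) |
      ((guint32) data[2] << 8) | data[3];

  return (w & mask) == pattern;
}

int
main (void)
{
  static const guint8 soi_app0[4] = { 0xff, 0xd8, 0xff, 0xe0 };
  static const guint8 not_a_header[4] = { 0xff, 0xd8, 0x00, 0x10 };

  /* SOI followed by another marker (here APP0) matches ... */
  g_assert (window_matches (soi_app0, 0xffffff00, 0xffd8ff00));
  /* ... but SOI followed by a non-marker byte does not. */
  g_assert (!window_matches (not_a_header, 0xffffff00, 0xffd8ff00));
  return 0;
}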
Example No. 4
static GstBaseVideoDecoderScanResult
gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter, guint * size, gboolean at_eos)
{
  guint8 *data;
  guint32 sync_code;

  data = g_slice_alloc (SYNC_CODE_SIZE);
  gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE);
  sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]);
  g_slice_free1 (SYNC_CODE_SIZE, data);

  if (sync_code != 0x000001)
    return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;

  *size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE);

  if (*size == -1)
    return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;

  return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
}
Example No. 5
static GstBaseVideoDecoderScanResult
gst_vdp_h264_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter, guint * size, gboolean at_eos)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
  guint avail;

  avail = gst_adapter_available (adapter);
  if (avail < h264_dec->nal_length_size)
    return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;

  if (h264_dec->packetized) {
    guint8 *data;
    gint i;
    guint32 nal_length = 0;

    data = g_slice_alloc (h264_dec->nal_length_size);
    gst_adapter_copy (adapter, data, 0, h264_dec->nal_length_size);
    for (i = 0; i < h264_dec->nal_length_size; i++)
      nal_length = (nal_length << 8) | data[i];

    g_slice_free1 (h264_dec->nal_length_size, data);

    nal_length += h264_dec->nal_length_size;

    /* check for invalid NALU sizes; assume the size is the available bytes
     * when something looks fishy */
    if (nal_length <= 1 || nal_length > avail) {
      nal_length = avail - h264_dec->nal_length_size;
      GST_DEBUG ("fixing invalid NALU size to %u", nal_length);
    }

    *size = nal_length;
  }

  else {
    guint8 *data;
    guint32 start_code;
    guint n;

    data = g_slice_alloc (SYNC_CODE_SIZE);
    gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE);
    start_code = ((data[0] << 16) | (data[1] << 8) | data[2]);
    g_slice_free1 (SYNC_CODE_SIZE, data);

    GST_DEBUG ("start_code: 0x%06x", start_code);
    if (start_code != 0x000001)
      return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;

    n = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
        SYNC_CODE_SIZE, avail - SYNC_CODE_SIZE);
    if (n == -1)
      return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;

    *size = n;
  }

  GST_DEBUG ("NAL size: %d", *size);

  return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
}
Example No. 6
/* returns image length in bytes if parsed successfully,
 * otherwise 0 if more data needed,
 * if < 0 the absolute value needs to be flushed */
static gint
gst_jpeg_parse_get_image_length (GstJpegParse * parse)
{
  guint size;
  gboolean resync;
  GstAdapter *adapter = parse->priv->adapter;
  gint offset, noffset;

  size = gst_adapter_available (adapter);

  /* we expect at least 4 bytes, the first of which is the start marker */
  if (gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0, 4))
    return 0;

  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

  GST_DEBUG ("Parse state: offset=%d, resync=%d, entropy len=%d",
      parse->priv->last_offset, parse->priv->last_resync,
      parse->priv->last_entropy_len);

  /* offset is 2 less than actual offset;
   * - adapter needs at least 4 bytes for scanning,
   * - start and end marker ensure at least that much
   */
  /* resume from state offset */
  offset = parse->priv->last_offset;

  while (1) {
    guint frame_len;
    guint32 value;

    noffset =
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);
    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    }
    /* may have marker, but could have been resyncing */
    resync = resync || parse->priv->last_resync;
    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
      noffset++;
      noffset =
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    }
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    if (noffset < 0) {
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
      goto need_more_data;
    }

    /* now lock on the marker we found */
    offset = noffset;
    value = value & 0xff;
    if (value == 0xd9) {
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      parse->priv->last_resync = FALSE;
      parse->priv->last_offset = 0;
      return (offset + 4);
    } else if (value == 0xd8) {
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      /* clear parse state */
      parse->priv->last_resync = FALSE;
      parse->priv->last_offset = 0;
      return -(offset + 2);
    }

    if (value >= 0xd0 && value <= 0xd7)
      frame_len = 0;
    else {
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        goto need_more_data;
      else
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
            &frame_len);
      frame_len = frame_len & 0xffff;
    }
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
      goto need_more_data;
    }

    if (gst_jpeg_parse_parse_tag_has_entropy_segment (value)) {
      guint eseglen = parse->priv->last_entropy_len;

      GST_DEBUG ("0x%08x: finding entropy segment length", offset + 2);
      noffset = offset + 2 + frame_len + eseglen;
      while (1) {
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
        if (noffset < 0) {
          /* need more data */
          parse->priv->last_entropy_len = size - offset - 4 - frame_len - 2;
          goto need_more_data;
        }
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
          break;
        }
        noffset++;
      }
      parse->priv->last_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
          frame_len);
    }
    if (resync) {
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
          noffset, 4);
      if (noffset < 0) {
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
        offset++;
        continue;
      }
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    }

    offset += frame_len + 2;
  }

  /* EXITS */
need_more_data:
  {
    parse->priv->last_offset = offset;
    parse->priv->last_resync = resync;
    return 0;
  }
}
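The comment at the top of this example describes a tri-state contract: a positive value is a complete image length, 0 means more data is needed, and a negative value asks the caller to flush its absolute value. The sketch below shows how a caller might dispatch on that result; consume_parse_result and handle_image are hypothetical names for illustration, not part of gstjpegparse.c.

#include <gst/gst.h>
#include <gst/base/gstadapter.h>

/* Hypothetical callback type; the handler is only a stand-in. */
typedef void (*ImageHandler) (GstBuffer * image);

static gboolean
consume_parse_result (GstAdapter * adapter, gint len, ImageHandler handle_image)
{
  if (len > 0) {
    /* a complete JPEG image is available: hand it off */
    GstBuffer *image = gst_adapter_take_buffer (adapter, len);

    handle_image (image);
    gst_buffer_unref (image);   /* assumes the handler does not take ownership */
    return TRUE;
  } else if (len < 0) {
    /* skip the junk before the next SOI marker */
    gst_adapter_flush (adapter, -len);
  }
  /* len == 0: keep accumulating input */
  return FALSE;
}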
Example No. 7
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  It assumes a network with low packet loss.
   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm splits large frames at synchronisation points (Segments)
   *  (see RFC 4629 section 6). It would be interesting to have a property such
   *  as network quality to select between the two packetization methods */
  /* TODO Add VRC support (see RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after 1st gop possible */

      /* Check if we have a GOB, EOS or EOSBS start code */
      /* FIXME EOS and EOSBS packets should never contain any GOBs and vice-versa */
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* set P=1 (picture/GOB start) unless this is a fragment continuation */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}
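For reference, the 0xffff8000 / 0x00008000 pair used in the GST_FRAGMENTATION_MODE_SYNC branch matches any 32-bit window whose first seventeen bits are sixteen zeros followed by a one, i.e. the shared prefix of the H.263 picture, GOB and end-of-sequence start codes that RFC 4629 treats as synchronisation points. A small illustrative check of that bit logic (the sample values are made up):

#include <glib.h>

int
main (void)
{
  /* 00 00 80 02: sixteen zero bits followed by a one => GOB/PSC-style prefix */
  const guint32 gob_like = 0x00008002;
  /* 00 00 40 00: the seventeenth bit is zero => not a start code prefix */
  const guint32 not_gob = 0x00004000;

  g_assert ((gob_like & 0xffff8000) == 0x00008000);
  g_assert ((not_gob & 0xffff8000) != 0x00008000);
  return 0;
}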
Example No. 8
static GstFlowReturn
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  guint size;
  gint toadd = 0;
  gboolean resync;
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

  if (at_eos) {
    GST_DEBUG ("Flushing all data out");
    toadd = size;

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
      goto drop_frame;
    goto have_full_frame;
  }

  if (size < 8)
    goto need_more_data;

  if (!dec->saw_header) {
    gint ret;
    /* we expect at least 4 bytes, the first of which is the start marker */
    ret =
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
        size - 4);

    GST_DEBUG ("ret:%d", ret);
    if (ret < 0)
      goto need_more_data;

    if (ret) {
      gst_adapter_flush (adapter, ret);
      size -= ret;
    }
    dec->saw_header = TRUE;
  }

  while (1) {
    guint frame_len;
    guint32 value;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    noffset =
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);

    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    }
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
      noffset++;
      noffset =
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    }
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    if (noffset < 0) {
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
      goto need_more_data;
    }

    /* now lock on the marker we found */
    offset = noffset;
    value = value & 0xff;
    if (value == 0xd9) {
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      toadd = offset + 4;
      goto have_full_frame;
    }
    if (value == 0xd8) {
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
      /* FIXME : Need to skip data */
      toadd -= offset + 2;
      goto have_full_frame;
    }


    if (value >= 0xd0 && value <= 0xd7)
      frame_len = 0;
    else {
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        goto need_more_data;
      else
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
            &frame_len);
      frame_len = frame_len & 0xffff;
    }
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
      goto need_more_data;
    }

    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
        goto need_more_data;
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
      while (1) {
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
        if (noffset < 0) {
          /* need more data */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
          goto need_more_data;
        }
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
          break;
        }
        noffset++;
      }
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
          frame_len);
    }
    if (resync) {
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
          noffset, 4);
      if (noffset < 0) {
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
        offset++;
        continue;
      }
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    }

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;
  }

need_more_data:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return gst_video_decoder_have_frame (bdec);

drop_frame:
  gst_adapter_flush (adapter, size);
  return GST_FLOW_OK;
}