static GstFlowReturn
gst_rtp_base_audio_payload_push_buffer (GstRTPBaseAudioPayload *
    baseaudiopayload, GstBuffer * buffer, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  payload_len = gst_buffer_get_size (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create just the RTP header buffer */
  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    guint i, len;

    list = gst_buffer_list_new ();
    len = gst_buffer_list_length (list);

    for (i = 0; i < len; i++) {
      /* FIXME */
      g_warning ("bufferlist not implemented");
      gst_buffer_list_add (list, outbuf);
      gst_buffer_list_add (list, buffer);
    }

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  } else {
    CopyMetaData data;

    /* copy payload */
    data.pay = baseaudiopayload;
    data.outbuf = outbuf;
    gst_buffer_foreach_meta (buffer, foreach_metadata, &data);
    outbuf = gst_buffer_append (outbuf, buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
static GstFlowReturn
gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint16 frag_offset;
  GstBufferList *list;

  /* The data available in the adapter is either smaller than the MTU or
   * bigger. If it is smaller, the complete adapter contents fit into one
   * packet. If the adapter holds more than one MTU, the MPA data has to be
   * split over multiple packets, and the frag_offset in each packet header
   * must be updated with the position inside the MPA frame. */
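  /* Worked example (illustrative numbers, not taken from the element): with
   * an MTU of 1400 bytes and a 12-byte RTP header (no CSRCs), each full
   * packet carries 1400 - 12 - 4 = 1384 MPA bytes after the 4-byte payload
   * header. 3000 bytes in the adapter would thus be sent as three packets
   * with frag_offset 0, 1384 and 2768, the last one 232 bytes long and with
   * the marker bit set. */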
  avail = gst_adapter_available (rtpmpapay->adapter);

  ret = GST_FLOW_OK;

  list =
      gst_buffer_list_new_sized (avail / (GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay) -
          RTP_HEADER_LEN) + 1);

  frag_offset = 0;
  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (4, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    payload_len -= 4;

    gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);

    /*
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |             MBZ               |          Frag_offset          |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    payload = gst_rtp_buffer_get_payload (&rtp);
    payload[0] = 0;
    payload[1] = 0;
    payload[2] = frag_offset >> 8;
    payload[3] = frag_offset & 0xff;

    avail -= payload_len;
    frag_offset += payload_len;

    if (avail == 0)
      gst_rtp_buffer_set_marker (&rtp, TRUE);

    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmpapay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmpapay), outbuf, paybuf,
        g_quark_from_static_string (GST_META_TAG_AUDIO_STR));
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_PTS (outbuf) = rtpmpapay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
    gst_buffer_list_add (list, outbuf);
  }

  ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmpapay), list);

  return ret;
}
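/* A minimal receiver-side sketch (a hypothetical helper, not part of the
 * element above, assuming the usual gst/gst.h and gst/rtp/gstrtpbuffer.h
 * headers) showing how the 4-byte MPA payload header written in
 * gst_rtp_mpa_pay_flush() could be read back: bytes 0-1 are MBZ and
 * bytes 2-3 carry Frag_offset in network byte order. */
static guint16
read_mpa_frag_offset (const guint8 * payload, gsize payload_size)
{
  g_return_val_if_fail (payload_size >= 4, 0);

  /* skip the two MBZ bytes, read the big-endian fragment offset */
  return GST_READ_UINT16_BE (payload + 2);
}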
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list     */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);
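  /* For illustration (assumed numbers): the 12 + 14 bytes of overhead are
   * presumably the 12-byte RTP header plus the 2-byte extended sequence
   * number and two 6-byte line headers of the RFC 4175 payload header shown
   * further below. With UYVY (pgroup = 4 bytes covering 2 pixels, xinc = 2)
   * and an MTU of 1400 this leaves (1400 - 26) / 4 = 343 pgroups, i.e. 686
   * pixels, per packet. */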

  use_buffer_lists = (rtpvrawpay->chunks_per_frame < (height / yinc));

  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the maximum allowed payload length, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how many bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to the next line; we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read back length, line number, offset and the continuation bit */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = p0 + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);
      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* Now either push out the buffer directly */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* or add the buffer to buffer list ... */
      gst_buffer_list_add (list, out);

      /* .. and check if we need to push out the list */
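      /* pack_line is the line index within the current field: for interlaced
       * input, lines alternate between the two fields, so divide by the
       * number of fields */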
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }

  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT,
        (NULL), ("not enough space to send at least one pixel"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
static GstFlowReturn
gst_rtp_klv_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBufferList *list = NULL;
  GstRtpKlvPay *pay;
  GstMapInfo map;
  GstBuffer *outbuf = NULL;
  gsize offset;
  guint mtu, rtp_header_size, max_payload_size;

  pay = GST_RTP_KLV_PAY (basepayload);
  mtu = GST_RTP_BASE_PAYLOAD_MTU (basepayload);

  rtp_header_size = gst_rtp_buffer_calc_header_len (0);
  max_payload_size = mtu - rtp_header_size;
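  /* For example (illustrative MTU, not a fixed value): with an MTU of 1400
   * the RTP header takes 12 bytes, so max_payload_size is 1388 and a
   * 3000-byte KLV unit would be split into packets of 1388, 1388 and 224
   * bytes, with the marker bit set on the last one. */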

  gst_buffer_map (buf, &map, GST_MAP_READ);

  if (map.size == 0)
    goto done;

  /* KLV coding shall use and only use a fixed 16-byte SMPTE-administered
   * Universal Label, according to SMPTE 298M as Key (Rec. ITU R-BT.1653-1) */
  if (map.size < 16 || GST_READ_UINT32_BE (map.data) != 0x060E2B34)
    goto bad_input;

  if (map.size > max_payload_size)
    list = gst_buffer_list_new ();

  GST_LOG_OBJECT (pay, "%" G_GSIZE_FORMAT " bytes of data to payload",
      map.size);

  offset = 0;
  while (offset < map.size) {
    GstBuffer *payloadbuf;
    GstRTPBuffer rtp = { NULL };
    guint payload_size;
    guint bytes_left;

    bytes_left = map.size - offset;
    payload_size = MIN (bytes_left, max_payload_size);

    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

    if (payload_size == bytes_left) {
      GST_LOG_OBJECT (pay, "last packet of KLV unit");
      gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
      gst_rtp_buffer_set_marker (&rtp, 1);
      gst_rtp_buffer_unmap (&rtp);
    }

    GST_LOG_OBJECT (pay, "packet with payload size %u", payload_size);

    gst_rtp_copy_meta (GST_ELEMENT_CAST (pay), outbuf, buf, 0);

    payloadbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_MEMORY,
        offset, payload_size);

    /* join rtp header + payload memory parts */
    outbuf = gst_buffer_append (outbuf, payloadbuf);

    GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf);

    /* and add to list */
    if (list != NULL)
      gst_buffer_list_insert (list, -1, outbuf);

    offset += payload_size;
  }

done:

  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  if (list != NULL)
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  else if (outbuf != NULL)
    ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;

/* ERRORS */
bad_input:
  {
    GST_ERROR_OBJECT (pay, "Input doesn't look like a KLV packet, ignoring");
    goto done;
  }
}
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  This assumes a low packet-loss network.
   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm separates large frames at synchronisation points (Segments)
   *  (See RFC 4629 section 6). It would be interesting to have a property such as network
   *  quality to select between both packetization methods */
  /* TODO Add VRC support (See RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after the first possible GOB */

      /* Check if we have a GOB, EOS or EOSBS header */
      /* FIXME EOS and EOSBS packets should never contain any GOBs and vice versa */
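      /* The masked scan below presumably matches the 17-bit H.263 start code
       * (16 zero bits followed by a 1, common to PSC and GBSC), i.e. any
       * 32-bit word where (word & 0xffff8000) == 0x00008000 */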
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for payloads that start on a picture or GOB boundary (i.e. not a
     * mid-frame continuation fragment), remove the first two 0x00 bytes of
     * the start code and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* set the P bit (0x04 in the first header byte) when the payload starts
     * with a picture or GOB start code, i.e. when it is not a mid-frame
     * continuation fragment */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
{
  GstFlowReturn ret;
  guint avail;
  GstBufferList *list;
  GstBuffer *outbuf;

  guint8 *payload;

  avail = gst_adapter_available (rtpmpvpay->adapter);

  ret = GST_FLOW_OK;

  list =
      gst_buffer_list_new_sized (avail / (GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay) -
          RTP_HEADER_LEN) + 1);

  while (avail > 0) {
    guint towrite;
    guint packet_len;
    guint payload_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    packet_len = gst_rtp_buffer_calc_packet_len (avail + 4, 0, 0);

    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay));

    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    outbuf = gst_rtp_buffer_new_allocate (4, 0, 0);

    payload_len -= 4;
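    /* with a 12-byte RTP header (no CSRCs) and the 4-byte MPEG Video-specific
     * header below, a full packet presumably carries MTU - 16 bytes of
     * elementary-stream data */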

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    payload = gst_rtp_buffer_get_payload (&rtp);
    /* the MPEG Video-specific header (RFC 2250):
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *                                  AN              FBV     FFV
     */

    /* fill in the MPEG Video-specific header; all fields are set to 0 here */
    memset (payload, 0x0, 4);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (&rtp, avail == 0);
    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmpvpay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmpvpay), outbuf, paybuf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_PTS (outbuf) = rtpmpvpay->first_ts;
    gst_buffer_list_add (list, outbuf);
  }

  ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmpvpay), list);

  return ret;
}
static GstFlowReturn
gst_rtp_base_audio_payload_push_buffer (GstRTPBaseAudioPayload *
    baseaudiopayload, GstBuffer * buffer, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint8 *payload;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  payload_len = gst_buffer_get_size (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list) {
    /* create just the RTP header buffer */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
  }
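  /* In the (unfinished) buffer-list path only the RTP header is allocated and
   * the payload buffer itself would be added to the list; in the regular path
   * the payload bytes are copied into the pre-allocated RTP buffer with
   * gst_buffer_extract() below. */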

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    guint i, len;

    list = gst_buffer_list_new ();
    len = gst_buffer_list_length (list);

    for (i = 0; i < len; i++) {
      /* FIXME */
      g_warning ("bufferlist not implemented");
      gst_buffer_list_add (list, outbuf);
      gst_buffer_list_add (list, buffer);
    }

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  } else {
    GstRTPBuffer rtp = { NULL };

    /* copy payload */
    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);
    gst_buffer_extract (buffer, 0, payload, payload_len);
    gst_rtp_buffer_unmap (&rtp);

    gst_buffer_unref (buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}