Code example #1
File: gstrtpceltpay.c    Project: adesurya/gst-mobile
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;
  GstRTPBuffer rtp = { NULL, };

  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* point to the payload for size headers and data */
  spayload = gst_rtp_buffer_get_payload (&rtp);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_TIMESTAMP (outbuf) == -1)
      GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);

    /* write the size to the header */
    size = gst_buffer_get_size (buf);
    while (size > 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    /* copy payload */
    size = gst_buffer_get_size (buf);
    gst_buffer_extract (buf, 0, payload, size);
    payload += size;

    gst_buffer_unref (buf);
  }
  gst_rtp_buffer_unmap (&rtp);

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
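The size-header loop above uses the Speex/CELT-style length prefix: each queued frame size is written as a run of 0xff bytes followed by a final byte carrying whatever count remains. A minimal illustrative sketch of that encoding, not part of the element (the helper name is made up):

/* Illustrative helper (assumption, not in the file above): encode one frame
 * length the same way the while-loop does and return the number of size
 * header bytes written. */
static guint
write_size_header (guint8 * dest, guint size)
{
  guint written = 0;

  /* each 0xff byte stands for 255 bytes of frame data */
  while (size > 0xff) {
    dest[written++] = 0xff;
    size -= 0xff;
  }
  /* final byte carries the remaining count */
  dest[written++] = size;

  return written;
}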
Code example #2
static GstFlowReturn
gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstBuffer *outbuf;
  GstClockTime pts, dts, duration;
  CopyMetaData data;

  pts = GST_BUFFER_PTS (buffer);
  dts = GST_BUFFER_DTS (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  data.pay = GST_RTP_OPUS_PAY (basepayload);
  data.outbuf = outbuf;
  gst_buffer_foreach_meta (buffer, foreach_metadata, &data);
  outbuf = gst_buffer_append (outbuf, buffer);

  GST_BUFFER_PTS (outbuf) = pts;
  GST_BUFFER_DTS (outbuf) = dts;
  GST_BUFFER_DURATION (outbuf) = duration;

  /* Push out */
  return gst_rtp_base_payload_push (basepayload, outbuf);
}
Code example #3
/**
 * gst_rtp_base_audio_payload_push:
 * @baseaudiopayload: a #GstRTPBaseAudioPayload
 * @data: data to set as payload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of @data as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * Returns: a #GstFlowReturn
 */
GstFlowReturn
gst_rtp_base_audio_payload_push (GstRTPBaseAudioPayload * baseaudiopayload,
    const guint8 * data, guint payload_len, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstBuffer *outbuf;
  guint8 *payload;
  GstFlowReturn ret;
  GstRTPBuffer rtp = { NULL };

  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create buffer to hold the payload */
  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload,
      payload_len, 0, 0);

  /* copy payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
  payload = gst_rtp_buffer_get_payload (&rtp);
  memcpy (payload, data, payload_len);
  gst_rtp_buffer_unmap (&rtp);

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;
}
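A hypothetical usage sketch of the documented function, assuming it is called from a subclass' handle_buffer vmethod; the function name my_pay_handle_buffer is a placeholder and only illustrates passing the mapped input bytes and their PTS to gst_rtp_base_audio_payload_push():

static GstFlowReturn
my_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buffer)
{
  GstRTPBaseAudioPayload *audiopayload;
  GstMapInfo map;
  GstFlowReturn ret;

  audiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  /* copies map.size bytes into a new RTP buffer and pushes it downstream */
  ret = gst_rtp_base_audio_payload_push (audiopayload, map.data, map.size,
      GST_BUFFER_PTS (buffer));

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  return ret;
}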
Code example #4
static GstFlowReturn
gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint available;
  guint max_payload;
  GstBuffer *outbuf;
  guint8 *payload_data;
  guint frame_count;
  guint payload_length;
  struct rtp_payload *payload;

  if (sbcpay->frame_length == 0) {
    GST_ERROR_OBJECT (sbcpay, "Frame length is 0");
    return GST_FLOW_ERROR;
  }

  available = gst_adapter_available (sbcpay->adapter);

  max_payload =
      gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (sbcpay) -
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  max_payload = MIN (max_payload, available);
  frame_count = max_payload / sbcpay->frame_length;
  payload_length = frame_count * sbcpay->frame_length;
  if (payload_length == 0)      /* Nothing to send */
    return GST_FLOW_OK;

  outbuf = gst_rtp_buffer_new_allocate (payload_length +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  /* get payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (sbcpay));

  /* write header and copy data into payload */
  payload_data = gst_rtp_buffer_get_payload (&rtp);
  payload = (struct rtp_payload *) payload_data;
  memset (payload, 0, sizeof (struct rtp_payload));
  payload->frame_count = frame_count;

  gst_adapter_copy (sbcpay->adapter, payload_data +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);

  gst_rtp_buffer_unmap (&rtp);

  gst_adapter_flush (sbcpay->adapter, payload_length);

  /* FIXME: what about duration? */
  GST_BUFFER_PTS (outbuf) = sbcpay->timestamp;
  GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes", payload_length);

  return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (sbcpay), outbuf);
}
Code example #5
static GstFlowReturn
gst_rtp_base_audio_payload_push_buffer (GstRTPBaseAudioPayload *
    baseaudiopayload, GstBuffer * buffer, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  payload_len = gst_buffer_get_size (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create just the RTP header buffer */
  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    guint i, len;

    list = gst_buffer_list_new ();
    len = gst_buffer_list_length (list);

    for (i = 0; i < len; i++) {
      /* FIXME */
      g_warning ("bufferlist not implemented");
      gst_buffer_list_add (list, outbuf);
      gst_buffer_list_add (list, buffer);
    }

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  } else {
    CopyMetaData data;

    /* copy payload */
    data.pay = baseaudiopayload;
    data.outbuf = outbuf;
    gst_buffer_foreach_meta (buffer, foreach_metadata, &data);
    outbuf = gst_buffer_append (outbuf, buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
Code example #6
static GstFlowReturn
gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstBuffer *outbuf;

  outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  outbuf = gst_buffer_append (outbuf, gst_buffer_ref (buffer));

  /* Push out */
  return gst_rtp_base_payload_push (basepayload, outbuf);
}
Code example #7
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail, mtu;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp2tpay);

  while (avail > 0 && (ret == GST_FLOW_OK)) {
    guint towrite;
    guint payload_len;
    guint packet_len;
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, mtu);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
    payload_len -= payload_len % 188;

    /* need whole packets */
    if (!payload_len)
      break;

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

    /* get payload */
    paybuf = gst_adapter_take_buffer_fast (rtpmp2tpay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmp2tpay), outbuf, paybuf, 0);
    outbuf = gst_buffer_append (outbuf, paybuf);
    avail -= payload_len;

    GST_BUFFER_PTS (outbuf) = rtpmp2tpay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

    GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %u",
        (guint) gst_buffer_get_size (outbuf));

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp2tpay), outbuf);
  }

  return ret;
}
Code example #8
static GstFlowReturn
gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPGSMPay *rtpgsmpay;
  guint payload_len;
  GstBuffer *outbuf;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;

  rtpgsmpay = GST_RTP_GSM_PAY (basepayload);

  timestamp = GST_BUFFER_PTS (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* FIXME, only one GSM frame per RTP packet for now */
  payload_len = gst_buffer_get_size (buffer);

  /* FIXME, just error out for now */
  if (payload_len > GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay))
    goto too_big;

  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  /* copy timestamp and duration */
  GST_BUFFER_PTS (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpgsmpay), outbuf, buffer,
      g_quark_from_static_string (GST_META_TAG_AUDIO_STR));

  /* append payload */
  outbuf = gst_buffer_append (outbuf, buffer);

  GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (outbuf));

  ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;

  /* ERRORS */
too_big:
  {
    GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
        ("payload_len %u > mtu %u", payload_len,
            GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay)));
    return GST_FLOW_ERROR;
  }
}
Code example #9
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf, *payload_buf;
  GstFlowReturn ret;
  guint avail;
  GstRTPBuffer rtp = { NULL };

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  GST_BUFFER_PTS (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  payload_buf = gst_adapter_take_buffer_fast (pay->adapter, avail);

  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (&rtp, TRUE);
    pay->discont = FALSE;
  }
  gst_rtp_buffer_unmap (&rtp);
  gst_rtp_copy_meta (GST_ELEMENT_CAST (pay), outbuf, payload_buf,
      g_quark_from_static_string (GST_META_TAG_AUDIO_STR));

  outbuf = gst_buffer_append (outbuf, payload_buf);

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);

  return ret;
}
Code example #10
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint8 *payload;
  guint avail;
  GstRTPBuffer rtp = { NULL };

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
  payload = gst_rtp_buffer_get_payload (&rtp);

  GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  gst_adapter_copy (pay->adapter, payload, 0, avail);

  /* flush bytes from adapter */
  gst_adapter_flush (pay->adapter, avail);
  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (&rtp, TRUE);
    pay->discont = FALSE;
  }
  gst_rtp_buffer_unmap (&rtp);

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);

  return ret;
}
Code example #11
/* Get a DV frame, chop it up in pieces, and push the pieces to the RTP layer.
 */
static GstFlowReturn
gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPDVPay *rtpdvpay;
  guint max_payload_size;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gint hdrlen;
  gsize size;
  GstMapInfo map;
  guint8 *data;
  guint8 *dest;
  guint filled;
  GstRTPBuffer rtp = { NULL, };

  rtpdvpay = GST_RTP_DV_PAY (basepayload);

  hdrlen = gst_rtp_buffer_calc_header_len (0);
  /* DV frames are made up from a bunch of DIF blocks. DIF blocks are 80 bytes
   * each, and we should put an integral number of them in each RTP packet.
   * Therefore, we round the available room down to the nearest multiple of 80.
   *
   * The available room is just the packet MTU, minus the RTP header length. */
  max_payload_size = ((GST_RTP_BASE_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;

  /* The length of the buffer to transmit. */
  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
        (NULL), ("Failed to map buffer"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
  data = map.data;
  size = map.size;

  GST_DEBUG_OBJECT (rtpdvpay,
      "DV RTP payloader got buffer of %" G_GSIZE_FORMAT
      " bytes, splitting in %u byte " "payload fragments, at time %"
      GST_TIME_FORMAT, size, max_payload_size,
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));

  if (!rtpdvpay->negotiated) {
    gst_dv_pay_negotiate (rtpdvpay, data, size);
    /* if we have not yet scanned the stream for its type, do so now */
    rtpdvpay->negotiated = TRUE;
  }

  outbuf = NULL;
  dest = NULL;
  filled = 0;

  /* while we have complete DIF chunks left */
  while (size >= 80) {
    /* Allocate a new buffer, set the timestamp */
    if (outbuf == NULL) {
      outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buffer);

      if (!gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp)) {
        gst_buffer_unref (outbuf);
        GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
            (NULL), ("Failed to map RTP buffer"));
        ret = GST_FLOW_ERROR;
        goto beach;
      }
      dest = gst_rtp_buffer_get_payload (&rtp);
      filled = 0;
    }

    /* inspect the DIF chunk, if we don't need to include it, skip to the next one. */
    if (include_dif (rtpdvpay, data)) {
      /* copy data in packet */
      memcpy (dest, data, 80);

      dest += 80;
      filled += 80;
    }

    /* go to next dif chunk */
    size -= 80;
    data += 80;

    /* push out the buffer if the next one would exceed the max packet size or
     * when we are pushing the last packet */
    if (filled + 80 > max_payload_size || size < 80) {
      if (size < 160) {
        guint hlen;

        /* set marker */
        gst_rtp_buffer_set_marker (&rtp, TRUE);

        /* shrink buffer to last packet */
        hlen = gst_rtp_buffer_get_header_len (&rtp);
        gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
      }

      /* Push out the created piece, and check for errors. */
      gst_rtp_buffer_unmap (&rtp);
      ret = gst_rtp_base_payload_push (basepayload, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      outbuf = NULL;
    }
  }

beach:
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  return ret;
}
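The MTU rounding described in the comment above can be checked with concrete numbers; the values below are only an illustrative assumption, not taken from the file:

/* Worked example of the rounding to whole 80-byte DIF blocks:
 * a 1400-byte MTU minus the 12-byte RTP header leaves 1388 bytes, which
 * rounds down to 1360 = 17 * 80, i.e. at most 17 DIF blocks per packet. */
guint mtu = 1400;                                     /* example MTU      */
guint hdrlen = gst_rtp_buffer_calc_header_len (0);    /* 12, no CSRCs     */
guint max_payload_size = ((mtu - hdrlen) / 80) * 80;  /* 1360             */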
Code example #12
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list     */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);

  use_buffer_lists = (rtpvrawpay->chunks_per_frame < (height / yinc));

  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how many bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to the next line; we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = p0 + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);
      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* Now either push out the buffer directly */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* or add the buffer to buffer list ... */
      gst_buffer_list_add (list, out);

      /* .. and check if we need to push out the list */
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }

  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT,
        (NULL), ("not enough space to send at least one pixel"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
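For reference, the 6-byte line headers written in the first pass and re-read in the second pass follow the RFC 4175 layout drawn in the diagram above. A hedged receiver-side decoding sketch (the helper name and signature are made up, the bit masks mirror the code above):

/* Unpack one 6-byte line header of the payload layout shown above. */
static void
parse_line_header (const guint8 * hdr, guint * length, guint * line_no,
    gboolean * field, guint * offset, gboolean * cont)
{
  *length = (hdr[0] << 8) | hdr[1];            /* Length                  */
  *field = (hdr[2] & 0x80) != 0;               /* F: field identification */
  *line_no = ((hdr[2] & 0x7f) << 8) | hdr[3];  /* Line No                 */
  *cont = (hdr[4] & 0x80) != 0;                /* C: continuation bit     */
  *offset = ((hdr[4] & 0x7f) << 8) | hdr[5];   /* Offset                  */
}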
Code example #13
File: gstrtpasfpay.c    Project: 0p1pp1/gst-plugins-bad
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);
    buffer = gst_buffer_make_writable (buffer);
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't completely fill an output buffer, we push it as soon
     * as we add a fragment, because it does not seem possible to determine
     * where an ASF packet fragment ends inside an RTP packet payload.
     * This flag tells us to push the packet.
     */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
          0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first asf packet, its send time is the 
       * rtp packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* there is not enough room for any more buffers */
    if (force_push || size_left <= 8) {

      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);
  return ret;
}
Code example #14
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;

  guint8 *payload;

  avail = gst_adapter_available (rtpmpvpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint packet_len;
    guint payload_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);

    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay));

    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 4, 0);

    outbuf = gst_rtp_buffer_new_allocate (4, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    payload = gst_rtp_buffer_get_payload (&rtp);
    /* enable MPEG Video-specific header
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *                                  AN              FBV     FFV
     */

    /* fill in the MPEG Video-specific header 
     * data is set to 0x0 here
     */
    memset (payload, 0x0, 4);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (&rtp, avail == 0);
    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmpvpay->adapter, payload_len);
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmpvpay), outbuf);
  }

  return ret;
}
Code example #15
static GstFlowReturn
gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay)
{
  guint avail, FT, NF, mtu;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  /* the data available in the adapter is either smaller than the MTU or
   * bigger. If it is smaller, the complete adapter contents can be put in
   * one packet. If the adapter holds more than one MTU, we need to split
   * the AC3 data over multiple packets. */
  avail = gst_adapter_available (rtpac3pay->adapter);

  ret = GST_FLOW_OK;

  FT = 0;
  /* number of frames */
  NF = rtpac3pay->NF;

  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpac3pay);

  GST_LOG_OBJECT (rtpac3pay, "flushing %u bytes", avail);

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;
    GstRTPBuffer rtp = { NULL, };
    GstBuffer *payload_buffer;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (2 + avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, mtu);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (2, 0, 0);

    if (FT == 0) {
      /* check if it all fits */
      if (towrite < packet_len) {
        guint maxlen;

        GST_LOG_OBJECT (rtpac3pay, "we need to fragment");
        /* check if we will be able to put at least 5/8th of the total
         * frame in this first frame. */
        if ((avail * 5) / 8 >= (payload_len - 2))
          FT = 1;
        else
          FT = 2;
        /* check how many fragments we will need */
        maxlen = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
        NF = (avail + maxlen - 1) / maxlen;
      }
    } else if (FT != 3) {
      /* remaining fragment */
      FT = 3;
    }

    /*
     *  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ    | FT|       NF      |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * FT: 0: one or more complete frames
     *     1: initial 5/8 fragment
     *     2: initial fragment not 5/8
     *     3: other fragment
     * NF: amount of frames if FT = 0, else number of fragments.
     */
    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    GST_LOG_OBJECT (rtpac3pay, "FT %u, NF %u", FT, NF);
    payload = gst_rtp_buffer_get_payload (&rtp);
    payload[0] = (FT & 3);
    payload[1] = NF;
    payload_len -= 2;

    if (avail == payload_len)
      gst_rtp_buffer_set_marker (&rtp, TRUE);
    gst_rtp_buffer_unmap (&rtp);

    payload_buffer =
        gst_adapter_take_buffer_fast (rtpac3pay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpac3pay), outbuf, payload_buffer,
        g_quark_from_static_string (GST_META_TAG_AUDIO_STR));

    outbuf = gst_buffer_append (outbuf, payload_buffer);

    avail -= payload_len;

    GST_BUFFER_PTS (outbuf) = rtpac3pay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpac3pay->duration;

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpac3pay), outbuf);
  }

  return ret;
}
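The fragment count NF computed above can be checked with concrete numbers; the MTU and frame size below are illustrative assumptions only:

/* Worked example for the NF computation above:
 * with a 1400-byte MTU each fragment carries at most
 *   maxlen = gst_rtp_buffer_calc_payload_len (1400 - 2, 0, 0) = 1386 bytes,
 * so a 3000-byte AC-3 frame needs NF = (3000 + 1386 - 1) / 1386 = 3 fragments. */
guint mtu = 1400;
guint avail = 3000;
guint maxlen = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);  /* 1386 */
guint NF = (avail + maxlen - 1) / maxlen;                        /* 3    */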
Code example #16
static GstFlowReturn
gst_rtp_klv_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBufferList *list = NULL;
  GstRtpKlvPay *pay;
  GstMapInfo map;
  GstBuffer *outbuf = NULL;
  gsize offset;
  guint mtu, rtp_header_size, max_payload_size;

  pay = GST_RTP_KLV_PAY (basepayload);
  mtu = GST_RTP_BASE_PAYLOAD_MTU (basepayload);

  rtp_header_size = gst_rtp_buffer_calc_header_len (0);
  max_payload_size = mtu - rtp_header_size;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  if (map.size == 0)
    goto done;

  /* KLV coding shall use, and only use, a fixed 16-byte SMPTE-administered
   * Universal Label as the Key, according to SMPTE 298M (Rec. ITU-R BT.1653-1) */
  if (map.size < 16 || GST_READ_UINT32_BE (map.data) != 0x060E2B34)
    goto bad_input;

  if (map.size > max_payload_size)
    list = gst_buffer_list_new ();

  GST_LOG_OBJECT (pay, "%" G_GSIZE_FORMAT " bytes of data to payload",
      map.size);

  offset = 0;
  while (offset < map.size) {
    GstBuffer *payloadbuf;
    GstRTPBuffer rtp = { NULL };
    guint payload_size;
    guint bytes_left;

    bytes_left = map.size - offset;
    payload_size = MIN (bytes_left, max_payload_size);

    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

    if (payload_size == bytes_left) {
      GST_LOG_OBJECT (pay, "last packet of KLV unit");
      gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
      gst_rtp_buffer_set_marker (&rtp, 1);
      gst_rtp_buffer_unmap (&rtp);
    }

    GST_LOG_OBJECT (pay, "packet with payload size %u", payload_size);

    gst_rtp_copy_meta (GST_ELEMENT_CAST (pay), outbuf, buf, 0);

    payloadbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_MEMORY,
        offset, payload_size);

    /* join rtp header + payload memory parts */
    outbuf = gst_buffer_append (outbuf, payloadbuf);

    GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf);

    /* and add to list */
    if (list != NULL)
      gst_buffer_list_insert (list, -1, outbuf);

    offset += payload_size;
  }

done:

  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  if (list != NULL)
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  else if (outbuf != NULL)
    ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;

/* ERRORS */
bad_input:
  {
    GST_ERROR_OBJECT (pay, "Input doesn't look like a KLV packet, ignoring");
    goto done;
  }
}
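A hedged sketch of input that would pass the Universal Label sanity check above; only the 0x060E2B34 prefix is required by the check, every other key byte and the value below are made-up placeholders:

/* Hypothetical test input: a minimal KLV unit starting with a 16-byte
 * SMPTE Universal Label (first four bytes 0x06 0x0E 0x2B 0x34), a BER
 * short-form length and a 4-byte value. */
static GstBuffer *
make_dummy_klv_unit (void)
{
  static const guint8 klv[] = {
    0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x01,   /* Key (UL), example  */
    0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
    0x04,                                              /* Length (BER short) */
    0xde, 0xad, 0xbe, 0xef                             /* Value              */
  };
  GstBuffer *buf;

  buf = gst_buffer_new_allocate (NULL, sizeof (klv), NULL);
  gst_buffer_fill (buf, 0, klv, sizeof (klv));

  return buf;
}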
Code example #17
static GstFlowReturn
gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
{
  guint avail, total;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint mtu;

  /* the data available in the adapter is either smaller than the MTU or
   * bigger. If it is smaller, the complete adapter contents can be put in
   * one packet. If the adapter holds more than one MTU, we need to
   * fragment the MPEG data over multiple packets. */
  total = avail = gst_adapter_available (rtpmp4gpay->adapter);

  ret = GST_FLOW_OK;
  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4gpay);

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes, we need 4 spare bytes for
     * the AU header. */
    towrite = MIN (packet_len, mtu - 4);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    GST_DEBUG_OBJECT (rtpmp4gpay,
        "avail %d, towrite %d, packet_len %d, payload_len %d", avail, towrite,
        packet_len, payload_len);

    /* create buffer to hold the payload, also make room for the 4 header bytes. */
    outbuf = gst_rtp_buffer_new_allocate (4, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (&rtp);

    /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
     * |AU-headers-length|AU-header|AU-header|      |AU-header|padding|
     * |                 |   (1)   |   (2)   |      |   (n)   | bits  |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
     */
    /* AU-headers-length, we only have 1 AU-header */
    payload[0] = 0x00;
    payload[1] = 0x10;          /* we use 16 bits for the header */

    /* +---------------------------------------+
     * |     AU-size                           |
     * +---------------------------------------+
     * |     AU-Index / AU-Index-delta         |
     * +---------------------------------------+
     * |     CTS-flag                          |
     * +---------------------------------------+
     * |     CTS-delta                         |
     * +---------------------------------------+
     * |     DTS-flag                          |
     * +---------------------------------------+
     * |     DTS-delta                         |
     * +---------------------------------------+
     * |     RAP-flag                          |
     * +---------------------------------------+
     * |     Stream-state                      |
     * +---------------------------------------+
     */
    /* The AU-header, no CTS, DTS, RAP, Stream-state 
     *
     * AU-size is always the total size of the AU, not the fragmented size 
     */
    payload[2] = (total & 0x1fe0) >> 5;
    payload[3] = (total & 0x1f) << 3;   /* we use 13 bits for the size, 3 bits index */

    /* marker only if the packet is complete */
    gst_rtp_buffer_set_marker (&rtp, avail <= payload_len);

    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmp4gpay->adapter, payload_len);
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4gpay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtpmp4gpay->first_duration;

    if (rtpmp4gpay->frame_len) {
      GST_BUFFER_OFFSET (outbuf) = rtpmp4gpay->offset;
      rtpmp4gpay->offset += rtpmp4gpay->frame_len;
    }

    if (rtpmp4gpay->discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      /* Only the first outputted buffer has the DISCONT flag */
      rtpmp4gpay->discont = FALSE;
    }

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp4gpay), outbuf);

    avail -= payload_len;
  }

  return ret;
}
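The AU-size packing above stores the total AU size in 13 bits and leaves the low 3 bits for the AU-Index; a worked example with an assumed value makes the bit arithmetic concrete:

/* Worked example of the AU-header packing above (illustrative value only):
 * for an AU of total = 1000 bytes,
 *   payload[2] = (1000 & 0x1fe0) >> 5 = 31  (0x1f)
 *   payload[3] = (1000 & 0x1f)  << 3 = 64  (0x40)
 * so the 13-bit AU-size 1000 = 31 * 32 + 8 sits in the top bits and the
 * low 3 bits (AU-Index) stay 0. */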
Code example #18
File: gstrtpamrpay.c    Project: PeterXu/gst-mobile
static GstFlowReturn
gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpAMRPay *rtpamrpay;
  const gint *frame_size;
  GstFlowReturn ret;
  guint payload_len;
  GstMapInfo map;
  GstBuffer *outbuf;
  guint8 *payload, *ptr, *payload_amr;
  GstClockTime timestamp, duration;
  guint packet_len, mtu;
  gint i, num_packets, num_nonempty_packets;
  gint amr_len;
  gboolean sid = FALSE;
  GstRTPBuffer rtp = { NULL };

  rtpamrpay = GST_RTP_AMR_PAY (basepayload);
  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpamrpay);

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* setup frame size pointer */
  if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
    frame_size = nb_frame_size;
  else
    frame_size = wb_frame_size;

  GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", map.size);

  /* FIXME, only
   * octet aligned, no interleaving, single channel, no CRC,
   * no robust-sorting. To fix this you need to implement the downstream
   * negotiation function. */

  /* first count number of packets and total amr frame size */
  amr_len = num_packets = num_nonempty_packets = 0;
  for (i = 0; i < map.size; i++) {
    guint8 FT;
    gint fr_size;

    FT = (map.data[i] & 0x78) >> 3;

    fr_size = frame_size[FT];
    GST_DEBUG_OBJECT (basepayload, "frame type %d, frame size %d", FT, fr_size);
    /* FIXME, we don't handle this yet.. */
    if (fr_size <= 0)
      goto wrong_size;

    if (fr_size == 5)
      sid = TRUE;

    amr_len += fr_size;
    num_nonempty_packets++;
    num_packets++;
    i += fr_size;
  }
  if (amr_len > map.size)
    goto incomplete_frame;

  /* we need one extra byte for the CMR, the ToC is in the input
   * data */
  payload_len = map.size + 1;

  /* get packet len to check against MTU */
  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
  if (packet_len > mtu)
    goto too_big;

  /* now alloc output buffer */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* copy timestamp */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  if (duration != GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = duration;
  else {
    GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
  }

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (&rtp, TRUE);
    gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
  }

  if (G_UNLIKELY (sid)) {
    gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
  }

  /* perfect rtptime */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpamrpay->first_ts))) {
    rtpamrpay->first_ts = timestamp;
    rtpamrpay->first_rtp_time = rtpamrpay->next_rtp_time;
  }
  GST_BUFFER_OFFSET (outbuf) = rtpamrpay->next_rtp_time;
  rtpamrpay->next_rtp_time +=
      (num_packets * 160) << (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);

  /* get payload, this is now writable */
  payload = gst_rtp_buffer_get_payload (&rtp);

  /*   0 1 2 3 4 5 6 7
   *  +-+-+-+-+-+-+-+-+
   *  |  CMR  |R|R|R|R|
   *  +-+-+-+-+-+-+-+-+
   */
  payload[0] = 0xF0;            /* CMR, no specific mode requested */

  /* this is where we copy the AMR data, after num_packets FTs and the
   * CMR. */
  payload_amr = payload + num_packets + 1;

  /* copy data in payload, first we copy all the FTs then all
   * the AMR data. The last FT has to have the F flag cleared. */
  ptr = map.data;
  for (i = 1; i <= num_packets; i++) {
    guint8 FT;
    gint fr_size;

    /*   0 1 2 3 4 5 6 7
     *  +-+-+-+-+-+-+-+-+
     *  |F|  FT   |Q|P|P| more FT...
     *  +-+-+-+-+-+-+-+-+
     */
    FT = (*ptr & 0x78) >> 3;

    fr_size = frame_size[FT];

    if (i == num_packets)
      /* last packet, clear F flag */
      payload[i] = *ptr & 0x7f;
    else
      /* set F flag */
      payload[i] = *ptr | 0x80;

    memcpy (payload_amr, &ptr[1], fr_size);

    /* all sizes are > 0 since we checked for that above */
    ptr += fr_size + 1;
    payload_amr += fr_size;
  }

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  gst_rtp_buffer_unmap (&rtp);

  ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received AMR frame with size <= 0"));
    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
incomplete_frame:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received incomplete AMR frames"));
    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
too_big:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received too many AMR frames for MTU"));
    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
}
Code example #19
File: gstrtph263ppay.c    Project: PeterXu/gst-mobile
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  gboolean fragmented;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  This assumes a low packet loss network.
   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm separates large frames at synchronisation points (Segments)
   *  (See RFC 4629 section 6). It would be interesting to have a property such as network
   *  quality to select between both packetization methods */
  /* TODO Add VRC support (See RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after 1st gop possible */
      guint parsed_len = 3;
      const guint8 *parse_data = NULL;

      parse_data = gst_adapter_map (rtph263ppay->adapter, avail);

      /* Check if we have a GOB or EOS / EOSSBS */
      /* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
      if (avail >= 3 && *parse_data == 0 && *(parse_data + 1) == 0
          && *(parse_data + 2) >= 0x80) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }
      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      while (parsed_len + 2 < avail) {
        if (parse_data[parsed_len] == 0 && parse_data[parsed_len + 1] == 0
            && parse_data[parsed_len + 2] >= 0x80) {
          next_gop = parsed_len;
          GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d",
              next_gop);
          break;
        }
        parsed_len++;
      }
      gst_adapter_unmap (rtph263ppay->adapter);
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    header_len = (fragmented && !found_gob) ? 2 : 0;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    payload_len = header_len + towrite;

    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    gst_adapter_copy (rtph263ppay->adapter, &payload[header_len], 0, towrite);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* write P=1 for picture-start packets and for fragments that start at a GOB header */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    gst_adapter_flush (rtph263ppay->adapter, towrite);

    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);

    avail -= towrite;
    fragmented = TRUE;
  }

  return ret;
}
Code example #20
/**
 * gst_rtp_base_audio_payload_flush:
 * @baseaudiopayload: a #GstRTPBaseAudioPayload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of the adapter as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * If @payload_len is -1, all pending bytes will be flushed. If @timestamp is
 * -1, the timestamp will be calculated automatically.
 *
 * Returns: a #GstFlowReturn
 */
GstFlowReturn
gst_rtp_base_audio_payload_flush (GstRTPBaseAudioPayload * baseaudiopayload,
    guint payload_len, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  GstAdapter *adapter;
  guint64 distance;

  priv = baseaudiopayload->priv;
  adapter = priv->adapter;

  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  if (payload_len == -1)
    payload_len = gst_adapter_available (adapter);

  /* nothing to do, just return */
  if (payload_len == 0)
    return GST_FLOW_OK;

  if (timestamp == -1) {
    /* calculate the timestamp */
    timestamp = gst_adapter_prev_pts (adapter, &distance);

    GST_LOG_OBJECT (baseaudiopayload,
        "last timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (timestamp), distance);

    if (GST_CLOCK_TIME_IS_VALID (timestamp) && distance > 0) {
      /* convert the number of bytes since the last timestamp to time and add to
       * the last seen timestamp */
      timestamp += priv->bytes_to_time (baseaudiopayload, distance);
    }
  }

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list && gst_adapter_available_fast (adapter) >= payload_len) {
    GstBuffer *buffer;
    /* we can quickly take a buffer out of the adapter without having to copy
     * anything. */
    buffer = gst_adapter_take_buffer (adapter, payload_len);

    ret =
        gst_rtp_base_audio_payload_push_buffer (baseaudiopayload, buffer,
        timestamp);
  } else {
    GstBuffer *paybuf;
    CopyMetaData data;


    /* create buffer to hold the payload */
    outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);

    paybuf = gst_adapter_take_buffer_fast (adapter, payload_len);

    data.pay = baseaudiopayload;
    data.outbuf = outbuf;
    gst_buffer_foreach_meta (paybuf, foreach_metadata, &data);
    outbuf = gst_buffer_append (outbuf, paybuf);

    /* set metadata */
    gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
        timestamp);

    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
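A hypothetical usage sketch of the documented flush call (the wrapper name is a placeholder): push everything pending in the base class adapter as one RTP packet and let the timestamp be derived from the adapter, using the documented -1 conventions.

static GstFlowReturn
my_pay_drain (GstRTPBaseAudioPayload * baseaudiopayload)
{
  /* -1 payload_len: flush all pending bytes;
   * GST_CLOCK_TIME_NONE (-1): the timestamp is calculated automatically */
  return gst_rtp_base_audio_payload_flush (baseaudiopayload, -1,
      GST_CLOCK_TIME_NONE);
}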
Code example #21
static GstFlowReturn
gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPGSMPay *rtpgsmpay;
  guint payload_len;
  GstBuffer *outbuf;
  GstMapInfo map;
  guint8 *payload;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;
  GstRTPBuffer rtp = { NULL };

  rtpgsmpay = GST_RTP_GSM_PAY (basepayload);

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* FIXME, only one GSM frame per RTP packet for now */
  payload_len = map.size;

  /* FIXME, just error out for now */
  if (payload_len > GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay))
    goto too_big;

  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  /* copy timestamp and duration */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  /* get payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* copy data in payload */
  payload = gst_rtp_buffer_get_payload (&rtp);
  memcpy (payload, map.data, map.size);

  gst_rtp_buffer_unmap (&rtp);

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (outbuf));

  ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;

  /* ERRORS */
too_big:
  {
    GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
        ("payload_len %u > mtu %u", payload_len,
            GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay)));
    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
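
One detail worth noting about the MTU check above: it compares the raw payload size against the MTU, which leaves the 12-byte RTP header out of the calculation. A minimal sketch of a stricter check using the gst_rtp_buffer_calc_packet_len() helper from the RTP library follows; the wrapper name example_fits_mtu is ours, not from the element.

#include <gst/rtp/gstrtpbuffer.h>

/* TRUE if an RTP packet carrying payload_len bytes (no padding, no CSRCs)
 * still fits into the configured MTU, header included */
static gboolean
example_fits_mtu (guint payload_len, guint mtu)
{
  return gst_rtp_buffer_calc_packet_len (payload_len, 0, 0) <= mtu;
}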
Code example #22
static GstFlowReturn
gst_rtp_base_audio_payload_push_buffer (GstRTPBaseAudioPayload *
    baseaudiopayload, GstBuffer * buffer, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint8 *payload;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  payload_len = gst_buffer_get_size (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list) {
    /* create just the RTP header buffer */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
  }

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    guint i, len;

    list = gst_buffer_list_new ();
    len = gst_buffer_list_length (list);
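    /* note: the list was just created, so len is 0 and the loop below,
     * including the g_warning, never runs; the buffer-list path is
     * effectively a stub here (see the sketch after this function) */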

    for (i = 0; i < len; i++) {
      /* FIXME */
      g_warning ("bufferlist not implemented");
      gst_buffer_list_add (list, outbuf);
      gst_buffer_list_add (list, buffer);
    }

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  } else {
    GstRTPBuffer rtp = { NULL };

    /* copy payload */
    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);
    gst_buffer_extract (buffer, 0, payload, payload_len);
    gst_rtp_buffer_unmap (&rtp);

    gst_buffer_unref (buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
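
The buffer-list branch above is left as a stub (see the inline note). A sketch of one way it could be completed, assuming the header-only RTP buffer already carries its metadata: append the payload buffer to it without copying and push the result as a one-entry list. This is an illustration under those assumptions, not the upstream implementation.

#include <gst/rtp/gstrtpbasepayload.h>

static GstFlowReturn
example_push_as_list (GstRTPBasePayload * basepayload, GstBuffer * header,
    GstBuffer * payload)
{
  GstBufferList *list;

  /* header was allocated with gst_rtp_buffer_new_allocate (0, 0, 0);
   * appending the payload buffer avoids a memcpy of the audio data */
  header = gst_buffer_append (header, payload);

  list = gst_buffer_list_new ();
  gst_buffer_list_add (list, header);

  return gst_rtp_base_payload_push_list (basepayload, list);
}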
Code example #23
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  guint line, offset;
  guint8 *yp, *up, *vp;
  guint ystride, uvstride;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field;
  GstVideoFrame frame;
  gint interlaced;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* number of bytes in one pixel group */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  /* start with line 0, offset 0 */
  for (field = 0; field < 1 + interlaced; field++) {
    line = field;
    offset = 0;

    /* write all lines */
    while (line < height) {
      guint left;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line;
      guint length, cont, pixels;

      /* get the max allowed payload length, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      if (field == 0) {
        GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer);
      } else {
        GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how many bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / rtpvrawpay->xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * rtpvrawpay->xinc;
          length = (pixels * pgroup) / rtpvrawpay->xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to the next line; we do this here to make the check below easier */
          line += rtpvrawpay->yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            offs /= rtpvrawpay->xinc;
            memcpy (outdata, yp + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = yp + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff =
                (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff =
                (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
      }
      gst_rtp_buffer_unmap (&rtp);
      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* push buffer */
      ret = gst_rtp_base_payload_push (payload, out);
    }

  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
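
The loop above depends on pgroup, xinc and yinc having been filled in by the element's caps negotiation. A sketch of those packing parameters for the formats handled here, following the RFC 4175 pixel-group definitions (reconstructed as an assumption, not copied from the element's setcaps handler; the helper name is ours):

#include <gst/video/video.h>

static gboolean
example_get_pack_params (GstVideoFormat format, guint * pgroup, guint * xinc,
    guint * yinc)
{
  *xinc = 1;
  *yinc = 1;

  switch (format) {
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      *pgroup = 3;              /* one pixel per group */
      break;
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
      *pgroup = 4;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      *pgroup = 3;              /* packed 4:4:4, alpha is not transmitted */
      break;
    case GST_VIDEO_FORMAT_UYVY:
      *pgroup = 4;              /* 4:2:2, one group covers two pixels */
      *xinc = 2;
      break;
    case GST_VIDEO_FORMAT_UYVP:
      *pgroup = 5;              /* 10-bit 4:2:2 */
      *xinc = 2;
      break;
    case GST_VIDEO_FORMAT_I420:
      *pgroup = 6;              /* planar 4:2:0, one group is a 2x2 block */
      *xinc = 2;
      *yinc = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      *pgroup = 6;              /* planar 4:1:1, four pixels per group */
      *xinc = 4;
      break;
    default:
      return FALSE;
  }
  return TRUE;
}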
Code example #24
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With fragmentation mode GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  It assumes a low packet-loss network.
   * With fragmentation mode GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm splits large frames at synchronisation points (segments)
   *  (see RFC 4629 section 6). It would be interesting to have a property such
   *  as network quality to select between the two packetization methods */
  /* TODO Add VRC support (see RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after the first possible GOB */

      /* check whether we have a GOB, EOS or EOSSBS header */
      /* FIXME EOS and EOSSBS packets should never contain any GOBs and vice-versa */
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for picture-start frames (non-fragmented) and fragments starting at a
     * GOB header, we need to remove the first two 0x00 bytes and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* when not fragmented, or when starting at a GOB header, set the P bit */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}
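
For reference, the masked scan used in SYNC mode above looks for the common prefix of the H.263 picture and GOB start codes: sixteen zero bits followed by a one. The mask 0xffff8000 keeps the top 17 bits of each 32-bit window and the pattern 0x00008000 requires exactly that prefix. A minimal sketch of the same call in isolation (the wrapper name is ours, not from the element):

#include <gst/base/gstadapter.h>

/* returns the offset of the first H.263 start-code prefix in the adapter,
 * or -1 if none is found in the given range */
static gssize
example_find_h263_start_code (GstAdapter * adapter, gsize offset, gsize size)
{
  return gst_adapter_masked_scan_uint32 (adapter, 0xffff8000, 0x00008000,
      offset, size);
}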