Example #1
static gboolean
get_next_chunk (GstAdapter *adapter, Chunk *chunk)
{
  const guint8 *data;
  guint length, avail;

  /* peek at the 8-byte chunk header (fourcc + length) without consuming it */
  data = gst_adapter_peek (adapter, 8);
  if (!data)
    return FALSE;

  chunk->fourcc = GST_READ_UINT32_LE (data);
  length = GST_READ_UINT32_BE (data + 4);
  chunk->length = length;

  /* drop the header, then expose the payload bytes that can be taken from the
   * first buffer in the adapter without any copying */
  gst_adapter_flush (adapter, 8);
  avail = gst_adapter_available_fast (adapter);
  chunk->data = avail ? gst_adapter_peek (adapter, avail) : NULL;
  chunk->available = avail;
  return TRUE;
}
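
A minimal driver sketch, not part of the original example: it shows how a chain-style function might feed incoming buffers into the adapter and pull chunks out with get_next_chunk(). handle_chunk() and feed_and_parse() are hypothetical names, and the loop assumes for simplicity that chunks never straddle buffer boundaries; only the GstAdapter calls shown are real API.

static void
feed_and_parse (GstAdapter *adapter, GstBuffer *buffer)
{
  Chunk chunk;

  /* the adapter takes ownership of the pushed buffer */
  gst_adapter_push (adapter, buffer);

  /* simplification: assume complete chunks, so every successfully parsed
   * header is followed by chunk.length payload bytes in the adapter */
  while (gst_adapter_available (adapter) >= 8 && get_next_chunk (adapter, &chunk)) {
    handle_chunk (&chunk);                      /* hypothetical consumer */
    gst_adapter_flush (adapter, chunk.length);  /* drop the consumed payload */
  }
}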
/**
 * gst_rtp_base_audio_payload_flush:
 * @baseaudiopayload: a #GstRTPBaseAudioPayload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of the adapter as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * If @payload_len is -1, all pending bytes will be flushed. If @timestamp is
 * -1, the timestamp will be calculated automatically.
 *
 * Returns: a #GstFlowReturn
 */
GstFlowReturn
gst_rtp_base_audio_payload_flush (GstRTPBaseAudioPayload * baseaudiopayload,
    guint payload_len, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  GstAdapter *adapter;
  guint64 distance;

  priv = baseaudiopayload->priv;
  adapter = priv->adapter;

  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  if (payload_len == -1)
    payload_len = gst_adapter_available (adapter);

  /* nothing to do, just return */
  if (payload_len == 0)
    return GST_FLOW_OK;

  if (timestamp == -1) {
    /* calculate the timestamp */
    timestamp = gst_adapter_prev_pts (adapter, &distance);

    GST_LOG_OBJECT (baseaudiopayload,
        "last timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (timestamp), distance);

    if (GST_CLOCK_TIME_IS_VALID (timestamp) && distance > 0) {
      /* convert the number of bytes since the last timestamp to time and add to
       * the last seen timestamp */
      timestamp += priv->bytes_to_time (baseaudiopayload, distance);
    }
  }

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list && gst_adapter_available_fast (adapter) >= payload_len) {
    GstBuffer *buffer;
    /* we can quickly take a buffer out of the adapter without having to copy
     * anything. */
    buffer = gst_adapter_take_buffer (adapter, payload_len);

    ret =
        gst_rtp_base_audio_payload_push_buffer (baseaudiopayload, buffer,
        timestamp);
  } else {
    GstBuffer *paybuf;
    CopyMetaData data;

    /* create buffer to hold the payload */
    outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);

    paybuf = gst_adapter_take_buffer_fast (adapter, payload_len);

    data.pay = baseaudiopayload;
    data.outbuf = outbuf;
    gst_buffer_foreach_meta (paybuf, foreach_metadata, &data);
    outbuf = gst_buffer_append (outbuf, paybuf);

    /* set metadata */
    gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
        timestamp);

    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
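
A minimal usage sketch, not taken from the GStreamer sources: a hypothetical payloader subclass drains whatever is still queued in the adapter, letting gst_rtp_base_audio_payload_flush() pick the payload length and compute the timestamp as described in the documentation block above. my_payloader_drain() is an invented helper name.

static GstFlowReturn
my_payloader_drain (GstRTPBaseAudioPayload *payload)
{
  /* -1 for payload_len flushes all pending bytes; -1 (GST_CLOCK_TIME_NONE)
   * for the timestamp lets the function derive it from the adapter */
  return gst_rtp_base_audio_payload_flush (payload, -1, -1);
}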