Example #1
static gboolean
gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, guint size)
{
  const gchar *encode, *media;
  gboolean audio_bundled, res;

  if ((data[3] & 0x80) == 0) {  /* DSF flag */
    /* it's an NTSC format */
    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
      /* NTSC 50Mbps */
      encode = "314M-25/525-60";
    } else {                    /* 4:1:1 sampling */
      /* NTSC 25Mbps */
      encode = "SD-VCR/525-60";
    }
  } else {
    /* it's a PAL format */
    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
      /* PAL 50Mbps */
      encode = "314M-50/625-50";
    } else if ((data[5] & 0x07) == 0) { /* APT flag */
      /* PAL 25Mbps 4:2:0 */
      encode = "SD-VCR/625-50";
    } else
      /* PAL 25Mbps 4:1:1 */
      encode = "314M-25/625-50";
  }

  media = "video";
  audio_bundled = FALSE;

  switch (rtpdvpay->mode) {
    case GST_DV_PAY_MODE_AUDIO:
      media = "audio";
      break;
    case GST_DV_PAY_MODE_BUNDLED:
      audio_bundled = TRUE;
      break;
    default:
      break;
  }
  gst_basertppayload_set_options (GST_BASE_RTP_PAYLOAD (rtpdvpay), media, TRUE,
      "DV", 90000);

  if (audio_bundled) {
    res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpdvpay),
        "encode", G_TYPE_STRING, encode,
        "audio", G_TYPE_STRING, "bundled", NULL);
  } else {
    res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpdvpay),
        "encode", G_TYPE_STRING, encode, NULL);
  }
  return res;
}
Example #2
static GstCaps *
gst_basertppayload_sink_getcaps (GstPad * pad)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadClass *basertppayload_class;
  GstCaps *caps = NULL;

  GST_DEBUG_OBJECT (pad, "getting caps");

  basertppayload = GST_BASE_RTP_PAYLOAD (gst_pad_get_parent (pad));
  basertppayload_class = GST_BASE_RTP_PAYLOAD_GET_CLASS (basertppayload);

  if (basertppayload_class->get_caps)
    caps = basertppayload_class->get_caps (basertppayload, pad);

  if (!caps) {
    caps = GST_PAD_TEMPLATE_CAPS (GST_PAD_PAD_TEMPLATE (pad));
    GST_DEBUG_OBJECT (pad,
        "using pad template %p with caps %p %" GST_PTR_FORMAT,
        GST_PAD_PAD_TEMPLATE (pad), caps, caps);

    caps = gst_caps_ref (caps);
  }

  gst_object_unref (basertppayload);

  return caps;
}
Example #3
/**
 * gst_base_rtp_audio_payload_push:
 * @baseaudiopayload: a #GstBaseRTPAudioPayload
 * @data: data to set as payload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of @data as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * Returns: a #GstFlowReturn
 *
 * Since: 0.10.13
 */
GstFlowReturn
gst_base_rtp_audio_payload_push (GstBaseRTPAudioPayload * baseaudiopayload,
    const guint8 * data, guint payload_len, GstClockTime timestamp)
{
  GstBaseRTPPayload *basepayload;
  GstBuffer *outbuf;
  guint8 *payload;
  GstFlowReturn ret;

  basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create buffer to hold the payload */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  /* copy payload */
  payload = gst_rtp_buffer_get_payload (outbuf);
  memcpy (payload, data, payload_len);

  /* set metadata */
  gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  ret = gst_basertppayload_push (basepayload, outbuf);

  return ret;
}
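To make the doc comment above concrete, here is a minimal, hypothetical sketch of a helper that forwards one encoded audio frame through gst_base_rtp_audio_payload_push; the payloader instance, frame data, length and timestamp are assumed to come from the caller and are not part of the examples on this page.

/* Hypothetical sketch (not from the examples above): push a single
 * encoded audio frame downstream via the base audio payloader helper. */
static GstFlowReturn
push_one_audio_frame (GstBaseRTPAudioPayload * pay, const guint8 * frame,
    guint frame_size, GstClockTime ts)
{
  /* the helper allocates an RTP buffer, copies the frame into its
   * payload, timestamps it and pushes it on the source pad */
  return gst_base_rtp_audio_payload_push (pay, frame, frame_size, ts);
}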
Example #4
static void
gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay,
    GstRtpSPEEXPayClass * klass)
{
  GST_BASE_RTP_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
  GST_BASE_RTP_PAYLOAD_PT (rtpspeexpay) = 110;  /* dynamic payload type */
}
Example #5
static GstFlowReturn
gst_basertppayload_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadClass *basertppayload_class;
  GstFlowReturn ret;

  basertppayload = GST_BASE_RTP_PAYLOAD (gst_pad_get_parent (pad));
  basertppayload_class = GST_BASE_RTP_PAYLOAD_GET_CLASS (basertppayload);

  if (!basertppayload_class->handle_buffer)
    goto no_function;

  ret = basertppayload_class->handle_buffer (basertppayload, buffer);

  gst_object_unref (basertppayload);

  return ret;

  /* ERRORS */
no_function:
  {
    GST_ELEMENT_ERROR (basertppayload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not implement handle_buffer function"));
    gst_object_unref (basertppayload);
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
Example #6
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint8 *payload;
  guint avail;

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
  payload = gst_rtp_buffer_get_payload (outbuf);

  GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  gst_adapter_copy (pay->adapter, payload, 0, avail);

  /* flush bytes from adapter */
  gst_adapter_flush (pay->adapter, avail);
  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (outbuf, TRUE);
    pay->discont = FALSE;
  }

  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (pay), outbuf);

  return ret;
}
Example #7
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail;
  guint8 *payload;
  GstFlowReturn ret;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;
  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);

  /* get payload */
  payload = gst_rtp_buffer_get_payload (outbuf);

  /* copy stuff from adapter to payload */
  gst_adapter_copy (rtpmp2tpay->adapter, payload, 0, avail);

  GST_BUFFER_TIMESTAMP (outbuf) = rtpmp2tpay->first_ts;
  GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

  GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %d",
      GST_BUFFER_SIZE (outbuf));

  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp2tpay), outbuf);

  /* flush the adapter content */
  gst_adapter_flush (rtpmp2tpay->adapter, avail);

  return ret;
}
Example #8
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;
  guint8 *payload;
  gint packet_size;
  gint payload_size;

  avail = gst_adapter_available (rtpmpvpay->adapter);
  packet_size = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  /* check for the maximum size of the rtp buffer */
  if (packet_size > GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay)) {
    payload_size =
        GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay) -
        gst_rtp_buffer_calc_packet_len (4, 0, 0);
  } else {
    payload_size = avail;
  }
  outbuf = gst_rtp_buffer_new_allocate (4 + payload_size, 0, 0);
  /* enable MPEG Video-specific header
   *
   *  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   *                                  AN              FBV     FFV
   */
  payload = gst_rtp_buffer_get_payload (outbuf);
  /* fill in the MPEG Video-specific header */
  memset (payload, 0x0, 4);
  /* copy stuff from adapter to payload */
  gst_adapter_copy (rtpmpvpay->adapter, payload + 4, 0, payload_size);
  GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;
  GST_BUFFER_DURATION (outbuf) = rtpmpvpay->duration;

  GST_DEBUG_OBJECT (rtpmpvpay, "pushing buffer of size %d",
      GST_BUFFER_SIZE (outbuf));
  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
  gst_adapter_flush (rtpmpvpay->adapter, payload_size);

  /* update the timestamp and duration */
  rtpmpvpay->first_ts = timestamp;
  rtpmpvpay->duration = duration;

  /* check if there is enough data for another rtp buffer */
  avail = gst_adapter_available (rtpmpvpay->adapter);
  packet_size = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  if (packet_size >= GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay) && ret == GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rtpmpvpay, "Have enough data for another rtp packet");
    ret = gst_rtp_mpv_pay_flush (rtpmpvpay, timestamp, duration);
  }
  return ret;
}
Example #9
static void
gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay, GstRTPMPVPayClass * klass)
{
  GST_BASE_RTP_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
  GST_BASE_RTP_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;

  rtpmpvpay->adapter = gst_adapter_new ();
}
Example #10
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;

  guint8 *payload;

  avail = gst_adapter_available (rtpmpvpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint packet_len;
    guint payload_len;

    packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);

    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay));

    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 4, 0);

    outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);

    payload = gst_rtp_buffer_get_payload (outbuf);
    /* enable MPEG Video-specific header
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *                                  AN              FBV     FFV
     */

    /* fill in the MPEG Video-specific header 
     * data is set to 0x0 here
     */
    memset (payload, 0x0, 4);

    gst_adapter_copy (rtpmpvpay->adapter, payload + 4, 0, payload_len);
    gst_adapter_flush (rtpmpvpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
  }

  return ret;
}
Example #11
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;

  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  /* point to the payload for size headers and data */
  spayload = gst_rtp_buffer_get_payload (outbuf);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_TIMESTAMP (outbuf) == -1)
      GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);

    /* write the size to the header */
    size = GST_BUFFER_SIZE (buf);
    while (size > 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    size = GST_BUFFER_SIZE (buf);
    /* copy payload */
    memcpy (payload, GST_BUFFER_DATA (buf), size);
    payload += size;

    gst_buffer_unref (buf);
  }

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
Example #12
static void
gst_rtp_g723_pay_init (GstRTPG723Pay * pay, GstRTPG723PayClass * klass)
{
  GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);

  pay->adapter = gst_adapter_new ();

  payload->pt = GST_RTP_PAYLOAD_G723;
  gst_basertppayload_set_options (payload, "audio", FALSE, "G723", 8000);
}
Example #13
static guint32
gst_base_rtp_audio_payload_frame_bytes_to_rtptime (GstBaseRTPAudioPayload *
    payload, guint64 bytes)
{
  guint64 time;

  time = (bytes / payload->frame_size) * (payload->priv->frame_duration_ns);

  return gst_util_uint64_scale_int (time,
      GST_BASE_RTP_PAYLOAD (payload)->clock_rate, GST_SECOND);
}
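As a worked example with hypothetical numbers (not taken from any payloader above): with frame_size = 160 bytes, frame_duration_ns = 20 ms and a clock-rate of 8000 Hz, 320 bytes make 2 frames, i.e. 40 ms, which gst_util_uint64_scale_int turns into 40 ms * 8000 / 1 s = 320 RTP timestamp units. Note that the integer division bytes / frame_size drops any trailing partial frame in this variant.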
Example #14
static void
gst_basertppayload_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadPrivate *priv;

  basertppayload = GST_BASE_RTP_PAYLOAD (object);
  priv = basertppayload->priv;

  switch (prop_id) {
    case PROP_MTU:
      g_value_set_uint (value, basertppayload->mtu);
      break;
    case PROP_PT:
      g_value_set_uint (value, basertppayload->pt);
      break;
    case PROP_SSRC:
      if (priv->ssrc_random)
        g_value_set_uint (value, -1);
      else
        g_value_set_uint (value, basertppayload->ssrc);
      break;
    case PROP_TIMESTAMP_OFFSET:
      if (priv->ts_offset_random)
        g_value_set_uint (value, -1);
      else
        g_value_set_uint (value, (guint32) basertppayload->ts_offset);
      break;
    case PROP_SEQNUM_OFFSET:
      if (priv->seqnum_offset_random)
        g_value_set_int (value, -1);
      else
        g_value_set_int (value, (guint16) basertppayload->seqnum_offset);
      break;
    case PROP_MAX_PTIME:
      g_value_set_int64 (value, basertppayload->max_ptime);
      break;
    case PROP_MIN_PTIME:
      g_value_set_int64 (value, basertppayload->min_ptime);
      break;
    case PROP_TIMESTAMP:
      g_value_set_uint (value, basertppayload->timestamp);
      break;
    case PROP_SEQNUM:
      g_value_set_uint (value, basertppayload->seqnum);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #15
static GstFlowReturn
gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  /* the data available in the adapter is either smaller
   * than the MTU or bigger. In the case it is smaller, the complete
   * adapter contents can be put in one packet. In the case the
   * adapter has more than one MTU, we need to split the MP4V data
   * over multiple packets. */
  avail = gst_adapter_available (rtpmp4vpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmp4vpay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);

    gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
    gst_adapter_flush (rtpmp4vpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
  }

  return ret;
}
Example #16
static GstStateChangeReturn
gst_basertppayload_change_state (GstElement * element,
    GstStateChange transition)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadPrivate *priv;
  GstStateChangeReturn ret;

  basertppayload = GST_BASE_RTP_PAYLOAD (element);
  priv = basertppayload->priv;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_segment_init (&basertppayload->segment, GST_FORMAT_UNDEFINED);

      if (priv->seqnum_offset_random)
        basertppayload->seqnum_base =
            g_rand_int_range (basertppayload->seq_rand, 0, G_MAXUINT16);
      else
        basertppayload->seqnum_base = basertppayload->seqnum_offset;
      priv->next_seqnum = basertppayload->seqnum_base;
      basertppayload->seqnum = basertppayload->seqnum_base;

      if (priv->ssrc_random)
        basertppayload->current_ssrc = g_rand_int (basertppayload->ssrc_rand);
      else
        basertppayload->current_ssrc = basertppayload->ssrc;

      if (priv->ts_offset_random)
        basertppayload->ts_base = g_rand_int (basertppayload->ts_rand);
      else
        basertppayload->ts_base = basertppayload->ts_offset;
      basertppayload->timestamp = basertppayload->ts_base;
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }
  return ret;
}
Example #17
static void
gst_rtp_pcma_pay_init (GstRtpPmcaPay * rtppcmapay, GstRtpPmcaPayClass * klass)
{
  GstBaseRTPAudioPayload *basertpaudiopayload;

  basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtppcmapay);

  GST_BASE_RTP_PAYLOAD (rtppcmapay)->clock_rate = 8000;

  /* tell basertpaudiopayload that this is a sample based codec */
  gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);

  /* octet-per-sample is 1 for PCM */
  gst_base_rtp_audio_payload_set_sample_options (basertpaudiopayload, 1);
}
Example #18
static guint64
gst_base_rtp_audio_payload_sample_time_to_bytes (GstBaseRTPAudioPayload *
    payload, guint64 time)
{
  guint64 samples;

  samples = gst_util_uint64_scale_int (time,
      GST_BASE_RTP_PAYLOAD (payload)->clock_rate, GST_SECOND);

  /* avoid multiplication when we can */
  if (G_LIKELY (payload->sample_size != 8))
    return gst_util_uint64_scale_int (samples, payload->sample_size, 8);
  else
    return samples;
}
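For instance, with hypothetical values: 20 ms at a clock-rate of 8000 Hz gives 160 samples, and with a sample_size of 8 bits (one octet per sample, as configured for the PCMA payloader above) the function returns 160 bytes; a 16-bit sample_size would map the same duration to 320 bytes.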
Example #19
static gboolean
gst_rtp_mp4g_pay_new_caps (GstRtpMP4GPay * rtpmp4gpay)
{
  gchar *config;
  GValue v = { 0 };
  gboolean res;

#define MP4GCAPS						\
  "streamtype", G_TYPE_STRING, rtpmp4gpay->streamtype, 		\
  "profile-level-id", G_TYPE_STRING, rtpmp4gpay->profile,	\
  "mode", G_TYPE_STRING, rtpmp4gpay->mode,			\
  "config", G_TYPE_STRING, config,				\
  "sizelength", G_TYPE_STRING, "13",				\
  "indexlength", G_TYPE_STRING, "3",				\
  "indexdeltalength", G_TYPE_STRING, "3",			\
  NULL

  g_value_init (&v, GST_TYPE_BUFFER);
  gst_value_set_buffer (&v, rtpmp4gpay->config);
  config = gst_value_serialize (&v);

  /* hmm, silly: the varargs call has to be duplicated to optionally
   * add encoding-params */
  if (rtpmp4gpay->params) {
    res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4gpay),
        "encoding-params", G_TYPE_STRING, rtpmp4gpay->params, MP4GCAPS);
  } else {
    res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4gpay),
        MP4GCAPS);
  }

  g_value_unset (&v);
  g_free (config);

#undef MP4GCAPS
  return res;
}
Example #20
/* sample conversion functions */
static GstClockTime
gst_base_rtp_audio_payload_sample_bytes_to_time (GstBaseRTPAudioPayload *
    payload, guint64 bytes)
{
  guint64 rtptime;

  /* avoid division when we can */
  if (G_LIKELY (payload->sample_size != 8))
    rtptime = gst_util_uint64_scale_int (bytes, 8, payload->sample_size);
  else
    rtptime = bytes;

  return gst_util_uint64_scale_int (rtptime, GST_SECOND,
      GST_BASE_RTP_PAYLOAD (payload)->clock_rate);
}
Example #21
static gboolean
gst_basertppayload_event (GstPad * pad, GstEvent * event)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadClass *basertppayload_class;
  gboolean res;

  basertppayload = GST_BASE_RTP_PAYLOAD (gst_pad_get_parent (pad));
  basertppayload_class = GST_BASE_RTP_PAYLOAD_GET_CLASS (basertppayload);

  if (basertppayload_class->handle_event) {
    res = basertppayload_class->handle_event (pad, event);
    if (res)
      goto done;
  }

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      res = gst_pad_event_default (pad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      res = gst_pad_event_default (pad, event);
      gst_segment_init (&basertppayload->segment, GST_FORMAT_UNDEFINED);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate;
      GstFormat fmt;
      gint64 start, stop, position;

      gst_event_parse_new_segment (event, &update, &rate, &fmt, &start, &stop,
          &position);
      gst_segment_set_newsegment (&basertppayload->segment, update, rate, fmt,
          start, stop, position);

      /* fallthrough */
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

done:
  gst_object_unref (basertppayload);

  return res;
}
Example #22
static guint32
gst_base_rtp_audio_payload_frame_bytes_to_rtptime (GstBaseRTPAudioPayload *
    payload, guint64 bytes)
{
  guint64 framecount;
  guint64 time;

  framecount = bytes / payload->frame_size;
  if (G_UNLIKELY (bytes % payload->frame_size))
    framecount++;

  time = framecount * payload->priv->frame_duration_ns;

  return gst_util_uint64_scale_int (time,
      GST_BASE_RTP_PAYLOAD (payload)->clock_rate, GST_SECOND);
}
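Unlike the variant in Example #13, this version rounds a trailing partial frame up to a whole frame before converting: with the same hypothetical 160-byte, 20 ms frames and an 8000 Hz clock-rate, 330 bytes count as 3 frames (60 ms) and therefore 480 RTP timestamp units instead of 320.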
Example #23
static GstFlowReturn gst_rtp_sbc_pay_flush_buffers(GstRtpSBCPay *sbcpay)
{
	guint available;
	guint max_payload;
	GstBuffer *outbuf;
	guint8 *payload_data;
	guint frame_count;
	guint payload_length;
	struct rtp_payload *payload;

	if (sbcpay->frame_length == 0) {
		GST_ERROR_OBJECT(sbcpay, "Frame length is 0");
		return GST_FLOW_ERROR;
	}

	available = gst_adapter_available(sbcpay->adapter);

	max_payload = gst_rtp_buffer_calc_payload_len(
		GST_BASE_RTP_PAYLOAD_MTU(sbcpay) - RTP_SBC_PAYLOAD_HEADER_SIZE,
		0, 0);

	max_payload = MIN(max_payload, available);
	frame_count = max_payload / sbcpay->frame_length;
	payload_length = frame_count * sbcpay->frame_length;
	if (payload_length == 0) /* Nothing to send */
		return GST_FLOW_OK;

	outbuf = gst_rtp_buffer_new_allocate(payload_length +
			RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

	gst_rtp_buffer_set_payload_type(outbuf,
			GST_BASE_RTP_PAYLOAD_PT(sbcpay));

	payload_data = gst_rtp_buffer_get_payload(outbuf);
	payload = (struct rtp_payload *) payload_data;
	memset(payload, 0, sizeof(struct rtp_payload));
	payload->frame_count = frame_count;

	gst_adapter_copy(sbcpay->adapter, payload_data +
			RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);
	gst_adapter_flush(sbcpay->adapter, payload_length);

	GST_BUFFER_TIMESTAMP(outbuf) = sbcpay->timestamp;
	GST_DEBUG_OBJECT(sbcpay, "Pushing %d bytes", payload_length);

	return gst_basertppayload_push(GST_BASE_RTP_PAYLOAD(sbcpay), outbuf);
}
Example #24
static void
gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay,
    GstRTPSirenPayClass * klass)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPAudioPayload *basertpaudiopayload;

  basertppayload = GST_BASE_RTP_PAYLOAD (rtpsirenpay);
  basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpsirenpay);

  /* we don't set the payload type; it should be set by the application
   * through the pt property, otherwise the default 96 will be used */
  basertppayload->clock_rate = 16000;

  /* tell basertpaudiopayload that this is a frame based codec */
  gst_base_rtp_audio_payload_set_frame_based (basertpaudiopayload);
}
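As the comment above notes, the payload type is left to the application. A minimal sketch of doing that from application code follows; the helper name and the chosen value are assumptions, not taken from this page.

/* Hypothetical sketch: the application selects a dynamic payload type
 * (96-127) on the payloader element through the "pt" property. */
static void
set_dynamic_pt (GstElement * rtpsirenpay, guint pt)
{
  g_object_set (rtpsirenpay, "pt", pt, NULL);
}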
Example #25
static void
gst_basertppayload_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadPrivate *priv;
  gint64 val;

  basertppayload = GST_BASE_RTP_PAYLOAD (object);
  priv = basertppayload->priv;

  switch (prop_id) {
    case PROP_MTU:
      basertppayload->mtu = g_value_get_uint (value);
      break;
    case PROP_PT:
      basertppayload->pt = g_value_get_uint (value);
      break;
    case PROP_SSRC:
      val = g_value_get_uint (value);
      basertppayload->ssrc = val;
      priv->ssrc_random = FALSE;
      break;
    case PROP_TIMESTAMP_OFFSET:
      val = g_value_get_uint (value);
      basertppayload->ts_offset = val;
      priv->ts_offset_random = FALSE;
      break;
    case PROP_SEQNUM_OFFSET:
      val = g_value_get_int (value);
      basertppayload->seqnum_offset = val;
      priv->seqnum_offset_random = (val == -1);
      GST_DEBUG_OBJECT (basertppayload, "seqnum offset 0x%04x, random %d",
          basertppayload->seqnum_offset, priv->seqnum_offset_random);
      break;
    case PROP_MAX_PTIME:
      basertppayload->max_ptime = g_value_get_int64 (value);
      break;
    case PROP_MIN_PTIME:
      basertppayload->min_ptime = g_value_get_int64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #26
static gboolean
gst_basertppayload_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseRTPPayload *basertppayload;
  GstBaseRTPPayloadClass *basertppayload_class;
  gboolean ret = TRUE;

  GST_DEBUG_OBJECT (pad, "setting caps %" GST_PTR_FORMAT, caps);
  basertppayload = GST_BASE_RTP_PAYLOAD (gst_pad_get_parent (pad));
  basertppayload_class = GST_BASE_RTP_PAYLOAD_GET_CLASS (basertppayload);

  if (basertppayload_class->set_caps)
    ret = basertppayload_class->set_caps (basertppayload, caps);

  gst_object_unref (basertppayload);

  return ret;
}
Example #27
static GstFlowReturn
gst_rtp_asf_pay_parse_headers (GstRtpAsfPay * rtpasfpay)
{
  gchar *maxps;
  g_return_val_if_fail (rtpasfpay->headers, GST_FLOW_ERROR);

  if (!gst_asf_parse_headers (rtpasfpay->headers, &rtpasfpay->asfinfo))
    goto error;

  GST_DEBUG_OBJECT (rtpasfpay, "Packets number: %" G_GUINT64_FORMAT,
      rtpasfpay->asfinfo.packets_count);
  GST_DEBUG_OBJECT (rtpasfpay, "Packets size: %" G_GUINT32_FORMAT,
      rtpasfpay->asfinfo.packet_size);
  GST_DEBUG_OBJECT (rtpasfpay, "Broadcast mode: %s",
      rtpasfpay->asfinfo.broadcast ? "true" : "false");

  /* get the config for caps */
  g_free (rtpasfpay->config);
  rtpasfpay->config = g_base64_encode (GST_BUFFER_DATA (rtpasfpay->headers),
      GST_BUFFER_SIZE (rtpasfpay->headers));
  GST_DEBUG_OBJECT (rtpasfpay, "Serialized headers to base64 string %s",
      rtpasfpay->config);

  g_assert (rtpasfpay->config != NULL);
  GST_DEBUG_OBJECT (rtpasfpay, "Setting optional caps values: maxps=%"
      G_GUINT32_FORMAT " and config=%s", rtpasfpay->asfinfo.packet_size,
      rtpasfpay->config);
  maxps =
      g_strdup_printf ("%" G_GUINT32_FORMAT, rtpasfpay->asfinfo.packet_size);
  gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpasfpay), "maxps",
      G_TYPE_STRING, maxps, "config", G_TYPE_STRING, rtpasfpay->config, NULL);
  g_free (maxps);

  return GST_FLOW_OK;

error:
  {
    GST_ELEMENT_ERROR (rtpasfpay, STREAM, DECODE, (NULL),
        ("Error parsing headers"));
    return GST_FLOW_ERROR;
  }
}
Example #28
static gboolean gst_a2dp_sink_init_rtp_sbc_element(GstA2dpSink *self)
{
	GstElement *rtppay;

	/* if we already have a rtp, we don't need a new one */
	if (self->rtp != NULL)
		return TRUE;

	rtppay = gst_a2dp_sink_init_element(self, "rtpsbcpay", "rtp",
						self->capsfilter);
	if (rtppay == NULL)
		return FALSE;

	self->rtp = GST_BASE_RTP_PAYLOAD(rtppay);
	g_object_set(G_OBJECT(self->rtp), "min-frames", -1, NULL);

	gst_element_set_state(rtppay, GST_STATE_PAUSED);

	return TRUE;
}
Example #29
static void
gst_rtp_mp4v_pay_new_caps (GstRtpMP4VPay * rtpmp4vpay)
{
  gchar *profile, *config;
  GValue v = { 0 };

  profile = g_strdup_printf ("%d", rtpmp4vpay->profile);
  g_value_init (&v, GST_TYPE_BUFFER);
  gst_value_set_buffer (&v, rtpmp4vpay->config);
  config = gst_value_serialize (&v);

  gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4vpay),
      "profile-level-id", G_TYPE_STRING, profile,
      "config", G_TYPE_STRING, config, NULL);

  g_value_unset (&v);

  g_free (profile);
  g_free (config);
}
Example #30
static void
gst_basertppayload_finalize (GObject * object)
{
  GstBaseRTPPayload *basertppayload;

  basertppayload = GST_BASE_RTP_PAYLOAD (object);

  g_rand_free (basertppayload->seq_rand);
  basertppayload->seq_rand = NULL;
  g_rand_free (basertppayload->ssrc_rand);
  basertppayload->ssrc_rand = NULL;
  g_rand_free (basertppayload->ts_rand);
  basertppayload->ts_rand = NULL;

  g_free (basertppayload->media);
  basertppayload->media = NULL;
  g_free (basertppayload->encoding_name);
  basertppayload->encoding_name = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}