static gboolean
gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, gsize size)
{
  const gchar *encode, *media;
  gboolean audio_bundled, res;

  if ((data[3] & 0x80) == 0) {  /* DSF flag */
    /* it's an NTSC format */
    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
      /* NTSC 50Mbps */
      encode = "314M-25/525-60";
    } else {                    /* 4:1:1 sampling */
      /* NTSC 25Mbps */
      encode = "SD-VCR/525-60";
    }
  } else {
    /* it's a PAL format */
    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
      /* PAL 50Mbps */
      encode = "314M-50/625-50";
    } else if ((data[5] & 0x07) == 0) { /* APT flag */
      /* PAL 25Mbps 4:2:0 */
      encode = "SD-VCR/625-50";
    } else {
      /* PAL 25Mbps 4:1:1 */
      encode = "314M-25/625-50";
    }
  }

  media = "video";
  audio_bundled = FALSE;

  switch (rtpdvpay->mode) {
    case GST_DV_PAY_MODE_AUDIO:
      media = "audio";
      break;
    case GST_DV_PAY_MODE_BUNDLED:
      audio_bundled = TRUE;
      break;
    default:
      break;
  }
  gst_rtp_base_payload_set_options (GST_RTP_BASE_PAYLOAD (rtpdvpay), media,
      TRUE, "DV", 90000);

  if (audio_bundled) {
    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
        "encode", G_TYPE_STRING, encode,
        "audio", G_TYPE_STRING, "bundled", NULL);
  } else {
    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
        "encode", G_TYPE_STRING, encode, NULL);
  }
  return res;
}
static void
gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay)
{
  GstRTPBaseAudioPayload *rtpbaseaudiopayload;

  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtppcmapay);

  GST_RTP_BASE_PAYLOAD (rtppcmapay)->pt = GST_RTP_PAYLOAD_PCMA;
  GST_RTP_BASE_PAYLOAD (rtppcmapay)->clock_rate = 8000;

  /* tell rtpbaseaudiopayload that this is a sample based codec */
  gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);

  /* octet-per-sample is 1 for PCM */
  gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload, 1);
}
static GstFlowReturn
gst_rtp_base_payload_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadClass *rtpbasepayload_class;
  GstFlowReturn ret;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (parent);
  rtpbasepayload_class = GST_RTP_BASE_PAYLOAD_GET_CLASS (rtpbasepayload);

  if (!rtpbasepayload_class->handle_buffer)
    goto no_function;

  ret = rtpbasepayload_class->handle_buffer (rtpbasepayload, buffer);

  return ret;

  /* ERRORS */
no_function:
  {
    GST_ELEMENT_ERROR (rtpbasepayload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not implement handle_buffer function"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
static void
gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
{
  rtpmpapay->adapter = gst_adapter_new ();

  GST_RTP_BASE_PAYLOAD (rtpmpapay)->pt = GST_RTP_PAYLOAD_MPA;
}
/**
 * gst_rtp_base_audio_payload_push:
 * @baseaudiopayload: a #GstRTPBasePayload
 * @data: data to set as payload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of @data as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * Returns: a #GstFlowReturn
 */
GstFlowReturn
gst_rtp_base_audio_payload_push (GstRTPBaseAudioPayload * baseaudiopayload,
    const guint8 * data, guint payload_len, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstBuffer *outbuf;
  guint8 *payload;
  GstFlowReturn ret;
  GstRTPBuffer rtp = { NULL };

  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create buffer to hold the payload */
  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload,
      payload_len, 0, 0);

  /* copy payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
  payload = gst_rtp_buffer_get_payload (&rtp);
  memcpy (payload, data, payload_len);
  gst_rtp_buffer_unmap (&rtp);

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  ret = gst_rtp_base_payload_push (basepayload, outbuf);

  return ret;
}
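/* A minimal usage sketch, not from the original sources: a hypothetical
 * sample-based payloader vfunc showing how a subclass might hand raw audio
 * to gst_rtp_base_audio_payload_push(). The function name and the choice to
 * push the whole input buffer as a single RTP packet are assumptions. */
static GstFlowReturn
example_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buffer)
{
  GstRTPBaseAudioPayload *audiopayload =
      GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
  GstMapInfo map;
  GstFlowReturn ret;

  /* map the incoming audio and push it as one packet with its own timestamp */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  ret = gst_rtp_base_audio_payload_push (audiopayload, map.data, map.size,
      GST_BUFFER_PTS (buffer));
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  return ret;
}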
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;
  GstRTPBuffer rtp = { NULL, };

  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* point to the payload for size headers and data */
  spayload = gst_rtp_buffer_get_payload (&rtp);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_TIMESTAMP (outbuf) == -1)
      GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);

    /* write the size to the header */
    size = gst_buffer_get_size (buf);
    while (size > 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    /* copy payload */
    size = gst_buffer_get_size (buf);
    gst_buffer_extract (buf, 0, payload, size);
    payload += size;

    gst_buffer_unref (buf);
  }
  gst_rtp_buffer_unmap (&rtp);

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
static void
gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
{
  GST_RTP_BASE_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
  GST_RTP_BASE_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;

  rtpmp2tpay->adapter = gst_adapter_new ();
}
static void
gst_rtp_base_payload_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadPrivate *priv;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (object);
  priv = rtpbasepayload->priv;

  switch (prop_id) {
    case PROP_MTU:
      g_value_set_uint (value, rtpbasepayload->mtu);
      break;
    case PROP_PT:
      g_value_set_uint (value, rtpbasepayload->pt);
      break;
    case PROP_SSRC:
      if (priv->ssrc_random)
        g_value_set_uint (value, -1);
      else
        g_value_set_uint (value, rtpbasepayload->ssrc);
      break;
    case PROP_TIMESTAMP_OFFSET:
      if (priv->ts_offset_random)
        g_value_set_uint (value, -1);
      else
        g_value_set_uint (value, (guint32) rtpbasepayload->ts_offset);
      break;
    case PROP_SEQNUM_OFFSET:
      if (priv->seqnum_offset_random)
        g_value_set_int (value, -1);
      else
        g_value_set_int (value, (guint16) rtpbasepayload->seqnum_offset);
      break;
    case PROP_MAX_PTIME:
      g_value_set_int64 (value, rtpbasepayload->max_ptime);
      break;
    case PROP_MIN_PTIME:
      g_value_set_int64 (value, rtpbasepayload->min_ptime);
      break;
    case PROP_TIMESTAMP:
      g_value_set_uint (value, rtpbasepayload->timestamp);
      break;
    case PROP_SEQNUM:
      g_value_set_uint (value, rtpbasepayload->seqnum);
      break;
    case PROP_PERFECT_RTPTIME:
      g_value_set_boolean (value, priv->perfect_rtptime);
      break;
    case PROP_PTIME_MULTIPLE:
      g_value_set_int64 (value, rtpbasepayload->ptime_multiple);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
{
  GstRTPBasePayload *payload = GST_RTP_BASE_PAYLOAD (pay);

  pay->adapter = gst_adapter_new ();

  payload->pt = GST_RTP_PAYLOAD_G723;
}
static GstFlowReturn
gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint available;
  guint max_payload;
  GstBuffer *outbuf;
  guint8 *payload_data;
  guint frame_count;
  guint payload_length;
  struct rtp_payload *payload;

  if (sbcpay->frame_length == 0) {
    GST_ERROR_OBJECT (sbcpay, "Frame length is 0");
    return GST_FLOW_ERROR;
  }

  available = gst_adapter_available (sbcpay->adapter);

  max_payload =
      gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (sbcpay) -
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  max_payload = MIN (max_payload, available);
  frame_count = max_payload / sbcpay->frame_length;
  payload_length = frame_count * sbcpay->frame_length;
  if (payload_length == 0)      /* Nothing to send */
    return GST_FLOW_OK;

  outbuf = gst_rtp_buffer_new_allocate (payload_length +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  /* get payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (sbcpay));

  /* write header and copy data into payload */
  payload_data = gst_rtp_buffer_get_payload (&rtp);
  payload = (struct rtp_payload *) payload_data;
  memset (payload, 0, sizeof (struct rtp_payload));
  payload->frame_count = frame_count;

  gst_adapter_copy (sbcpay->adapter, payload_data +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);

  gst_rtp_buffer_unmap (&rtp);
  gst_adapter_flush (sbcpay->adapter, payload_length);

  /* FIXME: what about duration? */
  GST_BUFFER_PTS (outbuf) = sbcpay->timestamp;
  GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes", payload_length);

  return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (sbcpay), outbuf);
}
static GstFlowReturn
gst_rtp_base_audio_payload_push_buffer (GstRTPBaseAudioPayload *
    baseaudiopayload, GstBuffer * buffer, GstClockTime timestamp)
{
  GstRTPBasePayload *basepayload;
  GstRTPBaseAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  payload_len = gst_buffer_get_size (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  /* create just the RTP header buffer */
  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);

  /* set metadata */
  gst_rtp_base_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    guint i, len;

    list = gst_buffer_list_new ();
    len = gst_buffer_list_length (list);

    for (i = 0; i < len; i++) {
      /* FIXME */
      g_warning ("bufferlist not implemented");
      gst_buffer_list_add (list, outbuf);
      gst_buffer_list_add (list, buffer);
    }

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_rtp_base_payload_push_list (basepayload, list);
  } else {
    CopyMetaData data;

    /* copy payload */
    data.pay = baseaudiopayload;
    data.outbuf = outbuf;
    gst_buffer_foreach_meta (buffer, foreach_metadata, &data);
    outbuf = gst_buffer_append (outbuf, buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_rtp_base_payload_push (basepayload, outbuf);
  }

  return ret;
}
static void
gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
{
  GstRTPBasePayload *payload = GST_RTP_BASE_PAYLOAD (pay);

  pay->adapter = gst_adapter_new ();

  payload->pt = GST_RTP_PAYLOAD_G723;
  gst_rtp_base_payload_set_options (payload, "audio", FALSE, "G723", 8000);
}
static GstStateChangeReturn
gst_rtp_base_payload_change_state (GstElement * element,
    GstStateChange transition)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadPrivate *priv;
  GstStateChangeReturn ret;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (element);
  priv = rtpbasepayload->priv;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_segment_init (&rtpbasepayload->segment, GST_FORMAT_UNDEFINED);

      if (priv->seqnum_offset_random)
        rtpbasepayload->seqnum_base = g_random_int_range (0, G_MAXUINT16);
      else
        rtpbasepayload->seqnum_base = rtpbasepayload->seqnum_offset;
      priv->next_seqnum = rtpbasepayload->seqnum_base;
      rtpbasepayload->seqnum = rtpbasepayload->seqnum_base;

      if (priv->ssrc_random)
        rtpbasepayload->current_ssrc = g_random_int ();
      else
        rtpbasepayload->current_ssrc = rtpbasepayload->ssrc;

      if (priv->ts_offset_random)
        rtpbasepayload->ts_base = g_random_int ();
      else
        rtpbasepayload->ts_base = rtpbasepayload->ts_offset;
      rtpbasepayload->timestamp = rtpbasepayload->ts_base;
      g_atomic_int_set (&rtpbasepayload->priv->notified_first_timestamp, 1);
      priv->base_offset = GST_BUFFER_OFFSET_NONE;
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      g_atomic_int_set (&rtpbasepayload->priv->notified_first_timestamp, 1);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }
  return ret;
}
static void
gst_rtp_base_payload_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadPrivate *priv;
  gint64 val;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (object);
  priv = rtpbasepayload->priv;

  switch (prop_id) {
    case PROP_MTU:
      rtpbasepayload->mtu = g_value_get_uint (value);
      break;
    case PROP_PT:
      rtpbasepayload->pt = g_value_get_uint (value);
      break;
    case PROP_SSRC:
      val = g_value_get_uint (value);
      rtpbasepayload->ssrc = val;
      priv->ssrc_random = FALSE;
      break;
    case PROP_TIMESTAMP_OFFSET:
      val = g_value_get_uint (value);
      rtpbasepayload->ts_offset = val;
      priv->ts_offset_random = FALSE;
      break;
    case PROP_SEQNUM_OFFSET:
      val = g_value_get_int (value);
      rtpbasepayload->seqnum_offset = val;
      priv->seqnum_offset_random = (val == -1);
      GST_DEBUG_OBJECT (rtpbasepayload, "seqnum offset 0x%04x, random %d",
          rtpbasepayload->seqnum_offset, priv->seqnum_offset_random);
      break;
    case PROP_MAX_PTIME:
      rtpbasepayload->priv->prop_max_ptime = g_value_get_int64 (value);
      update_max_ptime (rtpbasepayload);
      break;
    case PROP_MIN_PTIME:
      rtpbasepayload->min_ptime = g_value_get_int64 (value);
      break;
    case PROP_PERFECT_RTPTIME:
      priv->perfect_rtptime = g_value_get_boolean (value);
      break;
    case PROP_PTIME_MULTIPLE:
      rtpbasepayload->ptime_multiple = g_value_get_int64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay)
{
  GstRTPBaseAudioPayload *rtpbaseaudiopayload;

  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpg722pay);

  GST_RTP_BASE_PAYLOAD (rtpg722pay)->pt = GST_RTP_PAYLOAD_G722;

  /* tell rtpbaseaudiopayload that this is a sample based codec */
  gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
}
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail, mtu;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp2tpay);

  while (avail > 0 && (ret == GST_FLOW_OK)) {
    guint towrite;
    guint payload_len;
    guint packet_len;
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, mtu);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
    payload_len -= payload_len % 188;   /* need whole packets */

    if (!payload_len)
      break;

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

    /* get payload */
    paybuf = gst_adapter_take_buffer_fast (rtpmp2tpay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmp2tpay), outbuf, paybuf, 0);
    outbuf = gst_buffer_append (outbuf, paybuf);
    avail -= payload_len;

    GST_BUFFER_PTS (outbuf) = rtpmp2tpay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

    GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %u",
        (guint) gst_buffer_get_size (outbuf));

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp2tpay), outbuf);
  }

  return ret;
}
static void
gst_rtp_base_payload_finalize (GObject * object)
{
  GstRTPBasePayload *rtpbasepayload;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (object);

  g_free (rtpbasepayload->media);
  rtpbasepayload->media = NULL;
  g_free (rtpbasepayload->encoding_name);
  rtpbasepayload->encoding_name = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* sample conversion functions */
static GstClockTime
gst_rtp_base_audio_payload_sample_bytes_to_time (GstRTPBaseAudioPayload *
    payload, guint64 bytes)
{
  guint64 rtptime;

  /* avoid division when we can */
  if (G_LIKELY (payload->sample_size != 8))
    rtptime = gst_util_uint64_scale_int (bytes, 8, payload->sample_size);
  else
    rtptime = bytes;

  return gst_util_uint64_scale_int (rtptime, GST_SECOND,
      GST_RTP_BASE_PAYLOAD (payload)->clock_rate);
}
static guint64
gst_rtp_base_audio_payload_sample_time_to_bytes (GstRTPBaseAudioPayload *
    payload, guint64 time)
{
  guint64 samples;

  samples = gst_util_uint64_scale_int (time,
      GST_RTP_BASE_PAYLOAD (payload)->clock_rate, GST_SECOND);

  /* avoid multiplication when we can */
  if (G_LIKELY (payload->sample_size != 8))
    return gst_util_uint64_scale_int (samples, payload->sample_size, 8);
  else
    return samples;
}
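/* Worked example for the two sample-based conversions above (values assumed,
 * not from the original sources): for an 8 kHz codec with sample_size = 8
 * bits, such as PCMA, one byte equals one sample, so
 * sample_bytes_to_time (payload, 160) = 160 * GST_SECOND / 8000 = 20 ms, and
 * sample_time_to_bytes (payload, 20 * GST_MSECOND) = 0.02 * 8000 = 160 bytes. */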
static gboolean
gst_rtp_mp4g_pay_new_caps (GstRtpMP4GPay * rtpmp4gpay)
{
  gchar *config;
  GValue v = { 0 };
  gboolean res;

#define MP4GCAPS \
  "streamtype", G_TYPE_STRING, rtpmp4gpay->streamtype, \
  "profile-level-id", G_TYPE_STRING, rtpmp4gpay->profile, \
  "mode", G_TYPE_STRING, rtpmp4gpay->mode, \
  "config", G_TYPE_STRING, config, \
  "sizelength", G_TYPE_STRING, "13", \
  "indexlength", G_TYPE_STRING, "3", \
  "indexdeltalength", G_TYPE_STRING, "3", \
  NULL

  g_value_init (&v, GST_TYPE_BUFFER);
  gst_value_set_buffer (&v, rtpmp4gpay->config);
  config = gst_value_serialize (&v);

  /* hmm, silly */
  if (rtpmp4gpay->params) {
    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
        "encoding-params", G_TYPE_STRING, rtpmp4gpay->params, MP4GCAPS);
  } else {
    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
        MP4GCAPS);
  }

  g_value_unset (&v);
  g_free (config);

#undef MP4GCAPS

  return res;
}
static gboolean
gst_rtp_base_payload_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadClass *rtpbasepayload_class;
  gboolean res = FALSE;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (parent);
  rtpbasepayload_class = GST_RTP_BASE_PAYLOAD_GET_CLASS (rtpbasepayload);

  if (rtpbasepayload_class->query)
    res = rtpbasepayload_class->query (rtpbasepayload, pad, query);

  return res;
}
static void
gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBaseAudioPayload *rtpbaseaudiopayload;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (rtpsirenpay);
  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpsirenpay);

  /* we don't set the payload type, it should be set by the application using
   * the pt property or the default 96 will be used */
  rtpbasepayload->clock_rate = 16000;

  /* tell rtpbaseaudiopayload that this is a frame based codec */
  gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
}
static guint32
gst_rtp_base_audio_payload_frame_bytes_to_rtptime (GstRTPBaseAudioPayload *
    payload, guint64 bytes)
{
  guint64 framecount;
  guint64 time;

  framecount = bytes / payload->frame_size;
  if (G_UNLIKELY (bytes % payload->frame_size))
    framecount++;

  time = framecount * payload->priv->frame_duration_ns;

  return gst_util_uint64_scale_int (time,
      GST_RTP_BASE_PAYLOAD (payload)->clock_rate, GST_SECOND);
}
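/* Worked example for the frame-based conversion above (values assumed, not
 * from the original sources): with frame_size = 40 bytes, a frame duration of
 * 20 ms and clock_rate = 16000, passing bytes = 100 gives framecount = 3
 * (100 / 40 rounded up to whole frames), time = 60 ms, and an RTP time of
 * 0.06 * 16000 = 960 clock units. */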
static GstFlowReturn
gst_rtp_asf_pay_parse_headers (GstRtpAsfPay * rtpasfpay)
{
  gchar *maxps;
  GstMapInfo map;

  g_return_val_if_fail (rtpasfpay->headers, GST_FLOW_ERROR);

  if (!gst_asf_parse_headers (rtpasfpay->headers, &rtpasfpay->asfinfo))
    goto error;

  GST_DEBUG_OBJECT (rtpasfpay, "Packets number: %" G_GUINT64_FORMAT,
      rtpasfpay->asfinfo.packets_count);
  GST_DEBUG_OBJECT (rtpasfpay, "Packets size: %" G_GUINT32_FORMAT,
      rtpasfpay->asfinfo.packet_size);
  GST_DEBUG_OBJECT (rtpasfpay, "Broadcast mode: %s",
      rtpasfpay->asfinfo.broadcast ? "true" : "false");

  /* get the config for caps */
  g_free (rtpasfpay->config);
  gst_buffer_map (rtpasfpay->headers, &map, GST_MAP_READ);
  rtpasfpay->config = g_base64_encode (map.data, map.size);
  gst_buffer_unmap (rtpasfpay->headers, &map);
  GST_DEBUG_OBJECT (rtpasfpay, "Serialized headers to base64 string %s",
      rtpasfpay->config);

  g_assert (rtpasfpay->config != NULL);
  GST_DEBUG_OBJECT (rtpasfpay, "Setting optional caps values: maxps=%"
      G_GUINT32_FORMAT " and config=%s", rtpasfpay->asfinfo.packet_size,
      rtpasfpay->config);
  maxps =
      g_strdup_printf ("%" G_GUINT32_FORMAT, rtpasfpay->asfinfo.packet_size);
  gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpasfpay), "maxps",
      G_TYPE_STRING, maxps, "config", G_TYPE_STRING, rtpasfpay->config, NULL);
  g_free (maxps);

  return GST_FLOW_OK;

error:
  {
    GST_ELEMENT_ERROR (rtpasfpay, STREAM, DECODE, (NULL),
        ("Error parsing headers"));
    return GST_FLOW_ERROR;
  }
}
static gboolean
gst_rtp_base_payload_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstRTPBasePayload *rtpbasepayload;
  GstRTPBasePayloadClass *rtpbasepayload_class;
  gboolean res = FALSE;

  rtpbasepayload = GST_RTP_BASE_PAYLOAD (parent);
  rtpbasepayload_class = GST_RTP_BASE_PAYLOAD_GET_CLASS (rtpbasepayload);

  if (rtpbasepayload_class->sink_event)
    res = rtpbasepayload_class->sink_event (rtpbasepayload, event);
  else
    gst_event_unref (event);

  return res;
}
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint8 *payload;
  guint avail;
  GstRTPBuffer rtp = { NULL };

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
  payload = gst_rtp_buffer_get_payload (&rtp);

  GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  gst_adapter_copy (pay->adapter, payload, 0, avail);

  /* flush bytes from adapter */
  gst_adapter_flush (pay->adapter, avail);
  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (&rtp, TRUE);
    pay->discont = FALSE;
  }
  gst_rtp_buffer_unmap (&rtp);

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);

  return ret;
}
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf, *payload_buf;
  GstFlowReturn ret;
  guint avail;
  GstRTPBuffer rtp = { NULL };

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  GST_BUFFER_PTS (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  payload_buf = gst_adapter_take_buffer_fast (pay->adapter, avail);

  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (&rtp, TRUE);
    pay->discont = FALSE;
  }
  gst_rtp_buffer_unmap (&rtp);

  gst_rtp_copy_meta (GST_ELEMENT_CAST (pay), outbuf, payload_buf,
      g_quark_from_static_string (GST_META_TAG_AUDIO_STR));
  outbuf = gst_buffer_append (outbuf, payload_buf);

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);

  return ret;
}
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);

    buffer = gst_buffer_make_writable (buffer);
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't fill completely an output buffer we
     * push it when we add an fragment. Because it seems that
     * it is not possible to determine where a asf packet
     * fragment ends inside a rtp packet payload.
     * This flag tells us to push the packet. */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
          0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first asf packet, its send time is the
       * rtp packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* there is not enough room for any more buffers */
    if (force_push || size_left <= 8) {

      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);

  return ret;
}
static GstFlowReturn
gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint16 frag_offset;
  GstBufferList *list;

  /* the data available in the adapter is either smaller
   * than the MTU or bigger. In the case it is smaller, the complete
   * adapter contents can be put in one packet. In the case the
   * adapter has more than one MTU, we need to split the MPA data
   * over multiple packets. The frag_offset in each packet header
   * needs to be updated with the position in the MPA frame. */
  avail = gst_adapter_available (rtpmpapay->adapter);

  ret = GST_FLOW_OK;

  list =
      gst_buffer_list_new_sized (avail / (GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay) -
          RTP_HEADER_LEN) + 1);

  frag_offset = 0;
  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (4, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    payload_len -= 4;

    gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);

    /*
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |             MBZ               |          Frag_offset          |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    payload = gst_rtp_buffer_get_payload (&rtp);
    payload[0] = 0;
    payload[1] = 0;
    payload[2] = frag_offset >> 8;
    payload[3] = frag_offset & 0xff;

    avail -= payload_len;
    frag_offset += payload_len;

    if (avail == 0)
      gst_rtp_buffer_set_marker (&rtp, TRUE);

    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmpapay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmpapay), outbuf, paybuf,
        g_quark_from_static_string (GST_META_TAG_AUDIO_STR));
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_PTS (outbuf) = rtpmpapay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
    gst_buffer_list_add (list, outbuf);
  }

  ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmpapay), list);

  return ret;
}
static gboolean
gst_rtp_celt_pay_parse_ident (GstRtpCELTPay * rtpceltpay,
    const guint8 * data, guint size)
{
  guint32 version, header_size, rate, nb_channels, frame_size, overlap;
  guint32 bytes_per_packet;
  GstRTPBasePayload *payload;
  gchar *cstr, *fsstr;
  gboolean res;

  /* we need the header string (8), the version string (20), the version
   * and the header length. */
  if (size < 36)
    goto too_small;

  if (!g_str_has_prefix ((const gchar *) data, "CELT "))
    goto wrong_header;

  /* skip header and version string */
  data += 28;

  version = GST_READ_UINT32_LE (data);
  GST_DEBUG_OBJECT (rtpceltpay, "version %08x", version);
#if 0
  if (version != 1)
    goto wrong_version;
#endif

  data += 4;
  /* ensure sizes */
  header_size = GST_READ_UINT32_LE (data);
  if (header_size < 56)
    goto header_too_small;

  if (size < header_size)
    goto payload_too_small;

  data += 4;
  rate = GST_READ_UINT32_LE (data);
  data += 4;
  nb_channels = GST_READ_UINT32_LE (data);
  data += 4;
  frame_size = GST_READ_UINT32_LE (data);
  data += 4;
  overlap = GST_READ_UINT32_LE (data);
  data += 4;
  bytes_per_packet = GST_READ_UINT32_LE (data);

  GST_DEBUG_OBJECT (rtpceltpay, "rate %d, nb_channels %d, frame_size %d",
      rate, nb_channels, frame_size);
  GST_DEBUG_OBJECT (rtpceltpay, "overlap %d, bytes_per_packet %d",
      overlap, bytes_per_packet);

  payload = GST_RTP_BASE_PAYLOAD (rtpceltpay);

  gst_rtp_base_payload_set_options (payload, "audio", FALSE, "CELT", rate);
  cstr = g_strdup_printf ("%d", nb_channels);
  fsstr = g_strdup_printf ("%d", frame_size);
  res = gst_rtp_base_payload_set_outcaps (payload, "encoding-params",
      G_TYPE_STRING, cstr, "frame-size", G_TYPE_STRING, fsstr, NULL);
  g_free (cstr);
  g_free (fsstr);

  return res;

  /* ERRORS */
too_small:
  {
    GST_DEBUG_OBJECT (rtpceltpay,
        "ident packet too small, need at least 36 bytes");
    return FALSE;
  }
wrong_header:
  {
    GST_DEBUG_OBJECT (rtpceltpay,
        "ident packet does not start with \"CELT \"");
    return FALSE;
  }
#if 0
wrong_version:
  {
    GST_DEBUG_OBJECT (rtpceltpay, "can only handle version 1, have version %d",
        version);
    return FALSE;
  }
#endif
header_too_small:
  {
    GST_DEBUG_OBJECT (rtpceltpay,
        "header size too small, need at least 56 bytes, " "got only %d",
        header_size);
    return FALSE;
  }
payload_too_small:
  {
    GST_DEBUG_OBJECT (rtpceltpay,
        "payload too small, need at least %d bytes, got only %d", header_size,
        size);
    return FALSE;
  }
}