Example #1
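Updates a media viewer's position bar: it queries the playbin's current position (and, lazily, the duration), converts both from nanoseconds to milliseconds with GST_TIME_AS_MSECONDS for display formatting, and moves the position adjustment as a percentage. The adjustment's value-changed handler is blocked during the programmatic update so it does not trigger a seek.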
static void
update_current_position_bar (GthMediaViewerPage *self)
{
	GstFormat format;
	gint64    current_value = 0;

	format = GST_FORMAT_TIME;
	if (gst_element_query_position (self->priv->playbin, format, &current_value)) {
		char *s;

		if (self->priv->duration <= 0) {
			gst_element_query_duration (self->priv->playbin, format, &self->priv->duration);
			s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (self->priv->duration));
			gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_duration")), s);

			g_free (s);
		}

		/*
		g_print ("==> %" G_GINT64_FORMAT " / %" G_GINT64_FORMAT " (%0.3g)\n",
			 current_value,
			 self->priv->duration,
			 ((double) current_value / self->priv->duration) * 100.0);
		*/

		g_signal_handlers_block_by_func (GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);
		gtk_adjustment_set_value (GTK_ADJUSTMENT (GET_WIDGET ("position_adjustment")),
					  (self->priv->duration > 0) ? ((double) current_value / self->priv->duration) * 100.0 : 0.0);
		g_signal_handlers_unblock_by_func (GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);

		s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (current_value));
		gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_position")), s);

		g_free (s);
	}
}
Example #2
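The matching seek handler: when the user moves the slider, the adjustment's percentage is mapped back to a nanosecond offset and a flushing, key-unit-aligned seek is issued on the playbin; the position label is then refreshed with the millisecond value.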
static void
position_value_changed_cb (GtkAdjustment *adjustment,
			   gpointer       user_data)
{
	GthMediaViewerPage *self = user_data;
	gint64              current_value;
	char               *s;

	if (self->priv->playbin == NULL)
		return;

	current_value = (gint64) (gtk_adjustment_get_value (adjustment) / 100.0 * self->priv->duration);
	if (! gst_element_seek_simple (self->priv->playbin,
				       GST_FORMAT_TIME,
				       GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
				       current_value))
	{
		g_warning ("seek failed");
	}

	s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (current_value));
	gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_position")), s);

	g_free (s);
}
Example #3
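From gst-rtsp-server: computes how many milliseconds remain before an RTSP session times out. The session's last-access time plus its timeout (and 5 seconds of grace) is compared against the supplied current time, and the difference is converted with GST_TIME_AS_MSECONDS.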
/**
 * gst_rtsp_session_next_timeout:
 * @session: a #GstRTSPSession
 * @now: the current system time
 *
 * Get the number of milliseconds until the session will expire.
 *
 * Returns: the number of milliseconds before the session times out.
 */
gint
gst_rtsp_session_next_timeout (GstRTSPSession * session, GTimeVal * now)
{
  GstRTSPSessionPrivate *priv;
  gint res;
  GstClockTime last_access, now_ns;

  g_return_val_if_fail (GST_IS_RTSP_SESSION (session), -1);
  g_return_val_if_fail (now != NULL, -1);

  priv = session->priv;

  g_mutex_lock (&priv->lock);
  if (g_atomic_int_get (&priv->expire_count) != 0) {
    /* touch session when the expire count is not 0 */
    g_get_current_time (&priv->last_access);
  }

  last_access = GST_TIMEVAL_TO_TIME (priv->last_access);
  /* add the timeout and allow for 5 seconds of extra time */
  last_access += priv->timeout * GST_SECOND + (5 * GST_SECOND);
  g_mutex_unlock (&priv->lock);

  now_ns = GST_TIMEVAL_TO_TIME (*now);

  if (last_access > now_ns)
    res = GST_TIME_AS_MSECONDS (last_access - now_ns);
  else
    res = 0;

  return res;
}
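A minimal usage sketch for the function above, assuming a hypothetical maintenance loop; remove_expired_session() and schedule_next_check() are illustrative placeholders, not gst-rtsp-server API:

GTimeVal now;
gint timeout_ms;

g_get_current_time (&now);
timeout_ms = gst_rtsp_session_next_timeout (session, &now);
if (timeout_ms == 0)
  remove_expired_session (pool, session);   /* hypothetical cleanup helper */
else
  schedule_next_check (timeout_ms);         /* hypothetical scheduler */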
Example #4
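A GstBaseSink get_times vfunc: derives a buffer's start and end times from its timestamp and duration, falling back to one frame at the configured framerate when the duration is missing; GST_TIME_AS_MSECONDS is used only in the log messages.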
static void
gst_sh_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
			     GstClockTime * start, GstClockTime * end)
{
	GstSHVideoSink *sink = GST_SH_VIDEO_SINK (bsink);

	/* GST_TIME_AS_MSECONDS yields a guint64, so the log format must use
	 * G_GUINT64_FORMAT rather than %d */
	GST_LOG_OBJECT (sink, "Times requested. Buffer timestamp: %" G_GUINT64_FORMAT
		"ms duration: %" G_GUINT64_FORMAT "ms",
		GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)),
		GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)));

	if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
	{
		*start = GST_BUFFER_TIMESTAMP (buf);
		if (GST_BUFFER_DURATION_IS_VALID (buf) &&
		    GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)) > 0)
		{
			*end = *start + GST_BUFFER_DURATION (buf);
		}
		else if (sink->fps_numerator > 0)
		{
			/* no valid duration: assume one frame at the configured framerate */
			*end = *start + gst_util_uint64_scale_int (GST_SECOND,
				sink->fps_denominator, sink->fps_numerator);
		}
		else
		{
			GST_ELEMENT_ERROR ((GstElement *) sink, CORE, FAILED,
				("No framerate set."),
				("%s failed (No framerate set for playback)", __FUNCTION__));
		}
		GST_LOG_OBJECT (sink, "Times given, start: %" G_GUINT64_FORMAT
			"ms end: %" G_GUINT64_FORMAT "ms",
			GST_TIME_AS_MSECONDS (*start),
			GST_TIME_AS_MSECONDS (*end));
	}
	else
	{
		GST_ELEMENT_ERROR ((GstElement *) sink, CORE, FAILED,
			("No timestamp given."),
			("%s failed (No timestamp set for the buffer)", __FUNCTION__));
	}
}
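Example #5

Polls the pipeline's current playback position using the GStreamer 0.10 query API (the format is passed by reference) and caches it, converted to milliseconds, in a global variable.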
static void mplayer_position_update (void)
{
    GstFormat fmt = GST_FORMAT_TIME;
    gint64 pos = 0;

    /* GStreamer 0.10 query API: the format is passed by reference */
    gst_element_query_position (pipeline, &fmt, &pos);
    gCurrentPosition = GST_TIME_AS_MSECONDS (pos);
    /* GST_TIME_AS_MSECONDS yields a guint64; %lu is not portable for it */
    g_print ("Current Position=%" G_GUINT64_FORMAT "\n", (guint64) gCurrentPosition);
}
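Example #6

A C++ wrapper method (GStreamer 0.10): queries the pipeline duration in nanoseconds and converts it to milliseconds with GST_TIME_AS_MSECONDS, reads the number of video and audio streams to classify the content, then reads the video size and framerate from the sink pad to derive the FPS and total frame count.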
void GStreamerWrapper::retrieveVideoInfo()
{
	////////////////////////////////////////////////////////////////////////// Media Duration
	// Nanoseconds
	GstFormat gstFormat = GST_FORMAT_TIME;
	gst_element_query_duration( GST_ELEMENT( m_GstPipeline ), &gstFormat, &m_iDurationInNs );

	// Milliseconds
	m_dDurationInMs = GST_TIME_AS_MSECONDS( m_iDurationInNs );

	////////////////////////////////////////////////////////////////////////// Stream Info
	// Number of Video Streams
	g_object_get( m_GstPipeline, "n-video", &m_iNumVideoStreams, NULL );

	// Number of Audio Streams
	g_object_get( m_GstPipeline, "n-audio", &m_iNumAudioStreams, NULL );

	// Set Content Type according to the number of available Video and Audio streams
	if ( m_iNumVideoStreams > 0 && m_iNumAudioStreams > 0 )
		m_ContentType = VIDEO_AND_AUDIO;
	else if ( m_iNumVideoStreams > 0 )
		m_ContentType = VIDEO;
	else if ( m_iNumAudioStreams > 0 )
		m_ContentType = AUDIO;

	////////////////////////////////////////////////////////////////////////// Video Data
	if ( m_iNumVideoStreams > 0 )
	{
		GstPad* gstPad = gst_element_get_static_pad( m_GstVideoSink, "sink" );
		if ( gstPad )
		{
			// Video Size
			gst_video_get_size( GST_PAD( gstPad ), &m_iWidth, &m_iHeight );

			// Frame Rate
			const GValue* framerate = gst_video_frame_rate( gstPad );

			int iFpsNumerator = gst_value_get_fraction_numerator( framerate );
			int iFpsDenominator = gst_value_get_fraction_denominator( framerate );

			// Number of frames: divide in floating point so the
			// sub-second part of the duration is not truncated
			m_iNumberOfFrames = (float)m_iDurationInNs / GST_SECOND * (float)iFpsNumerator / (float)iFpsDenominator;

			// FPS
			m_fFps = (float)iFpsNumerator / (float)iFpsDenominator;

			gst_object_unref( gstPad );
		}
	}
}
Example #7
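Pulls a buffer from an appsink and, if its caps are compatible, hands the data pointer, size, timestamp, and caps string to a user callback. Note the units: the callback receives the timestamp in nanoseconds (GST_TIME_AS_NSECONDS), while the debug path for incompatible frames logs it in milliseconds.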
static void
shmdata_any_reader_on_new_buffer_from_source (GstElement * elt,
					      gpointer user_data)
{
  shmdata_any_reader_t *me = (shmdata_any_reader_t *) user_data;

  GstBuffer *buf;

  /* pull the next item, this can return NULL when there is no more data and
   * EOS has been received */
  buf = gst_app_sink_pull_buffer (GST_APP_SINK (me->sink_));

  if (me->on_data_ != NULL)
    {

      if (me->type_ == NULL
	  || gst_caps_is_always_compatible (me->data_caps_,
					    GST_BUFFER_CAPS (buf)))
	{
	  gchar *caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf)); 
	  me->on_data_ (me,
			(void *) buf,
			(void *) GST_BUFFER_DATA (buf),
			(int) GST_BUFFER_SIZE (buf),
			(unsigned long long)
			GST_TIME_AS_NSECONDS (GST_BUFFER_TIMESTAMP (buf)),
			(const char *)
			caps_string,
			(void *) me->on_data_user_data_);
	  g_free (caps_string);
	}
      else
	{
	  g_debug
	    ("incompatible data frame retrieved, data %p, data size %u, "
	     "timestamp %" G_GUINT64_FORMAT " ms, caps %" GST_PTR_FORMAT,
	     GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
	     GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)),
	     GST_BUFFER_CAPS (buf));
	}
    }
  /* if (buf) */
  /*  gst_buffer_unref (buf); */
}
Example #8
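A notify::source callback on playbin: if the discovered source element exposes them, its latency property is set in milliseconds (GST_TIME_AS_MSECONDS) and its tcp-timeout property in microseconds (GST_TIME_AS_USECONDS).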
static void
playbin_source_cb (GstElement *playbin,
    GParamSpec *pspec, GbpPlayer *player)
{
  GstElement *element;
  GObjectClass *klass;

  g_object_get (G_OBJECT (playbin), "source", &element, NULL);
  klass = G_OBJECT_GET_CLASS (element);

  if (g_object_class_find_property (klass, "latency")) {
    g_object_set (element, "latency",
        GST_TIME_AS_MSECONDS (player->priv->latency), NULL);
  }

  if (g_object_class_find_property (klass, "tcp-timeout")) {
    g_object_set (element, "tcp-timeout",
        GST_TIME_AS_USECONDS (player->priv->tcp_timeout), NULL);
  }
}
Example #9
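From the VideoToolbox encoder (GStreamer 0.10): the buffer's timestamp and duration are converted to milliseconds to build CMTime values with a timescale of 1000 before the frame is submitted for compression.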
static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
  GstCVApi *cv = self->ctx->cv;
  GstVTApi *vt = self->ctx->vt;
  CMTime ts, duration;
  CVPixelBufferRef pbuf = NULL;
  VTStatus vt_status;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  self->cur_inbuf = buf;

  ts = self->ctx->cm->CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
  duration = self->ctx->cm->CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);

  if (GST_IS_CORE_MEDIA_BUFFER (buf)) {
    GstCoreMediaBuffer *cmbuf = GST_CORE_MEDIA_BUFFER_CAST (buf);
    pbuf = gst_core_media_buffer_get_pixel_buffer (cmbuf);
  }

  if (pbuf == NULL) {
    CVReturn cv_ret;

    cv_ret = cv->CVPixelBufferCreateWithBytes (NULL,
        self->negotiated_width, self->negotiated_height,
        kCVPixelFormatType_422YpCbCr8Deprecated, GST_BUFFER_DATA (buf),
        self->negotiated_width * 2,
        (CVPixelBufferReleaseBytesCallback) gst_buffer_unref, buf, NULL, &pbuf);
    if (cv_ret != kCVReturnSuccess)
      goto cv_error;
    gst_buffer_ref (buf);
  }

  GST_OBJECT_LOCK (self);

  self->expect_keyframe = CFDictionaryContainsKey (self->options,
      *(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  if (self->expect_keyframe)
    gst_vtenc_clear_cached_caps_downstream (self);

  vt_status = self->ctx->vt->VTCompressionSessionEncodeFrame (self->session,
      pbuf, ts, duration, self->options, NULL, NULL);

  if (vt_status != 0) {
    GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
        vt_status);
  }

  self->ctx->vt->VTCompressionSessionCompleteFrames (self->session,
      *(self->ctx->cm->kCMTimeInvalid));

  GST_OBJECT_UNLOCK (self);

  cv->CVPixelBufferRelease (pbuf);
  self->cur_inbuf = NULL;
  gst_buffer_unref (buf);

  if (self->cur_outbufs->len > 0) {
    GstCoreMediaBuffer *cmbuf =
        GST_CORE_MEDIA_BUFFER_CAST (g_ptr_array_index (self->cur_outbufs, 0));
    if (!gst_vtenc_negotiate_downstream (self, cmbuf->sample_buf))
      ret = GST_FLOW_NOT_NEGOTIATED;
  }

  for (i = 0; i != self->cur_outbufs->len; i++) {
    GstBuffer *buf = g_ptr_array_index (self->cur_outbufs, i);

    if (ret == GST_FLOW_OK) {
      gst_buffer_set_caps (buf, GST_PAD_CAPS (self->srcpad));
      ret = gst_pad_push (self->srcpad, buf);
    } else {
      gst_buffer_unref (buf);
    }
  }
  g_ptr_array_set_size (self->cur_outbufs, 0);

  return ret;

cv_error:
  {
    self->cur_inbuf = NULL;
    gst_buffer_unref (buf);

    return GST_FLOW_ERROR;
  }
}
Example #10
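The GStreamer 1.0 port of the previous example: the same millisecond-based CMTimeMake conversion, with the pixel buffer now created from a mapped GstVideoFrame when the input is not already a Core Media buffer.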
static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
  GstVTApi *vt = self->ctx->vt;
  CMTime ts, duration;
  GstCoreMediaMeta *meta;
  CVPixelBufferRef pbuf = NULL;
  VTStatus vt_status;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  self->cur_inbuf = buf;

  ts = CMTimeMake (GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
  duration = CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);

  meta = gst_buffer_get_core_media_meta (buf);
  if (meta != NULL) {
    pbuf = gst_core_media_buffer_get_pixel_buffer (buf);
  }

  if (pbuf == NULL) {
    GstVTEncFrame *frame;
    CVReturn cv_ret;

    frame = gst_vtenc_frame_new (buf, &self->video_info);
    if (!frame)
      goto cv_error;

    {
      const size_t num_planes = GST_VIDEO_FRAME_N_PLANES (&frame->videoframe);
      void *plane_base_addresses[GST_VIDEO_MAX_PLANES];
      size_t plane_widths[GST_VIDEO_MAX_PLANES];
      size_t plane_heights[GST_VIDEO_MAX_PLANES];
      size_t plane_bytes_per_row[GST_VIDEO_MAX_PLANES];
      OSType pixel_format_type;
      size_t i;

      for (i = 0; i < num_planes; i++) {
        plane_base_addresses[i] =
            GST_VIDEO_FRAME_PLANE_DATA (&frame->videoframe, i);
        plane_widths[i] = GST_VIDEO_FRAME_COMP_WIDTH (&frame->videoframe, i);
        plane_heights[i] = GST_VIDEO_FRAME_COMP_HEIGHT (&frame->videoframe, i);
        plane_bytes_per_row[i] =
            GST_VIDEO_FRAME_COMP_STRIDE (&frame->videoframe, i);
      }

      switch (GST_VIDEO_INFO_FORMAT (&self->video_info)) {
        case GST_VIDEO_FORMAT_I420:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8Planar;
          break;
        case GST_VIDEO_FORMAT_NV12:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
          break;
        default:
          goto cv_error;
      }

      cv_ret = CVPixelBufferCreateWithPlanarBytes (NULL,
          self->negotiated_width, self->negotiated_height,
          pixel_format_type,
          frame,
          GST_VIDEO_FRAME_SIZE (&frame->videoframe),
          num_planes,
          plane_base_addresses,
          plane_widths,
          plane_heights,
          plane_bytes_per_row, gst_pixel_buffer_release_cb, frame, NULL, &pbuf);
      if (cv_ret != kCVReturnSuccess) {
        gst_vtenc_frame_free (frame);
        goto cv_error;
      }
    }
  }

  GST_OBJECT_LOCK (self);

  self->expect_keyframe = CFDictionaryContainsKey (self->options,
      *(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  if (self->expect_keyframe)
    gst_vtenc_clear_cached_caps_downstream (self);

  vt_status = self->ctx->vt->VTCompressionSessionEncodeFrame (self->session,
      pbuf, ts, duration, self->options, NULL, NULL);

  if (vt_status != 0) {
    GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
        vt_status);
  }

  self->ctx->vt->VTCompressionSessionCompleteFrames (self->session,
      kCMTimeInvalid);

  GST_OBJECT_UNLOCK (self);

  CVPixelBufferRelease (pbuf);
  self->cur_inbuf = NULL;
  gst_buffer_unref (buf);

  if (self->cur_outbufs->len > 0) {
    meta =
        gst_buffer_get_core_media_meta (g_ptr_array_index (self->cur_outbufs,
            0));
    if (!gst_vtenc_negotiate_downstream (self, meta->sample_buf))
      ret = GST_FLOW_NOT_NEGOTIATED;
  }

  for (i = 0; i != self->cur_outbufs->len; i++) {
    GstBuffer *buf = g_ptr_array_index (self->cur_outbufs, i);

    if (ret == GST_FLOW_OK) {
      ret = gst_pad_push (self->srcpad, buf);
    } else {
      gst_buffer_unref (buf);
    }
  }
  g_ptr_array_set_size (self->cur_outbufs, 0);

  return ret;

cv_error:
  {
    self->cur_inbuf = NULL;
    gst_buffer_unref (buf);

    return GST_FLOW_ERROR;
  }
}
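Example #11

From gst-rtsp-server's SDP generation: builds one SDP media section, including the m= and c= lines, rtpmap/fmtp attributes, and an optional MIKEY key-mgmt attribute for SRTP. When retransmission is enabled, GST_TIME_AS_MSECONDS converts the stream's retransmission time into the rtx-time value of the rtx payload's fmtp attribute.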
static void
make_media (GstSDPMessage * sdp, GstSDPInfo * info, GstRTSPMedia * media,
    GstRTSPStream * stream, GstStructure * s, GstRTSPProfile profile)
{
  GstSDPMedia *smedia;
  const gchar *caps_str, *caps_enc, *caps_params;
  gchar *tmp;
  gint caps_pt, caps_rate;
  guint n_fields, j;
  gboolean first;
  GString *fmtp;
  GstRTSPLowerTrans ltrans;
  GSocketFamily family;
  const gchar *addrtype, *proto;
  gchar *address;
  guint ttl;
  GstClockTime rtx_time;

  gst_sdp_media_new (&smedia);

  /* get media type and payload for the m= line */
  caps_str = gst_structure_get_string (s, "media");
  gst_sdp_media_set_media (smedia, caps_str);

  gst_structure_get_int (s, "payload", &caps_pt);
  tmp = g_strdup_printf ("%d", caps_pt);
  gst_sdp_media_add_format (smedia, tmp);
  g_free (tmp);

  gst_sdp_media_set_port_info (smedia, 0, 1);

  switch (profile) {
    case GST_RTSP_PROFILE_AVP:
      proto = "RTP/AVP";
      break;
    case GST_RTSP_PROFILE_AVPF:
      proto = "RTP/AVPF";
      break;
    case GST_RTSP_PROFILE_SAVP:
      proto = "RTP/SAVP";
      break;
    case GST_RTSP_PROFILE_SAVPF:
      proto = "RTP/SAVPF";
      break;
    default:
      proto = "udp";
      break;
  }
  gst_sdp_media_set_proto (smedia, proto);

  if (info->is_ipv6) {
    addrtype = "IP6";
    family = G_SOCKET_FAMILY_IPV6;
  } else {
    addrtype = "IP4";
    family = G_SOCKET_FAMILY_IPV4;
  }

  ltrans = gst_rtsp_stream_get_protocols (stream);
  if (ltrans == GST_RTSP_LOWER_TRANS_UDP_MCAST) {
    GstRTSPAddress *addr;

    addr = gst_rtsp_stream_get_multicast_address (stream, family);
    if (addr == NULL)
      goto no_multicast;

    address = g_strdup (addr->address);
    ttl = addr->ttl;
    gst_rtsp_address_free (addr);
  } else {
    ttl = 16;
    if (info->is_ipv6)
      address = g_strdup ("::");
    else
      address = g_strdup ("0.0.0.0");
  }

  /* for the c= line */
  gst_sdp_media_add_connection (smedia, "IN", addrtype, address, ttl, 1);
  g_free (address);

  /* get clock-rate, media type and params for the rtpmap attribute */
  gst_structure_get_int (s, "clock-rate", &caps_rate);
  caps_enc = gst_structure_get_string (s, "encoding-name");
  caps_params = gst_structure_get_string (s, "encoding-params");

  if (caps_enc) {
    if (caps_params)
      tmp = g_strdup_printf ("%d %s/%d/%s", caps_pt, caps_enc, caps_rate,
          caps_params);
    else
      tmp = g_strdup_printf ("%d %s/%d", caps_pt, caps_enc, caps_rate);

    gst_sdp_media_add_attribute (smedia, "rtpmap", tmp);
    g_free (tmp);
  }

  /* the config uri */
  tmp = gst_rtsp_stream_get_control (stream);
  gst_sdp_media_add_attribute (smedia, "control", tmp);
  g_free (tmp);


  /* check for srtp */
  do {
    GstBuffer *srtpkey;
    const GValue *val;
    const gchar *srtpcipher, *srtpauth, *srtcpcipher, *srtcpauth;
    GstMIKEYMessage *msg;
    GstMIKEYPayload *payload, *pkd;
    GBytes *bytes;
    GstMapInfo info;
    const guint8 *data;
    gsize size;
    gchar *base64;
    guint8 byte;
    guint32 ssrc;

    val = gst_structure_get_value (s, "srtp-key");
    if (val == NULL)
      break;

    srtpkey = gst_value_get_buffer (val);
    if (srtpkey == NULL)
      break;

    srtpcipher = gst_structure_get_string (s, "srtp-cipher");
    srtpauth = gst_structure_get_string (s, "srtp-auth");
    srtcpcipher = gst_structure_get_string (s, "srtcp-cipher");
    srtcpauth = gst_structure_get_string (s, "srtcp-auth");

    if (srtpcipher == NULL || srtpauth == NULL || srtcpcipher == NULL ||
        srtcpauth == NULL)
      break;

    msg = gst_mikey_message_new ();
    /* unencrypted MIKEY message, we send this over TLS so this is allowed */
    gst_mikey_message_set_info (msg, GST_MIKEY_VERSION, GST_MIKEY_TYPE_PSK_INIT,
        FALSE, GST_MIKEY_PRF_MIKEY_1, 0, GST_MIKEY_MAP_TYPE_SRTP);
    /* add policy '0' for our SSRC */
    gst_rtsp_stream_get_ssrc (stream, &ssrc);
    gst_mikey_message_add_cs_srtp (msg, 0, ssrc, 0);
    /* timestamp is now */
    gst_mikey_message_add_t_now_ntp_utc (msg);
    /* add some random data */
    gst_mikey_message_add_rand_len (msg, 16);

    /* the policy '0' is SRTP with the above discovered algorithms */
    payload = gst_mikey_payload_new (GST_MIKEY_PT_SP);
    gst_mikey_payload_sp_set (payload, 0, GST_MIKEY_SEC_PROTO_SRTP);

    /* only AES-CM is supported */
    byte = 1;
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_ENC_ALG, 1,
        &byte);
    /* Encryption key length */
    byte = enc_key_length_from_cipher_name (srtpcipher);
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_ENC_KEY_LEN, 1,
        &byte);
    /* only HMAC-SHA1 */
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_AUTH_ALG, 1,
        &byte);
    /* Authentication key length */
    byte = auth_key_length_from_auth_name (srtpauth);
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_AUTH_KEY_LEN, 1,
        &byte);
    /* we enable encryption on RTP and RTCP */
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_SRTP_ENC, 1,
        &byte);
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_SRTCP_ENC, 1,
        &byte);
    /* we enable authentication on RTP and RTCP */
    gst_mikey_payload_sp_add_param (payload, GST_MIKEY_SP_SRTP_SRTP_AUTH, 1,
        &byte);
    gst_mikey_message_add_payload (msg, payload);

    /* make unencrypted KEMAC */
    payload = gst_mikey_payload_new (GST_MIKEY_PT_KEMAC);
    gst_mikey_payload_kemac_set (payload, GST_MIKEY_ENC_NULL,
        GST_MIKEY_MAC_NULL);

    /* add the key in key data */
    pkd = gst_mikey_payload_new (GST_MIKEY_PT_KEY_DATA);
    gst_buffer_map (srtpkey, &info, GST_MAP_READ);
    gst_mikey_payload_key_data_set_key (pkd, GST_MIKEY_KD_TEK, info.size,
        info.data);
    gst_buffer_unmap (srtpkey, &info);
    /* add key data to KEMAC */
    gst_mikey_payload_kemac_add_sub (payload, pkd);
    gst_mikey_message_add_payload (msg, payload);

    /* now serialize this to bytes */
    bytes = gst_mikey_message_to_bytes (msg, NULL, NULL);
    gst_mikey_message_unref (msg);
    /* and make it into base64 */
    data = g_bytes_get_data (bytes, &size);
    base64 = g_base64_encode (data, size);
    g_bytes_unref (bytes);

    tmp = g_strdup_printf ("mikey %s", base64);
    g_free (base64);

    gst_sdp_media_add_attribute (smedia, "key-mgmt", tmp);
    g_free (tmp);
  } while (FALSE);

  /* collect all other properties and add them to fmtp or attributes */
  fmtp = g_string_new ("");
  g_string_append_printf (fmtp, "%d ", caps_pt);
  first = TRUE;
  n_fields = gst_structure_n_fields (s);
  for (j = 0; j < n_fields; j++) {
    const gchar *fname, *fval;

    fname = gst_structure_nth_field_name (s, j);

    /* filter out standard properties */
    if (!strcmp (fname, "media"))
      continue;
    if (!strcmp (fname, "payload"))
      continue;
    if (!strcmp (fname, "clock-rate"))
      continue;
    if (!strcmp (fname, "encoding-name"))
      continue;
    if (!strcmp (fname, "encoding-params"))
      continue;
    if (!strcmp (fname, "ssrc"))
      continue;
    if (!strcmp (fname, "timestamp-offset"))
      continue;
    if (!strcmp (fname, "seqnum-offset"))
      continue;
    if (g_str_has_prefix (fname, "srtp-"))
      continue;
    if (g_str_has_prefix (fname, "srtcp-"))
      continue;
    /* handled later */
    if (g_str_has_prefix (fname, "x-gst-rtsp-server-rtx-time"))
      continue;

    if (!strcmp (fname, "a-framesize")) {
      /* a-framesize attribute */
      if ((fval = gst_structure_get_string (s, fname))) {
        tmp = g_strdup_printf ("%d %s", caps_pt, fval);
        gst_sdp_media_add_attribute (smedia, fname + 2, tmp);
        g_free (tmp);
      }
      continue;
    }

    if (g_str_has_prefix (fname, "a-")) {
      /* attribute */
      if ((fval = gst_structure_get_string (s, fname)))
        gst_sdp_media_add_attribute (smedia, fname + 2, fval);
      continue;
    }
    if (g_str_has_prefix (fname, "x-")) {
      /* attribute */
      if ((fval = gst_structure_get_string (s, fname)))
        gst_sdp_media_add_attribute (smedia, fname, fval);
      continue;
    }

    if ((fval = gst_structure_get_string (s, fname))) {
      g_string_append_printf (fmtp, "%s%s=%s", first ? "" : ";", fname, fval);
      first = FALSE;
    }
  }

  if (!first) {
    tmp = g_string_free (fmtp, FALSE);
    gst_sdp_media_add_attribute (smedia, "fmtp", tmp);
    g_free (tmp);
  } else {
    g_string_free (fmtp, TRUE);
  }

  update_sdp_from_tags (stream, smedia);

  if ((rtx_time = gst_rtsp_stream_get_retransmission_time (stream))) {
    /* ssrc multiplexed retransmit functionality */
    guint rtx_pt = gst_rtsp_stream_get_retransmission_pt (stream);

    if (rtx_pt == 0) {
      g_warning ("failed to find an available dynamic payload type. "
          "Not adding retransmission");
    } else {
      gchar *tmp;

      tmp = g_strdup_printf ("%d", rtx_pt);
      gst_sdp_media_add_format (smedia, tmp);
      g_free (tmp);

      tmp = g_strdup_printf ("%d rtx/%d", rtx_pt, caps_rate);
      gst_sdp_media_add_attribute (smedia, "rtpmap", tmp);
      g_free (tmp);

      tmp =
          g_strdup_printf ("%d apt=%d;rtx-time=%" G_GUINT64_FORMAT, rtx_pt,
          caps_pt, GST_TIME_AS_MSECONDS (rtx_time));
      gst_sdp_media_add_attribute (smedia, "fmtp", tmp);
      g_free (tmp);
    }
  }

  gst_sdp_message_add_media (sdp, smedia);
  gst_sdp_media_free (smedia);

  return;

  /* ERRORS */
no_multicast:
  {
    gst_sdp_media_free (smedia);
    g_warning ("ignoring stream %d without multicast address",
        gst_rtsp_stream_get_index (stream));
    return;
  }
}