Example #1
GF_EXPORT
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer) 
{	
	u32 size;
	u16 port;
	char mediaName[30], payloadName[30];
	char sdp[20000], sdpLine[10000];

	if (!out_sdp_buffer) return GF_BAD_PARAM;

	gf_rtp_builder_get_payload_name(rtp->packetizer, payloadName, mediaName);
	gf_rtp_get_ports(rtp->channel, &port, NULL);

	sprintf(sdp, "m=%s %d RTP/%s %d\n", mediaName, port, rtp->packetizer->slMap.IV_length ? "SAVP" : "AVP", rtp->packetizer->PayloadType);
	sprintf(sdpLine, "a=rtpmap:%d %s/%d\n", rtp->packetizer->PayloadType, payloadName, rtp->packetizer->sl_config.timestampResolution);
	strcat(sdp, sdpLine);
	if (ESID && (rtp->packetizer->rtp_payt != GF_RTP_PAYT_3GPP_DIMS)) {
		sprintf(sdpLine, "a=mpeg4-esid:%d\n", ESID);
		strcat(sdp, sdpLine);		
	}

	if (width && height) {
		if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H263) {
			sprintf(sdpLine, "a=cliprect:0,0,%d,%d\n", height, width);
			strcat(sdp, sdpLine);
		}
		/*extensions for some mobile phones*/
		sprintf(sdpLine, "a=framesize:%d %d-%d\n", rtp->packetizer->PayloadType, width, height);
		strcat(sdp, sdpLine);
	}
		
	strcpy(sdpLine, "");

	/*AMR*/
	if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1\n", rtp->packetizer->PayloadType);
	}
	/*Text*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, isofile, isotrack);
		strcat(sdpLine, "\n");
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
	}
	/*H264/AVC*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC)) {
		GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;

		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
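				/* sprop-parameter-sets carries the base64-encoded SPS NAL units followed by the
				   PPS NAL units, comma-separated, as defined by the H.264 RTP payload format */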
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				if (i) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_odf_avc_cfg_del(avcc);
			strcat(sdpLine, "\n");
		}
	}
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) {
#ifndef GPAC_DISABLE_HEVC
		GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL;
		if (hevcc) {
			u32 count, i, j, b64s;
			char b64[200];
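			/* sprop-vps / sprop-sps / sprop-pps each carry the base64-encoded parameter set
			   NAL units of the matching type, comma-separated, per the HEVC RTP payload format */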
			sprintf(sdpLine, "a=fmtp:%d", rtp->packetizer->PayloadType);
			count = gf_list_count(hevcc->param_array);
			for (i = 0; i < count; i++) {
				GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevcc->param_array, i);
				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					strcat(sdpLine, "; sprop-sps=");						
				} else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					strcat(sdpLine, "; sprop-pps=");
				} else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					strcat(sdpLine, "; sprop-vps=");
				}
				for (j = 0; j < gf_list_count(ar->nalus); j++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					if (j) strcat(sdpLine, ", ");
					strcat(sdpLine, b64);
				}
			}
			gf_odf_hevc_cfg_del(hevcc);
			strcat(sdpLine, "\n");
		}
#endif
	}
	/*MPEG-4 decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dsi, dsi_len);
		strcat(sdpLine, "\n");

		if (rtp->packetizer->slMap.IV_length && KMS_URI) {
			if (!strnicmp(KMS_URI, "(key)", 5) || !strnicmp(KMS_URI, "(ipmp)", 6) || !strnicmp(KMS_URI, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, KMS_URI);
			strcat(sdpLine, "\n");
		}
	}
	/*DIMS decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", rtp->packetizer->PayloadType, 10);
		if (rtp->packetizer->flags & GP_RTP_DIMS_COMPRESSED) {
			strcat(sdpLine, ";content-coding=deflate");
		}
		strcat(sdpLine, "\n");
	}
	/*MPEG-4 Audio LATM*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 

		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 

		/* AudioSpecificConfig - PacketVideo patch: don't signal SBR and PS extensions, which are not allowed in LATM with audioMuxVersion=0 */
		if (dsi) gf_bs_write_data(bs, dsi, MIN(dsi_len, 2) ); 

		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 

		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size); 
		gf_free(config_bytes); 
		strcat(sdpLine, "\n");
	}

	strcat(sdp, sdpLine);

	size = (u32) strlen(sdp) + (*out_sdp_buffer ? (u32) strlen(*out_sdp_buffer) : 0) + 1;
	if ( !*out_sdp_buffer) {
		*out_sdp_buffer = gf_malloc(sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcpy(*out_sdp_buffer, sdp);
	} else {
		*out_sdp_buffer = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcat(*out_sdp_buffer, sdp);
	}
	return GF_OK;
} 
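
A minimal, hypothetical usage sketch for the function above, not part of the library code: it assumes a GF_RTPStreamer was already created and configured elsewhere, and the helper name and its arguments (rtp, esid, dsi, width, height) are illustrative. isofile/isotrack are only consulted for the 3GPP timed-text branch, so NULL/0 are passed here.

/* Hypothetical helper (illustrative names): append one configured stream's media
   description to a freshly allocated SDP buffer, print it and release it. */
static GF_Err dump_stream_sdp(GF_RTPStreamer *rtp, u16 esid, char *dsi, u32 dsi_len, u32 width, u32 height)
{
	char *sdp_media = NULL;	/* must be NULL, or a previously returned SDP buffer to append to */
	GF_Err e = gf_rtp_streamer_append_sdp_extended(rtp, esid, dsi, dsi_len,
		NULL /*isofile: only needed for 3GPP timed text*/, 0 /*isotrack*/,
		NULL /*KMS_URI: no ISMACryp key*/, width, height, &sdp_media);
	if (e != GF_OK) return e;
	fprintf(stdout, "%s", sdp_media);
	gf_free(sdp_media);
	return GF_OK;
}
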
Example #2
static GF_Err RP_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com)
{
	RTPStream *ch;
	RTPClient *priv = (RTPClient *)plug->priv;


	if (com->command_type==GF_NET_SERVICE_HAS_AUDIO) {
		u32 i;
		for (i=0; i<gf_list_count(priv->channels); i++) {
			ch = gf_list_get(priv->channels, i);
			if (ch->depacketizer->sl_map.StreamType==GF_STREAM_AUDIO)
				return GF_OK;
		}
		return GF_NOT_SUPPORTED;
	}
	if (com->command_type==GF_NET_SERVICE_MIGRATION_INFO) {
		RP_SaveSessionState(priv);
		priv->session_migration=1;
		if (priv->session_state_data) {
			com->migrate.data = priv->session_state_data;
			com->migrate.data_len = strlen(priv->session_state_data);
			return GF_OK;
		}
		return GF_NOT_SUPPORTED;
	}

	/*ignore commands other than channel-level ones*/
	if (!com->base.on_channel) {
		if (com->command_type==GF_NET_IS_CACHABLE) return GF_OK;
		return GF_NOT_SUPPORTED;
	}

	ch = RP_FindChannel(priv, com->base.on_channel, 0, NULL, 0);
	if (!ch) return GF_STREAM_NOT_FOUND;

	switch (com->command_type) {
	case GF_NET_CHAN_SET_PULL:
		if (ch->rtp_ch || ch->rtsp || !ch->control) return GF_NOT_SUPPORTED;
		/*embedded channels work in pull mode*/
		if (strstr(ch->control, "data:application/")) return GF_OK;
		return GF_NOT_SUPPORTED;
	case GF_NET_CHAN_INTERACTIVE:
		/*looks like pure RTP / multicast etc, not interactive*/
		if (!ch->control) return GF_NOT_SUPPORTED;
		/*emulated broadcast mode*/
		else if (ch->flags & RTP_FORCE_BROADCAST) return GF_NOT_SUPPORTED;
		/*regular rtsp mode*/
		else if (ch->flags & RTP_HAS_RANGE) return GF_OK;
		/*embedded data*/
		else if (strstr(ch->control, "application")) return GF_OK;
		return GF_NOT_SUPPORTED;
	case GF_NET_CHAN_BUFFER:
		if (!(ch->rtp_ch || ch->rtsp || !ch->control)) {
			com->buffer.max = com->buffer.min = 0;
		} else {
			const char *opt;
			/*amount of buffering in ms*/
			opt = gf_modules_get_option((GF_BaseInterface *)plug, "Network", "BufferLength");
			com->buffer.max = opt ? atoi(opt) : 1000;
			/*rebuffer low limit in ms - if the amount of buffering is less than this, rebuffering will never occur*/
			opt = gf_modules_get_option((GF_BaseInterface *)plug, "Network", "RebufferLength");
			if (opt) com->buffer.min = atoi(opt);
			else com->buffer.min = 500;
			if (com->buffer.min >= com->buffer.max ) com->buffer.min = 0;
		}
		return GF_OK;
	case GF_NET_CHAN_DURATION:
		com->duration.duration = (ch->flags & RTP_HAS_RANGE) ? (ch->range_end - ch->range_start) : 0;
		return GF_OK;
	/*RTP channel config is done upon connection, once the complete SL mapping is known;
	however, we must store some info not carried in the SDP*/
	case GF_NET_CHAN_CONFIG:
		if (com->cfg.frame_duration) ch->depacketizer->sl_hdr.au_duration = com->cfg.frame_duration;
		ch->ts_res = com->cfg.sl_config.timestampResolution;
		return GF_OK;

	case GF_NET_CHAN_PLAY:
		GF_LOG(GF_LOG_DEBUG, GF_LOG_RTP, ("[RTP] Processing play on channel @%08x - %s\n", ch, ch->rtsp ? "RTSP control" : "No control (RTP)" ));
		/*is this RTSP or direct RTP?*/
		ch->flags &= ~RTP_EOS;
		if (ch->rtsp) {
			if (ch->status==RTP_SessionResume) {
				const char *opt = gf_modules_get_option((GF_BaseInterface *) plug, "Streaming", "SessionMigrationPause");
				if (opt && !strcmp(opt, "yes")) {
					ch->status = RTP_Connected;
					com->play.start_range = ch->current_start;
				} else {
					ch->status = RTP_Running;
					return GF_OK;
				}
			}
			RP_UserCommand(ch->rtsp, ch, com);
		} else {
			ch->status = RTP_Running;
			if (ch->rtp_ch) {
				/*technically we shouldn't attempt to synchronize streams based on RTP, we should use RTCP.
				However, it may happen that the RTCP traffic is absent ...*/
				ch->check_rtp_time = RTP_SET_TIME_RTP;
				ch->rtcp_init = 0;
				gf_mx_p(priv->mx);
				RP_InitStream(ch, (ch->flags & RTP_CONNECTED) ? 1 : 0);
				gf_mx_v(priv->mx);
				gf_rtp_set_info_rtp(ch->rtp_ch, 0, 0, 0);
			} else {
				/*direct channel, store current start*/
				ch->current_start = com->play.start_range;
				ch->flags |= GF_RTP_NEW_AU;
				gf_rtp_depacketizer_reset(ch->depacketizer, 0);
			}
		}
		return GF_OK;
	case GF_NET_CHAN_STOP:
		/*is this RTSP or direct RTP?*/
		if (ch->rtsp) {
			if (! ch->owner->session_migration) {
				RP_UserCommand(ch->rtsp, ch, com);
			}
		} else {
			ch->status = RTP_Connected;
			ch->owner->last_ntp = 0;
		}
		ch->rtcp_init = 0;
		return GF_OK;
	case GF_NET_CHAN_SET_SPEED:
	case GF_NET_CHAN_PAUSE:
	case GF_NET_CHAN_RESUME:
		assert(ch->rtsp);
		RP_UserCommand(ch->rtsp, ch, com);
		return GF_OK;

	case GF_NET_CHAN_GET_DSI:
		if (ch->depacketizer && ch->depacketizer->sl_map.configSize) {
			com->get_dsi.dsi_len = ch->depacketizer->sl_map.configSize;
			com->get_dsi.dsi = (char*)gf_malloc(sizeof(char)*com->get_dsi.dsi_len);
			memcpy(com->get_dsi.dsi, ch->depacketizer->sl_map.config, sizeof(char)*com->get_dsi.dsi_len);
		} else {
			com->get_dsi.dsi = NULL;
			com->get_dsi.dsi_len = 0;
		}
		return GF_OK;


	case GF_NET_GET_STATS:
		memset(&com->net_stats, 0, sizeof(GF_NetComStats));
		if (ch->rtp_ch) {
			u32 time;
			Float bps;
			com->net_stats.pck_loss_percentage = gf_rtp_get_loss(ch->rtp_ch);
			if (ch->flags & RTP_INTERLEAVED) {
				com->net_stats.multiplex_port = gf_rtsp_get_session_port(ch->rtsp->session);
				com->net_stats.port = gf_rtp_get_low_interleave_id(ch->rtp_ch);
				com->net_stats.ctrl_port = gf_rtp_get_hight_interleave_id(ch->rtp_ch);
			} else {
				com->net_stats.multiplex_port = 0;
				gf_rtp_get_ports(ch->rtp_ch, &com->net_stats.port, &com->net_stats.ctrl_port);
			}
			if (ch->stat_stop_time) {
				time = ch->stat_stop_time - ch->stat_start_time;
			} else {
				time = gf_sys_clock() - ch->stat_start_time;
			}
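			/* convert the byte counters accumulated over 'time' milliseconds into bits per second */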
			bps = 8.0f * ch->rtp_bytes; bps *= 1000; bps /= time; com->net_stats.bw_down = (u32) bps;
			bps = 8.0f * ch->rtcp_bytes; bps *= 1000; bps /= time; com->net_stats.ctrl_bw_down = (u32) bps;
			bps = 8.0f * gf_rtp_get_tcp_bytes_sent(ch->rtp_ch); bps *= 1000; bps /= time; com->net_stats.ctrl_bw_up = (u32) bps;
		}
		return GF_OK;
	}
	return GF_NOT_SUPPORTED;
}
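
A minimal sketch, not from the module, of how the handler above might be exercised: it assumes the caller holds the GF_InputService pointer and the LPNETCHANNEL handle obtained when the channel was connected, and the helper name is illustrative. Since RP_ServiceCommand is static, such a caller would live in the same file; in GPAC the terminal normally reaches this code through the module's registered ServiceCommand callback instead.

/* Hypothetical helper (illustrative name): query the buffering policy and duration of one channel. */
static void query_channel_info(GF_InputService *plug, LPNETCHANNEL channel)
{
	GF_NetworkCommand com;

	memset(&com, 0, sizeof(GF_NetworkCommand));
	com.command_type = GF_NET_CHAN_BUFFER;
	com.base.on_channel = channel;
	if (RP_ServiceCommand(plug, &com) == GF_OK) {
		/* com.buffer.max and com.buffer.min now hold the buffering limits in ms (see handler above) */
	}

	memset(&com, 0, sizeof(GF_NetworkCommand));
	com.command_type = GF_NET_CHAN_DURATION;
	com.base.on_channel = channel;
	if (RP_ServiceCommand(plug, &com) == GF_OK) {
		/* com.duration.duration is the RTSP range length, or 0 for live/multicast streams */
	}
}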