Example No. 1
void linphone_call_init_media_streams(LinphoneCall *call){
	LinphoneCore *lc=call->core;
	SalMediaDescription *md=call->localdesc;
	AudioStream *audiostream;
	
	call->audiostream=audiostream=audio_stream_new(md->streams[0].port,linphone_core_ipv6_enabled(lc));
	if (linphone_core_echo_limiter_enabled(lc)){
		const char *type=lp_config_get_string(lc->config,"sound","el_type","mic");
		if (strcasecmp(type,"mic")==0)
			audio_stream_enable_echo_limiter(audiostream,ELControlMic);
		else if (strcasecmp(type,"full")==0)
			audio_stream_enable_echo_limiter(audiostream,ELControlFull);
	}
	audio_stream_enable_gain_control(audiostream,TRUE);
	if (linphone_core_echo_cancellation_enabled(lc)){
		int len,delay,framesize;
		const char *statestr=lp_config_get_string(lc->config,"sound","ec_state",NULL);
		len=lp_config_get_int(lc->config,"sound","ec_tail_len",0);
		delay=lp_config_get_int(lc->config,"sound","ec_delay",0);
		framesize=lp_config_get_int(lc->config,"sound","ec_framesize",0);
		audio_stream_set_echo_canceller_params(audiostream,len,delay,framesize);
		if (statestr && audiostream->ec){
			ms_filter_call_method(audiostream->ec,MS_ECHO_CANCELLER_SET_STATE_STRING,(void*)statestr);
		}
	}
	audio_stream_enable_automatic_gain_control(audiostream,linphone_core_agc_enabled(lc));
	{
		int enabled=lp_config_get_int(lc->config,"sound","noisegate",0);
		audio_stream_enable_noise_gate(audiostream,enabled);
	}
	
	if (lc->a_rtp)
		rtp_session_set_transports(audiostream->session,lc->a_rtp,lc->a_rtcp);

	call->audiostream_app_evq = ortp_ev_queue_new();
	rtp_session_register_event_queue(audiostream->session,call->audiostream_app_evq);

#ifdef VIDEO_ENABLED

	if ((lc->video_conf.display || lc->video_conf.capture) && md->streams[1].port>0){
		call->videostream=video_stream_new(md->streams[1].port,linphone_core_ipv6_enabled(lc));
		if (lc->video_conf.displaytype != NULL)
			video_stream_set_display_filter_name(call->videostream,lc->video_conf.displaytype);
		video_stream_set_event_callback(call->videostream,video_stream_event_cb, call);
		if (lc->v_rtp)
			rtp_session_set_transports(call->videostream->session,lc->v_rtp,lc->v_rtcp);
		call->videostream_app_evq = ortp_ev_queue_new();
		rtp_session_register_event_queue(call->videostream->session,call->videostream_app_evq);
#ifdef TEST_EXT_RENDERER
		video_stream_set_render_callback(call->videostream,rendercb,NULL);
#endif
	}
#else
	call->videostream=NULL;
#endif
}
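The queues registered above (call->audiostream_app_evq, call->videostream_app_evq) have to be drained periodically by the application. The helper below is only a hedged sketch of such a polling step using the standard oRTP event API (ortp_ev_queue_get, ortp_event_get_type, ortp_event_get_data, ortp_event_destroy); the function name and the event handling shown are illustrative, not linphone's actual implementation.

static void example_drain_call_events(LinphoneCall *call){
	OrtpEvent *ev;
	if (call->audiostream_app_evq==NULL) return;
	/* Pop every pending event, inspect it, then free it. */
	while ((ev=ortp_ev_queue_get(call->audiostream_app_evq))!=NULL){
		OrtpEventType evt=ortp_event_get_type(ev);
		OrtpEventData *evd=ortp_event_get_data(ev);
		if (evt==ORTP_EVENT_TELEPHONE_EVENT){
			ms_message("DTMF received: %i", evd->info.telephone_event);
		}else if (evt==ORTP_EVENT_RTCP_PACKET_RECEIVED){
			ms_message("RTCP packet received on audio stream.");
		}
		ortp_event_destroy(ev);
	}
}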
Example No. 2
AudioStream *audio_stream_new(int loc_rtp_port, int loc_rtcp_port, bool_t ipv6){
	AudioStream *stream=(AudioStream *)ms_new0(AudioStream,1);
	MSFilterDesc *ec_desc=ms_filter_lookup_by_name("MSOslec");
	
	ms_filter_enable_statistics(TRUE);
	ms_filter_reset_statistics();

	stream->ms.type = AudioStreamType;
	stream->ms.session=create_duplex_rtpsession(loc_rtp_port,loc_rtcp_port,ipv6);
	/*some filters are created right now to allow configuration by the application before start() */
	stream->ms.rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	stream->ms.ice_check_list=NULL;
	stream->ms.qi=ms_quality_indicator_new(stream->ms.session);

	if (ec_desc!=NULL)
		stream->ec=ms_filter_new_from_desc(ec_desc);
	else
#if defined(BUILD_WEBRTC_AECM)
		stream->ec=ms_filter_new(MS_WEBRTC_AEC_ID);
#else
		stream->ec=ms_filter_new(MS_SPEEX_EC_ID);
#endif

	stream->ms.evq=ortp_ev_queue_new();
	rtp_session_register_event_queue(stream->ms.session,stream->ms.evq);
	stream->play_dtmfs=TRUE;
	stream->use_gc=FALSE;
	stream->use_agc=FALSE;
	stream->use_ng=FALSE;
	stream->features=AUDIO_STREAM_FEATURE_ALL;
	return stream;
}
static void loss_rate_estimation() {
	bool_t supported = ms_filter_codec_supported("pcma");
	if( supported ) {
		LossRateEstimatorCtx ctx;
		stream_manager_t * marielle, * margaux;
		int loss_rate = 15;

		start_adaptive_stream(MSAudio, &marielle, &margaux, PCMA8_PAYLOAD_TYPE, 8000, 0, loss_rate, 0, 0);
		ctx.estimator=ortp_loss_rate_estimator_new(120, 2500, marielle->audio_stream->ms.sessions.rtp_session);
		ctx.q = ortp_ev_queue_new();
		rtp_session_register_event_queue(marielle->audio_stream->ms.sessions.rtp_session, ctx.q);
		ctx.loss_rate = loss_rate;

		/*loss rate should be the initial one*/
		wait_for_until_with_parse_events(&marielle->audio_stream->ms, &margaux->audio_stream->ms, &loss_rate, 100, 10000, event_queue_cb,&ctx,NULL,NULL);

		/*let's set some duplication. loss rate should NOT be changed */
		rtp_session_set_duplication_ratio(marielle->audio_stream->ms.sessions.rtp_session, 10);
		wait_for_until_with_parse_events(&marielle->audio_stream->ms, &margaux->audio_stream->ms, &loss_rate, 100, 10000, event_queue_cb,&ctx,NULL,NULL);

		stop_adaptive_stream(marielle,margaux);
		ortp_loss_rate_estimator_destroy(ctx.estimator);
		ortp_ev_queue_destroy(ctx.q);
	}
}
Example No. 4
VideoStream *video_stream_new(int locport, bool_t use_ipv6){
	VideoStream *stream = (VideoStream *)ms_new0 (VideoStream, 1);
	stream->session=create_duplex_rtpsession(locport,use_ipv6);
	stream->evq=ortp_ev_queue_new();
	stream->rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	rtp_session_register_event_queue(stream->session,stream->evq);
	return stream;
}
static void create_text_stream(text_stream_tester_t *tst, int payload_type) {
	tst->ts = text_stream_new2(tst->local_ip, tst->local_rtp, tst->local_rtcp);
	tst->local_rtp = rtp_session_get_local_port(tst->ts->ms.sessions.rtp_session);
	tst->local_rtcp = rtp_session_get_local_rtcp_port(tst->ts->ms.sessions.rtp_session);
	reset_stats(&tst->stats);
	rtp_session_set_multicast_loopback(tst->ts->ms.sessions.rtp_session, TRUE);
	tst->stats.q = ortp_ev_queue_new();
	rtp_session_register_event_queue(tst->ts->ms.sessions.rtp_session, tst->stats.q);
	tst->payload_type = payload_type;
}
Example No. 6
VideoStream *video_stream_new(int locport, bool_t use_ipv6){
	VideoStream *stream = (VideoStream *)ms_new0 (VideoStream, 1);
	stream->session=create_duplex_rtpsession(locport,use_ipv6);
	stream->evq=ortp_ev_queue_new();
	stream->rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	rtp_session_register_event_queue(stream->session,stream->evq);
	stream->sent_vsize.width=MS_VIDEO_SIZE_CIF_W;
	stream->sent_vsize.height=MS_VIDEO_SIZE_CIF_H;
	stream->dir=VideoStreamSendRecv;
	choose_display_name(stream);

	return stream;
}
Example No. 7
AudioStream *audio_stream_new(int locport, bool_t ipv6){
	AudioStream *stream=(AudioStream *)ms_new0(AudioStream,1);
	stream->session=create_duplex_rtpsession(locport,ipv6);
	stream->rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	stream->evq=ortp_ev_queue_new();
	rtp_session_register_event_queue(stream->session,stream->evq);
	stream->play_dtmfs=TRUE;
#ifndef ENABLED_MCU_MEDIA_SERVER
	stream->use_gc=FALSE;
	stream->use_agc=FALSE;
	stream->use_ng=FALSE;
#endif // ENABLED_MCU_MEDIA_SERVER

	return stream;
}
Example No. 8
void media_stream_init(MediaStream *stream, MSFactory *factory, const MSMediaStreamSessions *sessions) {
	stream->sessions = *sessions;
	
	stream->evd = ortp_ev_dispatcher_new(stream->sessions.rtp_session);
	stream->evq = ortp_ev_queue_new();
	stream->factory = factory; /*the factory is used later to instantiate everything in mediastreamer2.*/
	rtp_session_register_event_queue(stream->sessions.rtp_session, stream->evq);
	
	/*give the zrtp and dtls contexts a back-pointer to the full set of stream sessions*/
	if (sessions->zrtp_context != NULL) {
		ms_zrtp_set_stream_sessions(sessions->zrtp_context, &stream->sessions);
	}
	if (sessions->dtls_context != NULL) {
		ms_dtls_srtp_set_stream_sessions(sessions->dtls_context, &stream->sessions);
	}
}
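A hedged sketch of the matching teardown, assuming the usual oRTP rule that every queue passed to rtp_session_register_event_queue() is unregistered and destroyed when the stream is released. The function name is illustrative only; the library's real cleanup lives in its own media_stream_* teardown code.

static void example_media_stream_cleanup(MediaStream *stream){
	if (stream->evq!=NULL){
		/* unregister before destroying, so the session stops posting into the queue */
		rtp_session_unregister_event_queue(stream->sessions.rtp_session, stream->evq);
		ortp_ev_queue_destroy(stream->evq);
		stream->evq=NULL;
	}
	if (stream->evd!=NULL){
		ortp_ev_dispatcher_destroy(stream->evd);
		stream->evd=NULL;
	}
}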
Example No. 9
AudioStream *audio_stream_new(int locport, bool_t ipv6){
	AudioStream *stream=(AudioStream *)ms_new0(AudioStream,1);
	stream->session=create_duplex_rtpsession(locport,ipv6);
	stream->rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	stream->play_dtmfs=TRUE;
	stream->use_gc=FALSE;
	stream->use_agc=FALSE;
	stream->use_ng=FALSE;
	stream->use_nr=FALSE;
	stream->record_enabled=FALSE;
	stream->quality_cb = NULL;
	stream->userdata = NULL;
	stream->evq=ortp_ev_queue_new();
	rtp_session_register_event_queue(stream->session,stream->evq);
	return stream;
}
Example No. 10
VideoStream *video_stream_new(int loc_rtp_port, int loc_rtcp_port, bool_t use_ipv6){
	VideoStream *stream = (VideoStream *)ms_new0 (VideoStream, 1);
	stream->ms.type = VideoStreamType;
	stream->ms.session=create_duplex_rtpsession(loc_rtp_port,loc_rtcp_port,use_ipv6);
	stream->ms.qi=ms_quality_indicator_new(stream->ms.session);
	stream->ms.evq=ortp_ev_queue_new();
	stream->ms.rtpsend=ms_filter_new(MS_RTP_SEND_ID);
	stream->ms.ice_check_list=NULL;
	rtp_session_register_event_queue(stream->ms.session,stream->ms.evq);
	MS_VIDEO_SIZE_ASSIGN(stream->sent_vsize, CIF);
	stream->dir=VideoStreamSendRecv;
	stream->display_filter_auto_rotate_enabled=0;
	stream->source_performs_encoding = FALSE;
	stream->output_performs_decoding = FALSE;
	choose_display_name(stream);

	return stream;
}
static void create_video_stream(video_stream_tester_t *vst, int payload_type) {
	vst->vs = video_stream_new2(vst->local_ip, vst->local_rtp, vst->local_rtcp);
	vst->vs->staticimage_webcam_fps_optimization = FALSE;
	vst->local_rtp = rtp_session_get_local_port(vst->vs->ms.sessions.rtp_session);
	vst->local_rtcp = rtp_session_get_local_rtcp_port(vst->vs->ms.sessions.rtp_session);
	reset_stats(&vst->stats);
	rtp_session_set_multicast_loopback(vst->vs->ms.sessions.rtp_session, TRUE);
	vst->stats.q = ortp_ev_queue_new();
	rtp_session_register_event_queue(vst->vs->ms.sessions.rtp_session, vst->stats.q);
	video_stream_set_event_callback(vst->vs, video_stream_event_cb, vst);
	if (vst->vconf) {
		PayloadType *pt = rtp_profile_get_payload(&rtp_profile, payload_type);
		CU_ASSERT_PTR_NOT_NULL_FATAL(pt);
		pt->normal_bitrate = vst->vconf->required_bitrate;
		video_stream_set_fps(vst->vs, vst->vconf->fps);
		video_stream_set_sent_video_size(vst->vs, vst->vconf->vsize);
	}
	vst->payload_type = payload_type;
}
Example No. 12
void VodWnd::vod(const char *ip, int rtp_port, int rtcp_port)
{
    server_ip_ = ip;
    server_rtp_port_ = rtp_port;
    server_rtcp_port_ = rtcp_port;

    rtp_ = rtp_session_new(RTP_SESSION_RECVONLY);
    rtp_session_set_payload_type(rtp_, 100);
    rtp_session_set_local_addr(rtp_, util_get_myip(), 0, 0);
    rtp_session_set_remote_addr_and_port(rtp_, ip, rtp_port, rtcp_port);

    JBParameters jb;
    jb.adaptive = 1;
    jb.max_packets = 3000;
    jb.max_size = -1;
    jb.min_size = jb.nom_size = 300;
    rtp_session_set_jitter_buffer_params(rtp_, &jb);

    rtp_session_enable_jitter_buffer(rtp_, 0);

    evq_ = ortp_ev_queue_new();
    rtp_session_register_event_queue(rtp_, evq_);

    ticker_ = ms_ticker_new();

    filter_rtp_ = ms_filter_new(MS_RTP_RECV_ID);
    ms_filter_call_method(filter_rtp_, MS_RTP_RECV_SET_SESSION, rtp_);

    filter_decoder_ = ms_filter_new(MS_H264_DEC_ID);

    ZonekeyYUVSinkCallbackParam cbp;
    cbp.ctx = this;
    cbp.push = cb_yuv;
    filter_sink_ = ms_filter_new_from_name("ZonekeyYUVSink");
    ms_filter_call_method(filter_sink_, ZONEKEY_METHOD_YUV_SINK_SET_CALLBACK_PARAM, &cbp);

    ms_filter_link(filter_rtp_, 0, filter_decoder_, 0);
    ms_filter_link(filter_decoder_, 0, filter_sink_, 0);

    ms_ticker_attach(ticker_, filter_rtp_);
}
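For completeness, a hedged teardown sketch for a receive pipeline like the one built above: detach the ticker first, then unlink and destroy the filters, then release the event queue and the session. The function and parameter names are illustrative; only the mediastreamer2/oRTP calls themselves are real API.

static void example_vod_teardown(MSTicker *ticker, MSFilter *f_rtp, MSFilter *f_dec,
                                 MSFilter *f_sink, RtpSession *rtp, OrtpEvQueue *evq){
	ms_ticker_detach(ticker, f_rtp);          /* stop scheduling before touching the graph */
	ms_filter_unlink(f_rtp, 0, f_dec, 0);
	ms_filter_unlink(f_dec, 0, f_sink, 0);
	ms_filter_destroy(f_sink);
	ms_filter_destroy(f_dec);
	ms_filter_destroy(f_rtp);
	ms_ticker_destroy(ticker);
	rtp_session_unregister_event_queue(rtp, evq);
	ortp_ev_queue_destroy(evq);
	rtp_session_destroy(rtp);
}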
static stream_manager_t * stream_manager_new(StreamType type) {
	stream_manager_t * mgr = ms_new0(stream_manager_t,1);
	mgr->type=type;
	mgr->local_rtp=(rand() % ((1<<16)-1024) + 1024) & ~0x1; /* pick a random even RTP port in [1024,65534] */
	mgr->local_rtcp=mgr->local_rtp+1;

	mgr->evq=ortp_ev_queue_new();

	if (mgr->type==AudioStreamType){
		mgr->audio_stream=audio_stream_new (mgr->local_rtp, mgr->local_rtcp,FALSE);
		rtp_session_register_event_queue(mgr->audio_stream->ms.sessions.rtp_session,mgr->evq);
	}else{
#ifdef VIDEO_ENABLED
		mgr->video_stream=video_stream_new (mgr->local_rtp, mgr->local_rtcp,FALSE);
		rtp_session_register_event_queue(mgr->video_stream->ms.sessions.rtp_session,mgr->evq);
#else
		ms_fatal("Unsupported stream type [%s]",ms_stream_type_to_string(mgr->type));
#endif

	}
	return mgr;
}
Example No. 14
static void run_media_streams(int localport, const char *remote_ip, int remoteport, int payload, const char *fmtp,
          int jitter, int bitrate, MSVideoSize vs, bool_t ec, bool_t agc, bool_t eq)
{
	AudioStream *audio=NULL;
#ifdef VIDEO_ENABLED
	VideoStream *video=NULL;
#endif
	RtpSession *session=NULL;
	PayloadType *pt;
	RtpProfile *profile=rtp_profile_clone_full(&av_profile);
	OrtpEvQueue *q=ortp_ev_queue_new();	

	ms_init();
	signal(SIGINT,stop_handler);
	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		printf("Error: no payload defined with number %i.",payload);
		exit(-1);
	}
	if (fmtp!=NULL) payload_type_set_send_fmtp(pt,fmtp);
	if (bitrate>0) pt->normal_bitrate=bitrate;

	if (pt->type!=PAYLOAD_VIDEO){
		MSSndCardManager *manager=ms_snd_card_manager_get();
		MSSndCard *capt= capture_card==NULL ? ms_snd_card_manager_get_default_capture_card(manager) :
				ms_snd_card_manager_get_card(manager,capture_card);
		MSSndCard *play= playback_card==NULL ? ms_snd_card_manager_get_default_playback_card(manager) :
				ms_snd_card_manager_get_card(manager,playback_card);
		audio=audio_stream_new(localport,ms_is_ipv6(remote_ip));
		audio_stream_enable_automatic_gain_control(audio,agc);
		audio_stream_enable_noise_gate(audio,use_ng);
		audio_stream_set_echo_canceller_params(audio,ec_len_ms,ec_delay_ms,ec_framesize);
		printf("Starting audio stream.\n");
	
		audio_stream_start_full(audio,profile,remote_ip,remoteport,remoteport+1, payload, jitter,infile,outfile,
		                        outfile==NULL ? play : NULL ,infile==NULL ? capt : NULL,infile!=NULL ? FALSE: ec);
		
		if (audio) {
			if (use_ng && ng_threshold!=-1)
				ms_filter_call_method(audio->volsend,MS_VOLUME_SET_NOISE_GATE_THRESHOLD,&ng_threshold);
			session=audio->session;
		}
	}else{
#ifdef VIDEO_ENABLED
		if (eq){
			ms_fatal("Cannot put an audio equalizer in a video stream !");
			exit(-1);
		}
		printf("Starting video stream.\n");
		video=video_stream_new(localport, ms_is_ipv6(remote_ip));
		video_stream_set_sent_video_size(video,vs);
		video_stream_use_preview_video_window(video,two_windows);
		video_stream_start(video,profile,
					remote_ip,
					remoteport,remoteport+1,
					payload,
					jitter,
					ms_web_cam_manager_get_default_cam(ms_web_cam_manager_get()));
		session=video->session;
#else
		printf("Error: video support not compiled.\n");
#endif
	}
  if (eq || ec){ /*read from stdin interactive commands */
    char commands[128];
    commands[127]='\0';
    ms_sleep(1);  /* ensure following text be printed after ortp messages */
    if (eq)
      printf("\nPlease enter equalizer requests, such as 'eq active 1', 'eq active 0', 'eq 1200 0.1 200'\n");
    if (ec)
      printf("\nPlease enter echo canceller requests: ec reset; ec <delay ms> <tail_length ms'\n");
    while(fgets(commands,sizeof(commands)-1,stdin)!=NULL){
      int active,freq,freq_width;
      int delay_ms, tail_ms;
      float gain;
      if (sscanf(commands,"eq active %i",&active)==1){
        audio_stream_enable_equalizer(audio,active);
        printf("OK\n");
      }else if (sscanf(commands,"eq %i %f %i",&freq,&gain,&freq_width)==3){
        audio_stream_equalizer_set_gain(audio,freq,gain,freq_width);
        printf("OK\n");
      }else if (sscanf(commands,"eq %i %f",&freq,&gain)==2){
        audio_stream_equalizer_set_gain(audio,freq,gain,0);
        printf("OK\n");
      }else if (strstr(commands,"dump")){
        int n=0,i;
        float *t;
        ms_filter_call_method(audio->equalizer,MS_EQUALIZER_GET_NUM_FREQUENCIES,&n);
        t=(float*)alloca(sizeof(float)*n);
        ms_filter_call_method(audio->equalizer,MS_EQUALIZER_DUMP_STATE,t);
        for(i=0;i<n;++i){
          if (fabs(t[i]-1)>0.01){
            printf("%i:%f:0 ",(i*pt->clock_rate)/(2*n),t[i]);
          }
        }
        printf("\nOK\n");
      }else if (sscanf(commands,"ec reset %i",&active)==1){
          //audio_stream_enable_equalizer(audio,active);
          //printf("OK\n");
      }else if (sscanf(commands,"ec active %i",&active)==1){
          //audio_stream_enable_equalizer(audio,active);
          //printf("OK\n");
      }else if (sscanf(commands,"ec %i %i",&delay_ms,&tail_ms)==2){
        audio_stream_set_echo_canceller_params(audio,tail_ms,delay_ms,128);
        // revisit: workaround with old method call to force echo reset
        delay_ms*=8;
        ms_filter_call_method(audio->ec,MS_FILTER_SET_PLAYBACKDELAY,&delay_ms);
        printf("OK\n");
      }else if (strstr(commands,"quit")){
        break;
      }else printf("Cannot understand this.\n");
    }
	}else{  /* no interactive stuff - continuous debug output */
		rtp_session_register_event_queue(session,q);
		while(cond)
		{
			int n;
			for(n=0;n<100;++n){
	#ifdef WIN32
				MSG msg;
				Sleep(10);
				while (PeekMessage(&msg, NULL, 0, 0,1)){
					TranslateMessage(&msg);
					DispatchMessage(&msg);
				}
	#else
				struct timespec ts;
				ts.tv_sec=0;
				ts.tv_nsec=10000000;
				nanosleep(&ts,NULL);
	#endif
	#if defined(VIDEO_ENABLED)
				if (video) video_stream_iterate(video);
	#endif
			}
			ortp_global_stats_display();
			if (session){
				printf("Bandwidth usage: download=%f kbits/sec, upload=%f kbits/sec\n",
					rtp_session_compute_recv_bandwidth(session)*1e-3,
					rtp_session_compute_send_bandwidth(session)*1e-3);
				parse_events(q);
			}
		}
	}
	
	printf("stopping all...\n");
	
	if (audio) audio_stream_stop(audio);
#ifdef VIDEO_ENABLED
	if (video) video_stream_stop(video);
#endif
	ortp_ev_queue_destroy(q);
	rtp_profile_destroy(profile);
}
Example No. 15
void run_media_streams(int localport,  const char *remote_ip, int remoteport, int payload, const char *fmtp, int jitter, bool_t ec, int bitrate)
{
    AudioStream *audio=NULL;
#ifdef VIDEO_ENABLED
    VideoStream *video=NULL;
#endif
    RtpSession *session=NULL;
    PayloadType *pt;
    RtpProfile *profile=rtp_profile_clone_full(&av_profile);
    OrtpEvQueue *q=ortp_ev_queue_new();

    ms_init();
    signal(SIGINT,stop_handler);
    pt=rtp_profile_get_payload(profile,payload);
    if (pt==NULL) {
        printf("Error: no payload defined with number %i.",payload);
        exit(-1);
    }
    if (fmtp!=NULL) payload_type_set_send_fmtp(pt,fmtp);
    if (bitrate>0) pt->normal_bitrate=bitrate;

    if (pt->type!=PAYLOAD_VIDEO) {
        printf("Starting audio stream.\n");
        audio=audio_stream_start(profile,localport,remote_ip,remoteport,payload,jitter, ec);
        if (audio) session=audio->session;
    } else {
#ifdef VIDEO_ENABLED
        printf("Starting video stream.\n");
        video=video_stream_new(localport, ms_is_ipv6(remote_ip));
        video_stream_start(video,profile,
                           remote_ip,
                           remoteport,
                           payload,
                           jitter,
                           "/dev/video0");
        session=video->session;
#else
        printf("Error: video support not compiled.\n");
#endif
    }
    rtp_session_register_event_queue(session,q);
    while(cond)
    {
        /* sleep until we receive SIGINT */
#ifdef WIN32
        int n;
        MSG msg;
        for(n=0; n<100; ++n) {
            Sleep(10);
            while (PeekMessage(&msg, NULL, 0, 0,1)) {
                TranslateMessage(&msg);
                DispatchMessage(&msg);
            }
        }
#else
        sleep(1);
#endif
        ortp_global_stats_display();
        if (session) {
            printf("Bandwidth usage: download=%f kbits/sec, upload=%f kbits/sec\n",
                   rtp_session_compute_recv_bandwidth(session)*1e-3,
                   rtp_session_compute_send_bandwidth(session)*1e-3);
            parse_events(q);
        }
    }

    printf("stoping all...\n");

    if (audio) audio_stream_stop(audio);
#ifdef VIDEO_ENABLED
    if (video) video_stream_stop(video);
#endif
    ortp_ev_queue_destroy(q);
    rtp_profile_destroy(profile);
}
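Both of the last two examples poll the registered queue through a parse_events() helper that is defined elsewhere in the original tools. As a hedged sketch only, such a helper typically drains the queue, reports the events of interest, and frees each event (assuming <stdio.h> and the oRTP event API):

static void example_parse_events(OrtpEvQueue *q){
	OrtpEvent *ev;
	while ((ev=ortp_ev_queue_get(q))!=NULL){
		switch (ortp_event_get_type(ev)){
			case ORTP_EVENT_RTCP_PACKET_RECEIVED:
				printf("RTCP packet received.\n");
				break;
			case ORTP_EVENT_TELEPHONE_EVENT:
				printf("Telephone event (DTMF) received.\n");
				break;
			default:
				break;
		}
		ortp_event_destroy(ev);
	}
}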