/* Periodically samples the upload bandwidth and drives the packet-duplication
 * "burst" state machine used for bandwidth probing. */
static void stateful_analyzer_update(MSQosAnalyzer *objbase){
	MSStatefulQosAnalyzer *obj=(MSStatefulQosAnalyzer*)objbase;
	/* static: persists across calls so bandwidth is sampled at most once
	 * per wall-clock second (ms_time has 1 s granularity) */
	static time_t last_measure;
	/* Accumulate the send bandwidth (converted to kbit/s) once per second */
	if (last_measure != ms_time(0)){
		obj->upload_bandwidth_count++;
		obj->upload_bandwidth_sum+=rtp_session_get_send_bandwidth(obj->session)/1000.0;
	}
	last_measure = ms_time(0);
	if (obj->burst_duration_ms>0){
		/* NOTE(review): the cases below have no break — Enable appears to
		 * intentionally fall through to InProgress so a burst started this
		 * tick is processed immediately; confirm before adding breaks. */
		switch (obj->burst_state){
		case MSStatefulQosAnalyzerBurstEnable:{
			/* Start a new burst: record start time/sequence and enable
			 * packet duplication at the configured ratio */
			obj->burst_state=MSStatefulQosAnalyzerBurstInProgress;
			ortp_gettimeofday(&obj->start_time, NULL);
			rtp_session_set_duplication_ratio(obj->session, obj->burst_ratio);
			obj->start_seq_number=obj->last_seq_number=obj->session->rtp.snd_seq;
		} /* fallthrough */
		case MSStatefulQosAnalyzerBurstInProgress: {
			/* Track elapsed time (ms) since burst start and the latest
			 * sent sequence number; stop once the duration is exceeded */
			struct timeval now;
			double elapsed;
			ortp_gettimeofday(&now,NULL);
			elapsed=((now.tv_sec-obj->start_time.tv_sec)*1000.0) + ((now.tv_usec-obj->start_time.tv_usec)/1000.0);
			obj->last_seq_number=obj->session->rtp.snd_seq;
			if (elapsed > obj->burst_duration_ms){
				obj->burst_state=MSStatefulQosAnalyzerBurstDisable;
				rtp_session_set_duplication_ratio(obj->session, 0);
			}
		} /* fallthrough */
		case MSStatefulQosAnalyzerBurstDisable: {
			/* nothing to do while disabled */
		}
		}
	}
}
// entscheidet ob UFO erstellt oder aktualisert wird void ufo(Game *g) { // 1:1 Chance nach MIN_UFO_PAUSE if (!g->enemyContainer.ufo.alive && (MIN_UFO_PAUSE + g->enemyContainer.ufo.lastufo) < ms_time() && (rand() % 2)) { g->enemyContainer.ufo.lastufo = ms_time(); g->enemyContainer.ufo.alive = true; g->enemyContainer.ufo.posx = -UFO_WIDTH; SDL_Rect rect = {g->enemyContainer.ufo.posx, BORDER_TOP + 5, UFO_WIDTH, UFO_HEIGHT}; drawSprite(g->screen, &rect, "ufo.bmp"); } else if (g->enemyContainer.ufo.alive) { // UFO lebt noch, Position Aktualisieren SDL_Rect rect = {g->enemyContainer.ufo.posx, BORDER_TOP + 5, UFO_WIDTH, UFO_HEIGHT}; SDL_FillRect(g->screen, &rect, SDL_MapRGB(g->screen->format, 0, 0, 0)); g->enemyContainer.ufo.posx += UFO_SPEED; rect.x = g->enemyContainer.ufo.posx; if (g->enemyContainer.ufo.posx < WIDTH) { // Zeichnen drawSprite(g->screen, &rect, "ufo.bmp"); } else { // Außerhalb g->enemyContainer.ufo.lastufo = ms_time(); g->enemyContainer.ufo.alive = false; } } else if ((MIN_UFO_PAUSE + g->enemyContainer.ufo.lastufo) < ms_time()) { // UFO Start verschieben g->enemyContainer.ufo.lastufo += ((rand() % 11) * 1000); } }
/* Returns FALSE when no RTP activity was observed for more than 'timeout'
 * seconds (after at least one packet was received), TRUE otherwise. */
bool_t audio_stream_alive(AudioStream * stream, int timeout){
	const rtp_stats_t *st = rtp_session_get_stats(stream->session);
	if (st->recv == 0) return TRUE; /* nothing received yet: stay alive */
	/* An incoming RTCP packet also counts as activity */
	if (stream->evq){
		OrtpEvent *event = ortp_ev_queue_get(stream->evq);
		if (event != NULL){
			if (ortp_event_get_type(event) == ORTP_EVENT_RTCP_PACKET_RECEIVED)
				stream->last_packet_time = ms_time(NULL);
			ortp_event_destroy(event);
		}
	}
	if (st->recv == stream->last_packet_count){
		/* No new packets since the last call: check the inactivity window */
		if (ms_time(NULL) - stream->last_packet_time > timeout){
			/* more than timeout seconds of inactivity */
			return FALSE;
		}
	}else{
		stream->last_packet_count = st->recv;
		stream->last_packet_time = ms_time(NULL);
	}
	return TRUE;
}
/* Busy-waits for 'seconds' seconds while pumping the LinphoneCore event
 * loop so notifications keep being delivered during the wait. */
static void wait_a_bit(LinphoneCore *lc, int seconds){
	time_t deadline = ms_time(NULL) + seconds;
	while (ms_time(NULL) < deadline){
		/* we need to call iterate to receive notifications */
		linphone_core_iterate(lc);
		ms_usleep(50000);
	}
}
/* Returns FALSE when more than 'timeout' seconds elapsed since the last
 * observed incoming packet, TRUE otherwise. */
bool_t media_stream_alive(MediaStream *ms, int timeout){
	const rtp_stats_t *st = rtp_session_get_stats(ms->sessions.rtp_session);
	/* Refresh the activity timestamp whenever the receive counter advances */
	if (st->recv != 0 && st->recv != ms->last_packet_count){
		ms->last_packet_count = st->recv;
		ms->last_packet_time = ms_time(NULL);
	}
	/* Note: this check also runs before the first packet arrives, relying on
	 * last_packet_time being initialized when the stream started. */
	if (ms_time(NULL) - ms->last_packet_time > timeout){
		/* more than timeout seconds of inactivity */
		return FALSE;
	}
	return TRUE;
}
/* Toggles AVI capture on button press: stops an ongoing capture, recovers
 * when the capture thread already exited, or starts a new capture to the
 * filename taken from the text entry.
 * FIX: corrected user-visible log typos ("stoping" -> "stopping",
 * "thread as exited" -> "thread has exited"). */
static void capture_avi (GtkButton * CapAVIButt, GtkWidget * AVIFNameEntry)
{
	/* NOTE(review): gtk_entry_get_text returns const gchar* in GTK+ 2 —
	 * verify that storing it in a plain char* (and in videoIn->AVIFName)
	 * is safe for this codebase. */
	char *filename = gtk_entry_get_text(GTK_ENTRY(AVIFNameEntry));
	char *compression="YUY2";
	/* Map the UI format selection to a 4CC compression tag */
	switch (AVIFormat) {
	case 1:
		compression="YUY2";
		break;
	case 2:
		compression="DIB ";
		break;
	default:
		compression="YUY2";
	}
	if(videoIn->capAVI) {
		/* Currently capturing: stop and close the AVI */
		printf("stopping AVI capture\n");
		gtk_button_set_label(CapAVIButt,"Capture");
		AVIstoptime = ms_time();
		/* NOTE(review): %d assumes AVIstoptime is int — confirm its declared type */
		printf("AVI stop time:%d\n",AVIstoptime);
		videoIn->capAVI = FALSE;
		aviClose();
	} else {
		if (!(videoIn->signalquit)) {
			/* Thread exited while in AVI capture mode:
			 * we still have to close the AVI file */
			printf("close AVI since thread has exited\n");
			gtk_button_set_label(CapAVIButt,"Capture");
			printf("AVI stop time:%d\n",AVIstoptime);
			aviClose();
		} else {
			/* Thread is running: start a new AVI capture */
			printf("starting AVI capture to %s\n",filename);
			gtk_button_set_label(CapAVIButt,"Stop");
			AviOut = AVI_open_output_file(filename);
			/* 4CC compression "YUY2" (YUV) or "DIB " (RGB24) or whathever */
			AVI_set_video(AviOut, videoIn->width, videoIn->height, videoIn->fps,compression);
			videoIn->AVIFName = filename;
			AVIstarttime = ms_time();
			printf("AVI start time:%d\n",AVIstarttime);
			videoIn->capAVI = TRUE;
		}
	}
}
/* Returns FALSE when more than 'timeout' seconds passed without receiving
 * any new RTP packet (after at least one packet was received), TRUE
 * otherwise.
 * FIX: merged the two consecutive identical 'stats->recv!=0' guards into
 * one — nothing between them modified the stats, so behavior is unchanged
 * and the redundant branch is gone. */
bool_t audio_stream_alive(AudioStream * stream, int timeout){
	const rtp_stats_t *stats=rtp_session_get_stats(stream->ms.session);
	if (stats->recv!=0){
		/* New packets arrived since the last call: refresh the timestamp */
		if (stats->recv!=stream->last_packet_count){
			stream->last_packet_count=stats->recv;
			stream->last_packet_time=ms_time(NULL);
		}
		if (ms_time(NULL)-stream->last_packet_time>timeout){
			/* more than timeout seconds of inactivity*/
			return FALSE;
		}
	}
	return TRUE;
}
/* Per-tick housekeeping for a media stream: drives ICE processing, updates
 * the local quality indicator and bitrate controller, then drains the ortp
 * event queue (RTCP in/out, STUN, ZRTP encryption changes). */
void media_stream_iterate(MediaStream *stream){
	time_t curtime=ms_time(NULL);

	if (stream->ice_check_list) ice_check_list_process(stream->ice_check_list,stream->sessions.rtp_session);
	/*we choose to update the quality indicator as much as possible, since local statistics can be computed realtime. */
	if (stream->state==MSStreamStarted){
		/* at most one update per second: curtime has 1 s granularity */
		if (stream->qi && curtime>stream->last_iterate_time) ms_quality_indicator_update_local(stream->qi);
	}
	stream->last_iterate_time=curtime;
	if (stream->rc) ms_bitrate_controller_update(stream->rc);
	if (stream->evq){
		OrtpEvent *ev=NULL;
		/* drain every pending event this tick */
		while ((ev=ortp_ev_queue_get(stream->evq))!=NULL){
			OrtpEventType evt=ortp_event_get_type(ev);
			if (evt==ORTP_EVENT_RTCP_PACKET_RECEIVED){
				mblk_t *m=ortp_event_get_data(ev)->packet;
				media_stream_process_rtcp(stream,m,curtime);
			}else if (evt==ORTP_EVENT_RTCP_PACKET_EMITTED){
				/* an emitted RTCP packet means fresh local stats exist */
				ms_message("%s_stream_iterate[%p]: local statistics available\n\tLocal's current jitter buffer size:%f ms",
					media_stream_type_str(stream), stream,
					rtp_session_get_jitter_stats(stream->sessions.rtp_session)->jitter_buffer_size_ms);
			}else if ((evt==ORTP_EVENT_STUN_PACKET_RECEIVED)&&(stream->ice_check_list)){
				ice_handle_stun_packet(stream->ice_check_list,stream->sessions.rtp_session,ortp_event_get_data(ev));
			} else if (evt == ORTP_EVENT_ZRTP_ENCRYPTION_CHANGED) {
				OrtpEventData *evd=ortp_event_get_data(ev);
				/* track whether the session is currently ZRTP-encrypted */
				stream->sessions.is_secured=evd->info.zrtp_stream_encrypted;
				ms_message("%s_stream_iterate[%p]: is %s ",media_stream_type_str(stream) , stream, stream->sessions.is_secured ? "encrypted" : "not encrypted");
			}
			ortp_event_destroy(ev);
		}
	}
}
// Game Struct initialisieren void initGame(Game *g) { // Startwerte g->level = 0; g->score = 0; g->enemyShots = NULL; g->player.shot = NULL; g->player.lives = 2; // Nur 2, weil danach startNewLevel aufgerufen wird (-> ein Leben mehr) g->enemyContainer.ufo.alive = false; g->enemyContainer.ufo.posx = 0; g->enemyContainer.ufo.lastufo = ms_time(); // Spielerposition g->player.rect.x = WIDTH/2 - PLAYER_WIDTH/2; g->player.rect.y = PLAYER_Y_POS; g->player.rect.w = PLAYER_WIDTH; g->player.rect.h = PLAYER_HEIGHT; g->blocks = malloc(sizeof(Block) * BLOCK_COUNT); for (int i = 0; i < BLOCK_COUNT; i++) { for (int x = 0; x < BLOCK_WIDTH/BLOCK_TILE_WIDTH; x++) { for (int y = 0; y < BLOCK_HEIGHT/BLOCK_TILE_HEIGHT; y++) { g->blocks[i].damage[x][y] = 0; g->blocks[i].posx[x][y] = WIDTH/BLOCK_COUNT * i - BLOCK_WIDTH/2 + x*BLOCK_TILE_WIDTH + WIDTH/BLOCK_COUNT/2; g->blocks[i].posy[x][y] = PLAYER_Y_POS - 100 + y*BLOCK_TILE_HEIGHT; } } } // Nur zeichen, daher None movePlayer(g, None); }
/* Test case: with log collection enabled and filled, compressing the logs
 * produces a file whose latest entry matches the current time. */
static void collect_files_filled() {
	LinphoneCoreManager *mgr = setup(TRUE);
	char *compressed = linphone_core_compress_log_collection(mgr->lc);
	CU_ASSERT_PTR_NOT_NULL(compressed);
	CU_ASSERT_EQUAL(ms_time(0), check_file(compressed, FALSE));
	linphone_core_manager_destroy(mgr);
}
/* Starts a real-time text (RFC 4103) stream: configures the RTP session,
 * resolves the t140/red payload numbers, builds the send and receive filter
 * chains and attaches them to the ticker. Returns the stream.
 * NOTE(review): 'payload_type' is annotated as ignored in the signature but
 * is in fact used below — confirm which is intended. */
TextStream* text_stream_start(TextStream *stream, RtpProfile *profile, const char *rem_rtp_addr, int rem_rtp_port, const char *rem_rtcp_addr, int rem_rtcp_port, int payload_type /* ignored */) {
	RtpSession *rtps = stream->ms.sessions.rtp_session;
	MSConnectionHelper h;

	rtp_session_set_profile(rtps, profile);
	if (rem_rtp_port > 0) rtp_session_set_remote_addr_full(rtps, rem_rtp_addr, rem_rtp_port, rem_rtcp_addr, rem_rtcp_port);
	/* RTCP only when a valid remote RTCP port was supplied */
	if (rem_rtcp_port > 0) {
		rtp_session_enable_rtcp(rtps, TRUE);
	} else {
		rtp_session_enable_rtcp(rtps, FALSE);
	}

	/* Resolve payload numbers for plain T.140 and redundant (RED) text */
	stream->pt_t140 = rtp_profile_get_payload_number_from_mime(profile, "t140");
	stream->pt_red = rtp_profile_get_payload_number_from_mime(profile, "red");
	if (payload_type == stream->pt_t140) {
		ms_message("Text payload type is T140");
	} else if (payload_type == stream->pt_red) {
		ms_message("Text payload type is RED");
	} else {
		/* we dont know this kind of textstream... */
		ms_warning("unkown type of textstream");
	}
	rtp_session_set_payload_type(rtps, payload_type);

	if (rem_rtp_port > 0) ms_filter_call_method(stream->ms.rtpsend, MS_RTP_SEND_SET_SESSION, rtps);
	stream->ms.rtprecv = ms_filter_new(MS_RTP_RECV_ID);
	ms_filter_call_method(stream->ms.rtprecv, MS_RTP_RECV_SET_SESSION, rtps);
	stream->ms.sessions.rtp_session = rtps;

	if (stream->ms.sessions.ticker == NULL) media_stream_start_ticker(&stream->ms);

	/* RFC 4103 source (sender side) and sink (receiver side) filters */
	stream->rttsource = ms_filter_new(MS_RTT_4103_SOURCE_ID);
	stream->rttsink = ms_filter_new(MS_RTT_4103_SINK_ID);

	ms_filter_call_method(stream->rttsource, MS_RTT_4103_SOURCE_SET_T140_PAYLOAD_TYPE_NUMBER, &stream->pt_t140);
	ms_filter_call_method(stream->rttsink, MS_RTT_4103_SINK_SET_T140_PAYLOAD_TYPE_NUMBER, &stream->pt_t140);
	if (payload_type == stream->pt_red) {
		ms_filter_call_method(stream->rttsource, MS_RTT_4103_SOURCE_SET_RED_PAYLOAD_TYPE_NUMBER, &stream->pt_red);
		ms_filter_call_method(stream->rttsink, MS_RTT_4103_SINK_SET_RED_PAYLOAD_TYPE_NUMBER, &stream->pt_red);
	}

	/* Sending chain: rttsource -> rtpsend */
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h, stream->rttsource, -1, 0);
	ms_connection_helper_link(&h, stream->ms.rtpsend, 0, -1);
	/* Receiving chain: rtprecv -> rttsink */
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h, stream->ms.rtprecv, -1, 0);
	ms_connection_helper_link(&h, stream->rttsink, 0, -1);

	ms_ticker_attach_multiple(stream->ms.sessions.ticker, stream->rttsource, stream->ms.rtprecv, NULL);

	stream->ms.start_time = stream->ms.last_packet_time = ms_time(NULL);
	stream->ms.is_beginning = TRUE;
	stream->ms.state = MSStreamStarted;
	return stream;
}
/* Stateful analyzer tick: once per second, records the current upload
 * bandwidth in a circular history keyed by RTP sequence number (so a later
 * receiver report can be matched to the bandwidth in use at that time),
 * then drives the packet-duplication burst state machine. */
static void stateful_analyzer_update(MSQosAnalyzer *objbase){
	MSStatefulQosAnalyzer *obj=(MSStatefulQosAnalyzer*)objbase;
	static time_t last_measure; /* persists across calls: last sampled second */

	/* Every seconds, save the bandwidth used. This is needed to know how
	much bandwidth was used when receiving a receiver report. Since the
	report contains the "last sequence number", it allows us to precisely
	know which interval to consider */
	if (last_measure != ms_time(0)){
		obj->upload_bandwidth_count++;
		obj->upload_bandwidth_sum+=rtp_session_get_send_bandwidth(obj->session)/1000.0;

		/* Save bandwidth used at this time */
		obj->upload_bandwidth[obj->upload_bandwidth_cur].seq_number = rtp_session_get_seq_number(obj->session);
		obj->upload_bandwidth[obj->upload_bandwidth_cur].up_bandwidth = rtp_session_get_send_bandwidth(obj->session)/1000.0;
		obj->upload_bandwidth_cur = (obj->upload_bandwidth_cur+1)%BW_HISTORY;
	}
	last_measure = ms_time(0);

	if (obj->burst_duration_ms>0){
		/* NOTE(review): cases intentionally fall through — a burst enabled
		 * this tick is immediately processed as "in progress". */
		switch (obj->burst_state){
		case MSStatefulQosAnalyzerBurstEnable:{
			/* Start the burst: enable duplication and note the start time */
			obj->burst_state=MSStatefulQosAnalyzerBurstInProgress;
			ortp_gettimeofday(&obj->start_time, NULL);
			rtp_session_set_duplication_ratio(obj->session, obj->burst_ratio);
		} /* fallthrough */
		case MSStatefulQosAnalyzerBurstInProgress: {
			/* Stop the burst once its configured duration has elapsed */
			struct timeval now;
			float elapsed;
			ortp_gettimeofday(&now,NULL);
			elapsed=((now.tv_sec-obj->start_time.tv_sec)*1000.0) + ((now.tv_usec-obj->start_time.tv_usec)/1000.0);
			if (elapsed > obj->burst_duration_ms){
				obj->burst_state=MSStatefulQosAnalyzerBurstDisable;
				rtp_session_set_duplication_ratio(obj->session, 0);
			}
		} /* fallthrough */
		case MSStatefulQosAnalyzerBurstDisable: {
			/* nothing to do */
		}
		}
	}
}
// Leben verloren void playerDead(Game *g) { freeShotList(g->enemyShots); g->enemyShots = NULL; if (g->player.shot != NULL) { free(g->player.shot); g->player.shot = NULL; } g->player.lives -= 2; // startNewLevel erhöht um 1 g->level--; g->enemyContainer.ufo.alive = false; g->enemyContainer.ufo.lastufo = ms_time(); // Spielerposition g->player.rect.x = WIDTH/2 - PLAYER_WIDTH/2; g->player.rect.y = PLAYER_Y_POS; g->player.rect.w = PLAYER_WIDTH; g->player.rect.h = PLAYER_HEIGHT; SDL_Rect area = {0, BORDER_TOP, WIDTH, HEIGHT - BORDER_TOP}; SDL_FillRect(g->screen, &area, SDL_MapRGB(g->screen->format, 0, 0, 0)); if (g->player.lives+2 == 0) { free(g->blocks); showGameOver(g); saveHighscore(g->score); showHighscore(g); SDL_Flip(g->screen); // Warten auf Tastendruck SDL_Event e; SDL_WaitEvent(&e); SDL_FillRect(g->screen, &area, SDL_MapRGB(g->screen->format, 0, 0, 0)); initGame(g); startNewLevel(g); return; } SDL_Flip(g->screen); usleep(100000); // Nur zeichen, daher None movePlayer(g, None); startNewLevel(g); }
/* Compresses the log collection of 'mgr', checks the produced file
 * (location, minimum line count, chronological ordering of log timestamps)
 * and returns the latest timestamp parsed from the file (-1 if none).
 * FIX: 'last_log' was uint64_t — subtracting it from the signed time_t
 * 'time_curr' promoted the difference to unsigned (wrap-around for negative
 * deltas) before feeding it to labs(), and it was printed with %ld which
 * does not match uint64_t. It is now a time_t and printed via (long). */
time_t check_file(LinphoneCoreManager* mgr) {
	time_t last_log = ms_time(NULL);
	char* filepath = linphone_core_compress_log_collection(mgr->lc);
	time_t time_curr = -1;
	uint32_t timediff = 0;
	CU_ASSERT_PTR_NOT_NULL(filepath);
	if (filepath != NULL) {
		int line_count = 0;
		FILE *file = fopen(filepath, "r");
		char *line = NULL;
		size_t line_size = 256;
		struct tm tm_curr;
		time_t time_prev = -1;

		// 1) expect to find folder name in filename path
		CU_ASSERT_PTR_NOT_NULL(strstr(filepath, liblinphone_tester_writable_dir_prefix));

		// 2) check file contents
		while (getline(&line, &line_size, file) != -1) {
			// a) there should be at least 25 lines
			++line_count;
			// b) logs should be ordered by date (format: 2014-11-04 15:22:12:606)
			if (strlen(line) > 24) {
				char date[24] = {'\0'};
				memcpy(date, line, 23);
				if (strptime(date, "%Y-%m-%d %H:%M:%S", &tm_curr) != NULL) {
					time_curr = mktime(&tm_curr);
					CU_ASSERT_TRUE(time_curr >= time_prev);
					time_prev = time_curr;
				}
			}
		}
		CU_ASSERT_TRUE(line_count > 25);
		free(line);
		fclose(file);
		ms_free(filepath);
	}
	/* Both operands are signed time_t now, so the difference is well-defined */
	timediff = (uint32_t)labs((long)(time_curr - last_log));
	CU_ASSERT_TRUE( timediff <= 1 );
	if( !(timediff <= 1) ){
		ms_error("time_curr: %ld, last_log: %ld timediff: %d", (long)time_curr, (long)last_log, timediff );
	}
	// return latest time in file
	return time_curr;
}
/* Resets all averaged QoS metric accumulators for both the local and the
 * remote side, and stamps the report with the current time. */
static void reset_avg_metrics(reporting_session_report_t * report){
	reporting_content_metrics_t * both[2];
	int idx;

	both[0] = &report->local_metrics;
	both[1] = &report->remote_metrics;
	for (idx = 0; idx < 2; idx++) {
		reporting_content_metrics_t *m = both[idx];
		m->rtcp_sr_count = 0;
		m->rtcp_xr_count = 0;
		m->jitter_buffer.nominal = 0;
		m->jitter_buffer.max = 0;
		m->quality_estimates.moslq = 0;
		m->quality_estimates.moscq = 0;
		m->delay.round_trip_delay = 0;
	}
	report->last_report_date = ms_time(NULL);
}
/* Per-tick iteration for an audio stream: handles at most one pending ortp
 * event (RTCP received/emitted, STUN), then delegates the generic work to
 * media_stream_iterate. */
void audio_stream_iterate(AudioStream *stream){
	if (stream->ms.evq){
		/* at most one event is processed per call */
		OrtpEvent *ev=ortp_ev_queue_get(stream->ms.evq);
		if (ev!=NULL){
			OrtpEventType evt=ortp_event_get_type(ev);
			if (evt==ORTP_EVENT_RTCP_PACKET_RECEIVED){
				audio_stream_process_rtcp(stream,ortp_event_get_data(ev)->packet);
				/* an incoming RTCP packet counts as stream activity */
				stream->last_packet_time=ms_time(NULL);
			}else if (evt==ORTP_EVENT_RTCP_PACKET_EMITTED){
				ms_message("audio_stream_iterate(): local statistics available\n\tLocal's current jitter buffer size:%f ms",rtp_session_get_jitter_stats(stream->ms.session)->jitter_buffer_size_ms);
			}else if ((evt==ORTP_EVENT_STUN_PACKET_RECEIVED)&&(stream->ms.ice_check_list)){
				ice_handle_stun_packet(stream->ms.ice_check_list,stream->ms.session,ortp_event_get_data(ev));
			}
			ortp_event_destroy(ev);
		}
	}
	media_stream_iterate(&stream->ms);
}
/**
 * Removes a proxy configuration.
 *
 * LinphoneCore will then automatically unregister and place the proxy configuration
 * on a deleted list. For that reason, a removed proxy does NOT need to be freed.
**/
void linphone_core_remove_proxy_config(LinphoneCore *lc, LinphoneProxyConfig *cfg){
	/* check this proxy config is in the list before doing more*/
	if (ms_list_find(lc->sip_conf.proxies,cfg)==NULL){
		ms_error("linphone_core_remove_proxy_config: LinphoneProxyConfig %p is not known by LinphoneCore (programming error?)",cfg);
		return;
	}
	lc->sip_conf.proxies=ms_list_remove(lc->sip_conf.proxies,(void *)cfg);
	/* add to the list of destroyed proxies, so that the possible unREGISTER request can succeed authentication */
	lc->sip_conf.deleted_proxies=ms_list_append(lc->sip_conf.deleted_proxies,(void *)cfg);
	/* timestamp the removal so the deleted list can later be purged */
	cfg->deletion_date=ms_time(NULL);
	if (cfg->state==LinphoneRegistrationOk){
		/* this will unREGISTER */
		linphone_proxy_config_edit(cfg);
	}
	/* a removed proxy can no longer be the default one */
	if (lc->default_proxy==cfg){
		lc->default_proxy=NULL;
	}
	linphone_proxy_config_write_all_to_config_file(lc);
}
static void qos_analyzer_on_action_suggested(void *user_data, int datac, const char** datav){ reporting_session_report_t *report = (reporting_session_report_t*)user_data; LinphoneCall *call = report->call; char * appendbuf; int i; int ptime = -1; int bitrate[3] = {-1, -1, -1}; int up_bw[3] = {-1, -1, -1}; int down_bw[3] = {-1, -1, -1}; MediaStream *streams[3] = { (MediaStream*) call->audiostream, (MediaStream *) call->videostream, (MediaStream *) call->textstream }; for (i = 0; i < 3; i++){ if (streams[i] != NULL){ if (streams[i]->encoder != NULL){ if (ms_filter_has_method(streams[i]->encoder,MS_FILTER_GET_BITRATE)){ ms_filter_call_method(streams[i]->encoder,MS_FILTER_GET_BITRATE,&bitrate[i]); bitrate[i] /= 1000; } } up_bw[i] = (int)(media_stream_get_up_bw(streams[i])/1000.f); down_bw[i] = (int)(media_stream_get_down_bw(streams[i])/1000.f); } } if (call->audiostream!=NULL){ if (call->audiostream->ms.encoder!=NULL){ if(ms_filter_has_method(call->audiostream->ms.encoder,MS_AUDIO_ENCODER_GET_PTIME)){ ms_filter_call_method(call->audiostream->ms.encoder,MS_AUDIO_ENCODER_GET_PTIME,&ptime); } } } appendbuf=ms_strdup_printf("%s%d;", report->qos_analyzer.timestamp?report->qos_analyzer.timestamp:"", ms_time(0)); STR_REASSIGN(report->qos_analyzer.timestamp,appendbuf); STR_REASSIGN(report->qos_analyzer.input_leg, ms_strdup_printf("%s aenc_ptime aenc_br a_dbw a_ubw venc_br v_dbw v_ubw tenc_br t_dbw t_ubw", datav[0])); appendbuf=ms_strdup_printf("%s%s %d %d %d %d %d %d %d %d %d %d;", report->qos_analyzer.input?report->qos_analyzer.input:"", datav[1], ptime, bitrate[0], down_bw[0], up_bw[0], bitrate[1], down_bw[1], up_bw[1], bitrate[2], down_bw[2], up_bw[2]); STR_REASSIGN(report->qos_analyzer.input,appendbuf); STR_REASSIGN(report->qos_analyzer.output_leg, ms_strdup(datav[2])); appendbuf=ms_strdup_printf("%s%s;", report->qos_analyzer.output?report->qos_analyzer.output:"", datav[3]); STR_REASSIGN(report->qos_analyzer.output, appendbuf); }
/* Starts a video stream: resolves the payload type, configures the RTP
 * session (jitter buffer, socket buffers), then builds and attaches the
 * sending chain (camera -> converters -> tee -> encoder -> rtpsend) and/or
 * the receiving chain (rtprecv -> decoder -> display) depending on
 * stream->dir. Returns 0 on success, -1 on error (unknown payload type,
 * missing encoder/decoder). */
int video_stream_start (VideoStream *stream, RtpProfile *profile, const char *rem_rtp_ip, int rem_rtp_port, const char *rem_rtcp_ip, int rem_rtcp_port, int payload, int jitt_comp, MSWebCam *cam){
	PayloadType *pt;
	RtpSession *rtps=stream->ms.session;
	MSPixFmt format;
	MSVideoSize disp_size;
	int tmp;
	JBParameters jbp;
	const int socket_buf_size=2000000; /* large socket buffers for video bursts */

	if (cam==NULL){
		cam=ms_web_cam_manager_get_default_cam ( ms_web_cam_manager_get());
	}

	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		ms_error("videostream.c: undefined payload type.");
		return -1;
	}
	/* Some cameras deliver already-encoded frames: the source then acts as encoder */
	if ((cam != NULL) && (cam->desc->encode_to_mime_type != NULL) && (cam->desc->encode_to_mime_type(cam, pt->mime_type) == TRUE)) {
		stream->source_performs_encoding = TRUE;
	}

	rtp_session_set_profile(rtps,profile);
	if (rem_rtp_port>0) rtp_session_set_remote_addr_full(rtps,rem_rtp_ip,rem_rtp_port,rem_rtcp_ip,rem_rtcp_port);
	rtp_session_set_payload_type(rtps,payload);
	rtp_session_set_jitter_compensation(rtps,jitt_comp);

	rtp_session_signal_connect(stream->ms.session,"payload_type_changed", (RtpCallback)mediastream_payload_type_changed,(unsigned long)&stream->ms);

	rtp_session_get_jitter_buffer_params(stream->ms.session,&jbp);
	jbp.max_packets=1000;//needed for high resolution video
	rtp_session_set_jitter_buffer_params(stream->ms.session,&jbp);
	rtp_session_set_rtp_socket_recv_buffer_size(stream->ms.session,socket_buf_size);
	rtp_session_set_rtp_socket_send_buffer_size(stream->ms.session,socket_buf_size);

	if (stream->dir==VideoStreamSendRecv || stream->dir==VideoStreamSendOnly){
		MSConnectionHelper ch;
		/*plumb the outgoing stream */
		if (rem_rtp_port>0) ms_filter_call_method(stream->ms.rtpsend,MS_RTP_SEND_SET_SESSION,stream->ms.session);
		if (stream->source_performs_encoding == FALSE) {
			stream->ms.encoder=ms_filter_create_encoder(pt->mime_type);
			if ((stream->ms.encoder==NULL) ){
				/* big problem: we don't have a registered codec for this payload...*/
				ms_error("videostream.c: No encoder available for payload %i:%s.",payload,pt->mime_type);
				return -1;
			}
		}
		/* creates the filters */
		stream->cam=cam;
		stream->source = ms_web_cam_create_reader(cam);
		stream->tee = ms_filter_new(MS_TEE_ID);

		if (stream->source_performs_encoding == TRUE) {
			stream->ms.encoder = stream->source;	/* Consider the encoder is the source */
		}

		/* Apply the payload's nominal bitrate to the encoder, preferring a
		 * complete video configuration when the encoder provides a list */
		if (pt->normal_bitrate>0){
			MSVideoConfiguration *vconf_list = NULL;
			ms_message("Limiting bitrate of video encoder to %i bits/s",pt->normal_bitrate);
			ms_filter_call_method(stream->ms.encoder, MS_VIDEO_ENCODER_GET_CONFIGURATION_LIST, &vconf_list);
			if (vconf_list != NULL) {
				MSVideoConfiguration vconf = ms_video_find_best_configuration_for_bitrate(vconf_list, pt->normal_bitrate);
				ms_filter_call_method(stream->ms.encoder, MS_VIDEO_ENCODER_SET_CONFIGURATION, &vconf);
			} else {
				ms_filter_call_method(stream->ms.encoder, MS_FILTER_SET_BITRATE, &pt->normal_bitrate);
			}
		}
		if (pt->send_fmtp){
			ms_filter_call_method(stream->ms.encoder,MS_FILTER_ADD_FMTP,pt->send_fmtp);
		}
		if (stream->use_preview_window){
			if (stream->rendercb==NULL){
				stream->output2=ms_filter_new_from_name (stream->display_name);
			}
		}

		configure_video_source (stream);
		/* and then connect all */
		ms_connection_helper_start(&ch);
		ms_connection_helper_link(&ch, stream->source, -1, 0);
		if (stream->pixconv) {
			ms_connection_helper_link(&ch, stream->pixconv, 0, 0);
		}
		if (stream->sizeconv) {
			ms_connection_helper_link(&ch, stream->sizeconv, 0, 0);
		}
		ms_connection_helper_link(&ch, stream->tee, 0, 0);
		if (stream->source_performs_encoding == FALSE) {
			ms_connection_helper_link(&ch, stream->ms.encoder, 0, 0);
		}
		ms_connection_helper_link(&ch, stream->ms.rtpsend, 0, -1);

		/* the local preview, if any, is fed from the tee's second output */
		if (stream->output2){
			if (stream->preview_window_id!=0){
				ms_filter_call_method(stream->output2, MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID,&stream->preview_window_id);
			}
			ms_filter_link(stream->tee,1,stream->output2,0);
		}
	}
	if (stream->dir==VideoStreamSendRecv || stream->dir==VideoStreamRecvOnly){
		MSConnectionHelper ch;
		MSVideoDisplayDecodingSupport decoding_support;

		if (stream->rendercb!=NULL){
			stream->output=ms_filter_new(MS_EXT_DISPLAY_ID);
			ms_filter_set_notify_callback(stream->output,ext_display_cb,stream);
		}else{
			stream->output=ms_filter_new_from_name (stream->display_name);
		}
		/* Don't allow null output */
		if(stream->output == NULL) {
			ms_fatal("No video display filter could be instantiated. Please check build-time configuration");
		}

		/* Check if the output filter can perform the decoding process */
		decoding_support.mime_type = pt->mime_type;
		decoding_support.supported = FALSE;
		ms_filter_call_method(stream->output, MS_VIDEO_DISPLAY_SUPPORT_DECODING, &decoding_support);
		stream->output_performs_decoding = decoding_support.supported;

		/*plumb the incoming stream */
		if (stream->output_performs_decoding == TRUE) {
			stream->ms.decoder = stream->output;	/* Consider the decoder is the output */
		} else {
			stream->ms.decoder=ms_filter_create_decoder(pt->mime_type);
			if ((stream->ms.decoder==NULL) ){
				/* big problem: we don't have a registered decoderfor this payload...*/
				ms_error("videostream.c: No decoder available for payload %i:%s.",payload,pt->mime_type);
				ms_filter_destroy(stream->output);
				return -1;
			}
		}
		ms_filter_set_notify_callback(stream->ms.decoder, event_cb, stream);

		stream->ms.rtprecv = ms_filter_new (MS_RTP_RECV_ID);
		ms_filter_call_method(stream->ms.rtprecv,MS_RTP_RECV_SET_SESSION,stream->ms.session);

		if (stream->output_performs_decoding == FALSE) {
			/* optional snapshot writer fed through a second tee */
			stream->jpegwriter=ms_filter_new(MS_JPEG_WRITER_ID);
			if (stream->jpegwriter) stream->tee2=ms_filter_new(MS_TEE_ID);
		}

		/* set parameters to the decoder*/
		if (pt->send_fmtp){
			ms_filter_call_method(stream->ms.decoder,MS_FILTER_ADD_FMTP,pt->send_fmtp);
		}
		if (pt->recv_fmtp!=NULL) ms_filter_call_method(stream->ms.decoder,MS_FILTER_ADD_FMTP,(void*)pt->recv_fmtp);

		/*force the decoder to output YUV420P */
		format=MS_YUV420P;
		ms_filter_call_method(stream->ms.decoder,MS_FILTER_SET_PIX_FMT,&format);

		/*configure the display window */
		if(stream->output != NULL) {
			disp_size.width=MS_VIDEO_SIZE_CIF_W;
			disp_size.height=MS_VIDEO_SIZE_CIF_H;
			tmp=1;
			ms_filter_call_method(stream->output,MS_FILTER_SET_VIDEO_SIZE,&disp_size);
			ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_ENABLE_AUTOFIT,&tmp);
			ms_filter_call_method(stream->output,MS_FILTER_SET_PIX_FMT,&format);
			ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_SET_LOCAL_VIEW_MODE,&stream->corner);
			if (stream->window_id!=0){
				ms_filter_call_method(stream->output, MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID,&stream->window_id);
			}
			if (stream->display_filter_auto_rotate_enabled) {
				ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_SET_DEVICE_ORIENTATION,&stream->device_orientation);
			}
		}
		/* and connect the filters */
		ms_connection_helper_start (&ch);
		ms_connection_helper_link (&ch,stream->ms.rtprecv,-1,0);
		if (stream->output_performs_decoding == FALSE) {
			ms_connection_helper_link (&ch,stream->ms.decoder,0,0);
		}
		if (stream->tee2){
			ms_connection_helper_link (&ch,stream->tee2,0,0);
			ms_filter_link(stream->tee2,1,stream->jpegwriter,0);
		}
		if (stream->output!=NULL) ms_connection_helper_link (&ch,stream->output,0,-1);
		/* the video source must be send for preview , if it exists*/
		if (stream->tee!=NULL && stream->output!=NULL && stream->output2==NULL) ms_filter_link(stream->tee,1,stream->output,1);
	}
	if (stream->dir == VideoStreamSendOnly) {
		/* incoming RTP is still read but discarded into a void sink */
		stream->ms.rtprecv = ms_filter_new (MS_RTP_RECV_ID);
		ms_filter_call_method(stream->ms.rtprecv, MS_RTP_RECV_SET_SESSION, stream->ms.session);
		stream->ms.voidsink = ms_filter_new(MS_VOID_SINK_ID);
		ms_filter_link(stream->ms.rtprecv, 0, stream->ms.voidsink, 0);
	}

	/* create the ticker */
	if (stream->ms.ticker==NULL) start_ticker(&stream->ms);

	stream->ms.start_time=ms_time(NULL);
	stream->ms.is_beginning=TRUE;

	/* attach the graphs */
	if (stream->source) ms_ticker_attach (stream->ms.ticker, stream->source);
	if (stream->ms.rtprecv) ms_ticker_attach (stream->ms.ticker, stream->ms.rtprecv);
	return 0;
}
/* Processes an incoming RTCP SR/RR: extracts the first report block about
 * our outgoing stream, derives loss/jitter/RTT statistics and records a
 * (bandwidth, loss) point for the stateful congestion estimator.
 * Returns TRUE when a report block was found, FALSE otherwise. */
static bool_t stateful_analyzer_process_rtcp(MSQosAnalyzer *objbase, mblk_t *rtcp){
	MSStatefulQosAnalyzer *obj=(MSStatefulQosAnalyzer*)objbase;
	rtpstats_t *cur;
	const report_block_t *rb=NULL;
	/* both SR and RR packets can carry report blocks */
	if (rtcp_is_SR(rtcp)){
		rb=rtcp_SR_get_report_block(rtcp,0);
	}else if (rtcp_is_RR(rtcp)){
		rb=rtcp_RR_get_report_block(rtcp,0);
	}
	/* only consider reports about the stream we are sending */
	if (rb && report_block_get_ssrc(rb)==rtp_session_get_send_ssrc(obj->session)){
		double up_bw = stateful_qos_analyzer_upload_bandwidth(obj);
		int total_emitted=stateful_qos_analyzer_get_total_emitted(obj, rb);
		obj->curindex++;
		cur=&obj->stats[obj->curindex % STATS_HISTORY];

		/* lazily resolve the clock rate from the send payload type */
		if (obj->clockrate==0){
			PayloadType *pt=rtp_profile_get_payload(rtp_session_get_send_profile(obj->session),rtp_session_get_send_payload_type(obj->session));
			if (pt!=NULL) obj->clockrate=pt->clock_rate;
			else return FALSE;
		}
		/* RTCP fraction lost is expressed in 1/256 units */
		cur->lost_percentage=100.0*(float)report_block_get_fraction_lost(rb)/256.0;
		cur->int_jitter=1000.0*(float)report_block_get_interarrival_jitter(rb)/(float)obj->clockrate;
		cur->rt_prop=rtp_session_get_round_trip_propagation(obj->session);
		ms_message("MSSimpleQosAnalyzer: lost_percentage=%f, int_jitter=%f ms, rt_prop=%f sec",cur->lost_percentage,cur->int_jitter,cur->rt_prop);

		/* skip the first reports: not enough history yet */
		if (obj->curindex>2){
			double loss_rate = cur->lost_percentage/100.0;
			int cum_loss=report_block_get_cum_packet_loss(rb);
			int cum_loss_curr=cum_loss - obj->cum_loss_prev;
			int uniq_emitted=report_block_get_high_ext_seq(rb) - obj->previous_ext_high_seq_num_rec;

			if (obj->previous_ext_high_seq_num_rec > 0){
				/* estimate the loss over the exact interval covered by this report */
				loss_rate=(1. - (uniq_emitted - cum_loss_curr) * 1.f / total_emitted);
				ms_debug("MSQosStatefulAnalyzer[%p]: RECEIVE estimated loss rate=%f vs 'real'=%f", obj, loss_rate, report_block_get_fraction_lost(rb)/256.);
			}

			obj->latest=ms_new0(rtcpstatspoint_t, 1);
			obj->latest->timestamp=ms_time(0);
			obj->latest->bandwidth=up_bw;
			obj->latest->loss_percent=MAX(0,loss_rate);
			obj->latest->rtt=cur->rt_prop;
			obj->rtcpstatspoint=ms_list_insert_sorted(obj->rtcpstatspoint, obj->latest, (MSCompareFunc)sort_points);

			/* a loss-free point implies every lower-bandwidth point is loss-free too */
			if (obj->latest->loss_percent < 1e-5){
				MSList *it=obj->rtcpstatspoint;
				MSList *latest_pos=ms_list_find(obj->rtcpstatspoint,obj->latest);
				while (it!=latest_pos->next){
					((rtcpstatspoint_t *)it->data)->loss_percent=0.f;
					it = it->next;
				}
			}
			ms_debug("MSQosStatefulAnalyzer[%p]: one more %d: %f %f", obj, obj->curindex-2, obj->latest->bandwidth, obj->latest->loss_percent);

			if (ms_list_size(obj->rtcpstatspoint) > ESTIM_HISTORY){
#ifdef DEBUG
				int prev_size = ms_list_size(obj->rtcpstatspoint);
#endif
				/*clean everything which occurred 60 sec or more ago*/
				time_t clear_time = ms_time(0) - 60;
				obj->rtcpstatspoint = ms_list_remove_custom(obj->rtcpstatspoint, (MSCompareFunc)earlier_than, &clear_time);
				/* NOTE(review): 'prev_size' only exists under DEBUG — this
				 * relies on ms_debug discarding its arguments otherwise */
				ms_debug("MSQosStatefulAnalyzer[%p]: Reached list maximum capacity (count=%d) --> Cleaned list (count=%d)", obj, prev_size, ms_list_size(obj->rtcpstatspoint));
			}
		}
		/* remember cumulative loss and highest sequence for the next interval */
		obj->cum_loss_prev=report_block_get_cum_packet_loss(rb);
		obj->previous_ext_high_seq_num_rec=report_block_get_high_ext_seq(rb);
	}
	return rb!=NULL;
}
int main(int argc, char *argv[]) { Game *g = (Game*) malloc(sizeof(Game)); SDL_Event event; Uint8 *keystates; Uint8 quit = false; long lastplayerupdate = ms_time(); long lastenemyupdate = ms_time(); long lastshotupdate = ms_time(); long lastufoupdate = ms_time(); srand((unsigned int) time(NULL)); // SDL initialisieren if (SDL_Init(SDL_INIT_VIDEO) == -1) { printf("Kann Video nicht initialisieren: %s\n", SDL_GetError()); exit(1); } atexit(SDL_Quit); g->screen = SDL_SetVideoMode(WIDTH, HEIGHT, 16, SDL_HWSURFACE); if (g->screen == NULL) { printf("Kann Video-Modus nicht festlegen: %s\n", SDL_GetError()); exit(1); } TTF_Init(); // Game initialisieren initGame(g); startNewLevel(g); updateScore(g); updateLives(g); showHighscore(g); updateBlocks(g); // Nächster Grafikzustand SDL_Flip(g->screen); // Loop while (!quit) { // SDL Events abfragen SDL_PollEvent(&event); // Tastenstatus laden keystates = SDL_GetKeyState(NULL); // Escape gedrückt -> beenden // TODO: Menü aufrufen statt beenden if (keystates[SDLK_ESCAPE]) { saveHighscore(g->score); quit = true; } // Nur wenn entweder Links oder Rechts, nicht beide zur selben Zeit if (keystates[SDLK_LEFT] != keystates[SDLK_RIGHT] && lastplayerupdate >= 100) { lastplayerupdate = ms_time(); // Links if (keystates[SDLK_LEFT]) { movePlayer(g, Left); } // Rechts if (keystates[SDLK_RIGHT]) { movePlayer(g, Right); } } if (keystates[SDLK_SPACE]) { shoot(g); } // UFO if (ms_time() - lastufoupdate >= UFO_UPDATE) { lastufoupdate = ms_time(); ufo(g); } // Alienposition aktualisieren? 
// Exponentialfunktion, die Level und Alienanzahl berücksichtigt if (ms_time() - lastenemyupdate >= ENEMY_UPDATE_BASE * pow(0.95, g->level * 3 + (ENEMY_COUNT - g->enemyContainer.aliveCount) / 4)) { lastenemyupdate = ms_time(); updateBlocks(g); moveEnemys(g); alienShot(g); } // Schüsse aktualisieren if (ms_time() - lastshotupdate >= SHOT_UPDATE) { lastshotupdate = ms_time(); updateShots(g); checkCollision(g); movePlayer(g, None); } usleep(20000); // begrenzt CPU Last // Nächster Grafikzustand SDL_Flip(g->screen); } SDL_Quit(); return 0; }
/*
 * Grab one frame for the given channel from the V4L1 device described by vd,
 * JPEG-compress it into the current output slot (vd->ptframe[vd->frame_cour]),
 * fill in the frame_t header, and hand the slot to the reader via the
 * frame_syn_ctrl write/read flags. Finally queues the next capture.
 * Returns 0 on success, -1 if VIDIOCSYNC or VIDIOCMCAPTURE failed
 * (compression/handoff is still attempted after a sync failure).
 */
int v4lGrab (struct vdIn *vd, int channel )
{
	// static int frame = 0;
	// int len;
	// int status;
	// int count = 0;
	// int size;
	int erreur = 0;
	int jpegsize = 0;
	int qualite = 1024;	/* JPEG quality factor passed to convertframe() */
	struct frame_t *headerframe;
	// double timecourant =0;
	// double temps = 0;
	// timecourant = ms_time();
	if (vd->grabMethod)
	{
		vd->vmmap.height = vd->hdrheight;
		vd->vmmap.width = vd->hdrwidth;
		vd->vmmap.format = vd->formatIn;
		// logger(TLOG_NOTICE, "channel:%d step 1.\n", channel);
		/* Wait for the mmap'ed capture of the current buffer to complete. */
		if (ioctl (vd->fd, VIDIOCSYNC,&vd->vmmap.frame) < 0)
		{
			logger(TLOG_ERROR, "rtv cvsync err\n");
			erreur = -1;
		}
		/* Block until the writer flag for this slot is set, i.e. the slot is
		 * free for us to overwrite. */
		thread_syn_wait(&frame_syn_ctrl[channel][vd->frame_cour].mutex_w,
			&frame_syn_ctrl[channel][vd->frame_cour].cond_w,
			&frame_syn_ctrl[channel][vd->frame_cour].flag_w);
		// logger(TLOG_NOTICE, "channel:%d step 3.\n", channel);
		/* Compress the raw capture buffer into the slot, after the header. */
		jpegsize= convertframe(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t),
			vd->pFramebuffer + vd->videombuf.offsets[vd->vmmap.frame],
			vd->hdrwidth,vd->hdrheight,vd->formatIn,qualite);
		/* Negative jpegsize means compression failed: store size 0. */
		vd->ptframesize[vd->frame_cour] = ((jpegsize < 0)?0:jpegsize);
		headerframe=(struct frame_t*)vd->ptframe[vd->frame_cour];
		// snprintf(headerframe->header,5,"%s","SPCA");
		// headerframe->seqtimes = ms_time();
		frame_time(&(headerframe->date), &(headerframe->time));
		// headerframe->deltatimes=(int)(headerframe->seqtimes-timecourant);
		headerframe->w = vd->hdrwidth;
		headerframe->h = vd->hdrheight;
		headerframe->size = (( jpegsize < 0)?0:jpegsize);
		headerframe->format = vd->formatIn;
		// headerframe->nbframe = frame++;
		// logger(TLOG_NOTICE, "channel:%d step 4.\n", channel);
		/* Clear our write flag and set the read flag: the slot now belongs
		 * to the consumer thread. */
		thread_syn_flag_clr(&frame_syn_ctrl[channel][vd->frame_cour].mutex_w,
			&frame_syn_ctrl[channel][vd->frame_cour].cond_w,
			&frame_syn_ctrl[channel][vd->frame_cour].flag_w);
		thread_syn_flag_set(&frame_syn_ctrl[channel][vd->frame_cour].mutex_r,
			&frame_syn_ctrl[channel][vd->frame_cour].cond_r,
			&frame_syn_ctrl[channel][vd->frame_cour].flag_r);
		// logger(TLOG_NOTICE, "compress frame %d times %f\n",frame, headerframe->seqtimes-temps);
		//
		/* NOTE(review): there is no matching pthread_mutex_lock() visible in
		 * this function before this unlock — unlocking an unheld mutex is
		 * undefined behavior. Presumably grabmutex is taken by the caller or
		 * was superseded by the frame_syn_ctrl flags; verify and either add
		 * the lock or remove this unlock. */
		pthread_mutex_unlock (&vd->grabmutex);
		/************************************/
		// logger(TLOG_NOTICE, "channel:%d step 5.\n", channel);
		/* Queue the next asynchronous capture into the next mmap buffer. */
		if ((ioctl (vd->fd, VIDIOCMCAPTURE, &(vd->vmmap))) < 0)
		{
			logger(TLOG_ERROR, "rtv cmcapture err\n");
			erreur = -1;
		}
		vd->vmmap.frame = (vd->vmmap.frame + 1) % vd->videombuf.frames;
		vd->frame_cour = (vd->frame_cour +1) % OUTFRMNUMB;
		// logger(TLOG_NOTICE, "channel:%d step 6.\n", channel);
		//logger(TLOG_NOTICE, "frame nb %d\n",vd->vmmap.frame);
	}
#if 0
	else
	{
		/* read method */
		size = vd->framesizeIn;
		len = read (vd->fd, vd->pFramebuffer, size);
		if (len <= 0 )
		{
			printf ("v4l read error\n");
			printf ("len %d asked %d \n", len, size);
			return 0;
		}
		/* Is there someone using the frame */
		while ((vd->framelock[vd->frame_cour] != 0)&& vd->signalquit)
			usleep(1000);
		pthread_mutex_lock (&vd->grabmutex);
		/*
		memcpy (vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t), vd->pFramebuffer, vd->framesizeIn);
		jpegsize =jpeg_compress(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t),len,
			vd->pFramebuffer, vd->hdrwidth, vd->hdrheight, qualite);
		*/
		temps = ms_time();
		jpegsize= convertframe(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t),
			vd->pFramebuffer , vd->hdrwidth,vd->hdrheight,vd->formatIn,qualite);
		headerframe=(struct frame_t*)vd->ptframe[vd->frame_cour];
		snprintf(headerframe->header,5,"%s","SPCA");
		headerframe->seqtimes = ms_time();
		headerframe->deltatimes=(int)(headerframe->seqtimes-timecourant);
		headerframe->w = vd->hdrwidth;
		headerframe->h = vd->hdrheight;
		headerframe->size = (( jpegsize < 0)?0:jpegsize);;
		headerframe->format = vd->formatIn;
		headerframe->nbframe = frame++;
		// printf("compress frame %d times %f\n",frame, headerframe->seqtimes-temps);
		vd->frame_cour = (vd->frame_cour +1) % OUTFRMNUMB;
		pthread_mutex_unlock (&vd->grabmutex);
		/************************************/
	}
#endif
	return erreur;
}
/*
 * CUnit test: verifies that presence information published by pauline is
 * correctly received by marie after a presence subscription. Exercises, in
 * order: an activity without description, an activity with description, an
 * activity with a note, the contact field, and the presence timestamp.
 * NOTE(review): the contact and timestamp checks read the locally-built
 * `presence` model rather than marie->stat.last_received_presence — presumably
 * intentional (those fields are set/stamped locally); verify against the
 * other presence tests.
 */
static void presence_information(void) {
	const char *bike_description = "Riding my bike";
	const char *vacation_note = "I'm on vacation until July 4th";
	const char *vacation_lang = "en";
	const char *contact = "sip:[email protected]";
	LinphoneCoreManager *marie = presence_linphone_core_manager_new("marie");
	LinphoneCoreManager *pauline = presence_linphone_core_manager_new("pauline");
	LinphonePresenceModel *presence;
	LinphonePresenceActivity *activity = NULL;
	LinphonePresenceNote *note = NULL;
	const char *description = NULL;
	const char *note_content = NULL;
	char *contact2;
	time_t current_timestamp, presence_timestamp;

	/* marie must be subscribed to pauline's presence before publishing. */
	CU_ASSERT_TRUE(subscribe_to_callee_presence(marie, pauline));

	/* Presence activity without description: the received activity must have
	 * a NULL description. */
	presence = linphone_presence_model_new_with_activity(LinphonePresenceActivityDinner, NULL);
	linphone_core_set_presence_model(pauline->lc, presence);
	wait_for(marie->lc,pauline->lc,&marie->stat.number_of_LinphonePresenceActivityDinner,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphonePresenceActivityDinner, 1);
	activity = linphone_presence_model_get_activity(marie->stat.last_received_presence);
	CU_ASSERT_PTR_NOT_NULL(activity);
	CU_ASSERT_EQUAL(linphone_presence_activity_get_type(activity), LinphonePresenceActivityDinner);
	description = linphone_presence_activity_get_description(activity);
	CU_ASSERT_PTR_NULL(description);

	/* Presence activity with description: the description string must survive
	 * the round trip unchanged. */
	presence = linphone_presence_model_new_with_activity(LinphonePresenceActivitySteering, bike_description);
	linphone_core_set_presence_model(pauline->lc, presence);
	wait_for(marie->lc,pauline->lc,&marie->stat.number_of_LinphonePresenceActivitySteering,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphonePresenceActivitySteering, 1);
	activity = linphone_presence_model_get_activity(marie->stat.last_received_presence);
	CU_ASSERT_PTR_NOT_NULL(activity);
	CU_ASSERT_EQUAL(linphone_presence_activity_get_type(activity), LinphonePresenceActivitySteering);
	description = linphone_presence_activity_get_description(activity);
	CU_ASSERT_PTR_NOT_NULL(description);
	if (description != NULL) CU_ASSERT_EQUAL(strcmp(description, bike_description), 0);

	/* Presence activity with description and note: activity has no
	 * description, but the note content must be received. */
	presence = linphone_presence_model_new_with_activity_and_note(LinphonePresenceActivityVacation, NULL, vacation_note, vacation_lang);
	linphone_core_set_presence_model(pauline->lc, presence);
	wait_for(marie->lc,pauline->lc,&marie->stat.number_of_LinphonePresenceActivityVacation,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphonePresenceActivityVacation, 1);
	activity = linphone_presence_model_get_activity(marie->stat.last_received_presence);
	CU_ASSERT_PTR_NOT_NULL(activity);
	CU_ASSERT_EQUAL(linphone_presence_activity_get_type(activity), LinphonePresenceActivityVacation);
	description = linphone_presence_activity_get_description(activity);
	CU_ASSERT_PTR_NULL(description);
	note = linphone_presence_model_get_note(marie->stat.last_received_presence, NULL);
	CU_ASSERT_PTR_NOT_NULL(note);
	if (note != NULL) {
		note_content = linphone_presence_note_get_content(note);
		CU_ASSERT_PTR_NOT_NULL(note_content);
		if (note_content != NULL) {
			CU_ASSERT_EQUAL(strcmp(note_content, vacation_note), 0);
		}
	}

	/* Presence contact: set on the local model, read back and compared.
	 * The returned string is owned by the caller and must be freed. */
	presence = linphone_presence_model_new_with_activity(LinphonePresenceActivityOnThePhone, NULL);
	linphone_presence_model_set_contact(presence, contact);
	linphone_core_set_presence_model(pauline->lc, presence);
	wait_for(marie->lc,pauline->lc,&marie->stat.number_of_LinphonePresenceActivityOnThePhone,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphonePresenceActivityOnThePhone, 1);
	contact2 = linphone_presence_model_get_contact(presence);
	CU_ASSERT_PTR_NOT_NULL(contact2);
	if (contact2 != NULL) {
		CU_ASSERT_EQUAL(strcmp(contact, contact2), 0);
		ms_free(contact2);
	}

	/* Presence timestamp: must be stamped at or after the time the model was
	 * created. */
	current_timestamp = ms_time(NULL);
	presence = linphone_presence_model_new_with_activity(LinphonePresenceActivityShopping, NULL);
	linphone_core_set_presence_model(pauline->lc, presence);
	wait_for(marie->lc,pauline->lc,&marie->stat.number_of_LinphonePresenceActivityShopping,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphonePresenceActivityShopping, 1);
	presence_timestamp = linphone_presence_model_get_timestamp(presence);
	CU_ASSERT_TRUE(presence_timestamp >= current_timestamp);

	linphone_core_manager_destroy(marie);
	linphone_core_manager_destroy(pauline);
}
/*
 * Process an incoming RTCP packet (SR or RR) for the stateful QoS analyzer.
 * When the packet carries a report block about our own send SSRC and the loss
 * rate estimator accepts it, records a new (bandwidth, loss, rtt) measurement
 * point into the sorted rtcpstatspoint list.
 * Returns TRUE if a measurement was consumed, FALSE otherwise.
 */
static bool_t stateful_analyzer_process_rtcp(MSQosAnalyzer *objbase, mblk_t *rtcp){
	MSStatefulQosAnalyzer *obj=(MSStatefulQosAnalyzer*)objbase;
	const report_block_t *rb=NULL;
	/* Extract the first report block from either an SR or an RR packet. */
	if (rtcp_is_SR(rtcp)){
		rb=rtcp_SR_get_report_block(rtcp,0);
	}else if (rtcp_is_RR(rtcp)){
		rb=rtcp_RR_get_report_block(rtcp,0);
	}

	/* Only consider reports that describe our own outgoing stream. */
	if (rb && report_block_get_ssrc(rb)==rtp_session_get_send_ssrc(obj->session)){
		if (ortp_loss_rate_estimator_process_report_block(objbase->lre,&obj->session->rtp,rb)){
			int i;
			float loss_rate = ortp_loss_rate_estimator_get_value(objbase->lre);
			/* Upload bandwidth averaged up to the remote's highest received
			 * sequence number, so it matches the interval the report covers. */
			float up_bw = stateful_qos_analyzer_upload_bandwidth(obj,report_block_get_high_ext_seq(rb));
			obj->curindex++;

			/*flush bandwidth estimation measures for seq number lower than remote report block received*/
			for (i=0;i<BW_HISTORY;i++){
				if (obj->upload_bandwidth[i].seq_number<report_block_get_high_ext_seq(rb)){
					obj->upload_bandwidth[i].seq_number=0;
					obj->upload_bandwidth[i].up_bandwidth=0.f;
				}
			}

			/* Always skip the first report, since values might be erroneous due
			to initialization of multiples objects (encoder/decoder/stats computing..)
			Instead assume loss rate is a good estimation of network capacity */
			if (obj->curindex==1) {
				obj->network_loss_rate=loss_rate;
				return TRUE;
			}

			/* Record the new measurement point, kept sorted by bandwidth. */
			obj->latest=ms_new0(rtcpstatspoint_t, 1);
			obj->latest->timestamp=ms_time(0);
			obj->latest->bandwidth=up_bw;
			obj->latest->loss_percent=loss_rate;
			obj->latest->rtt=rtp_session_get_round_trip_propagation(obj->session);

			obj->rtcpstatspoint=ms_list_insert_sorted(obj->rtcpstatspoint, obj->latest, (MSCompareFunc)sort_by_bandwidth);

			/*if the measure was 0% loss, reset to 0% every measures below it*/
			/* (the list is bandwidth-sorted, so everything before this point
			 * has lower or equal bandwidth and cannot be lossier). */
			if (obj->latest->loss_percent < 1e-5){
				MSList *it=obj->rtcpstatspoint;
				MSList *latest_pos=ms_list_find(obj->rtcpstatspoint,obj->latest);
				while (it!=latest_pos->next){
					((rtcpstatspoint_t *)it->data)->loss_percent=0.f;
					it = it->next;
				}
			}
			ms_message("MSStatefulQosAnalyzer[%p]: one more %d: %f %f",
				obj, obj->curindex-1, obj->latest->bandwidth, obj->latest->loss_percent);

			/* Bound the history: drop points older than 60 s once the list
			 * exceeds ESTIM_HISTORY entries. */
			if (ms_list_size(obj->rtcpstatspoint) > ESTIM_HISTORY){
				int prev_size = ms_list_size(obj->rtcpstatspoint);
				/*clean everything which occurred 60 sec or more ago*/
				time_t clear_time = ms_time(0) - 60;
				obj->rtcpstatspoint = ms_list_remove_custom(obj->rtcpstatspoint, (MSCompareFunc)earlier_than, &clear_time);
				ms_message("MSStatefulQosAnalyzer[%p]: reached list maximum capacity "
					"(count=%d) --> Cleaned list (count=%d)",
					obj, prev_size, ms_list_size(obj->rtcpstatspoint));
			}
			return TRUE;
		}
	}
	return FALSE;
}
/*
 * Build and start a full-duplex audio stream: configures the RTP session,
 * creates the encoder/decoder and all optional processing filters (DTMF,
 * volume/AGC, echo canceller, equalizer, PLC, resamplers, mixed recording),
 * links the sending and receiving filter graphs, and attaches them to the
 * ticker.
 * Filters are created before they are configured and configured before they
 * are linked — the call order below is significant.
 * Returns 0 on success, -1 on error (unknown payload, unknown sample rate,
 * or no codec available). Note: error paths return without tearing down
 * filters already created.
 */
int audio_stream_start_full(AudioStream *stream, RtpProfile *profile, const char *rem_rtp_ip,int rem_rtp_port,
	const char *rem_rtcp_ip, int rem_rtcp_port, int payload,int jitt_comp, const char *infile, const char *outfile,
	MSSndCard *playcard, MSSndCard *captcard, bool_t use_ec)
{
	RtpSession *rtps=stream->ms.session;
	PayloadType *pt,*tel_ev;
	int tmp;
	MSConnectionHelper h;
	int sample_rate;
	MSRtpPayloadPickerContext picker_context;
	bool_t has_builtin_ec=FALSE;

	/* RTP session configuration. RTCP is disabled when no RTCP port is given. */
	rtp_session_set_profile(rtps,profile);
	if (rem_rtp_port>0) rtp_session_set_remote_addr_full(rtps,rem_rtp_ip,rem_rtp_port,rem_rtcp_ip,rem_rtcp_port);
	if (rem_rtcp_port<=0){
		rtp_session_enable_rtcp(rtps,FALSE);
	}
	rtp_session_set_payload_type(rtps,payload);
	rtp_session_set_jitter_compensation(rtps,jitt_comp);

	if (rem_rtp_port>0)
		ms_filter_call_method(stream->ms.rtpsend,MS_RTP_SEND_SET_SESSION,rtps);
	stream->ms.rtprecv=ms_filter_new(MS_RTP_RECV_ID);
	ms_filter_call_method(stream->ms.rtprecv,MS_RTP_RECV_SET_SESSION,rtps);
	stream->ms.session=rtps;

	/* Outgoing DTMF tone generator (optional feature). */
	if((stream->features & AUDIO_STREAM_FEATURE_DTMF) != 0)
		stream->dtmfgen=ms_filter_new(MS_DTMF_GEN_ID);
	else
		stream->dtmfgen=NULL;
	rtp_session_signal_connect(rtps,"telephone-event",(RtpCallback)on_dtmf_received,(unsigned long)stream);
	rtp_session_signal_connect(rtps,"payload_type_changed",(RtpCallback)mediastream_payload_type_changed,(unsigned long)&stream->ms);

	/* creates the local part */
	/* Capture side: sound card reader, or file player with a resampler. */
	if (captcard!=NULL){
		if (stream->soundread==NULL)
			stream->soundread=ms_snd_card_create_reader(captcard);
		has_builtin_ec=!!(ms_snd_card_get_capabilities(captcard) & MS_SND_CARD_CAP_BUILTIN_ECHO_CANCELLER);
	}else {
		stream->soundread=ms_filter_new(MS_FILE_PLAYER_ID);
		stream->read_resampler=ms_filter_new(MS_RESAMPLE_ID);
		if (infile!=NULL) audio_stream_play(stream,infile);
	}
	/* Playback side: sound card writer, or file recorder. */
	if (playcard!=NULL) {
		if (stream->soundwrite==NULL)
			stream->soundwrite=ms_snd_card_create_writer(playcard);
	}else {
		stream->soundwrite=ms_filter_new(MS_FILE_REC_ID);
		if (outfile!=NULL) audio_stream_record(stream,outfile);
	}

	/* creates the couple of encoder/decoder */
	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		ms_error("audiostream.c: undefined payload type.");
		return -1;
	}
	tel_ev=rtp_profile_get_payload_from_mime (profile,"telephone-event");
	if ((stream->features & AUDIO_STREAM_FEATURE_DTMF_ECHO) != 0
		&& (tel_ev==NULL || ( (tel_ev->flags & PAYLOAD_TYPE_FLAG_CAN_RECV) && !(tel_ev->flags & PAYLOAD_TYPE_FLAG_CAN_SEND)))
		&& ( strcasecmp(pt->mime_type,"pcmu")==0 || strcasecmp(pt->mime_type,"pcma")==0)){
		/*if no telephone-event payload is usable and pcma or pcmu is used, we will generate
		 inband dtmf*/
		stream->dtmfgen_rtp=ms_filter_new (MS_DTMF_GEN_ID);
	} else {
		stream->dtmfgen_rtp=NULL;
	}

	if (ms_filter_call_method(stream->ms.rtpsend,MS_FILTER_GET_SAMPLE_RATE,&sample_rate)!=0){
		ms_error("Sample rate is unknown for RTP side !");
		return -1;
	}

	stream->ms.encoder=ms_filter_create_encoder(pt->mime_type);
	stream->ms.decoder=ms_filter_create_decoder(pt->mime_type);
	if ((stream->ms.encoder==NULL) || (stream->ms.decoder==NULL)){
		/* big problem: we have not a registered codec for this payload...*/
		ms_error("audio_stream_start_full: No decoder or encoder available for payload %s.",pt->mime_type);
		return -1;
	}
	/* Decoder with FEC support gets a payload picker to fetch redundant data. */
	if (ms_filter_has_method(stream->ms.decoder, MS_FILTER_SET_RTP_PAYLOAD_PICKER)) {
		ms_message(" decoder has FEC capabilities");
		picker_context.filter_graph_manager=stream;
		picker_context.picker=&audio_stream_payload_picker;
		ms_filter_call_method(stream->ms.decoder,MS_FILTER_SET_RTP_PAYLOAD_PICKER, &picker_context);
	}

	/* Optional volume controllers on each direction. */
	if((stream->features & AUDIO_STREAM_FEATURE_VOL_SND) != 0)
		stream->volsend=ms_filter_new(MS_VOLUME_ID);
	else
		stream->volsend=NULL;
	if((stream->features & AUDIO_STREAM_FEATURE_VOL_RCV) != 0)
		stream->volrecv=ms_filter_new(MS_VOLUME_ID);
	else
		stream->volrecv=NULL;

	audio_stream_enable_echo_limiter(stream,stream->el_type);
	audio_stream_enable_noise_gate(stream,stream->use_ng);

	/* AGC requires a send-side volume filter; create it on demand. */
	if (stream->use_agc){
		int tmp=1;
		if (stream->volsend==NULL)
			stream->volsend=ms_filter_new(MS_VOLUME_ID);
		ms_filter_call_method(stream->volsend,MS_VOLUME_ENABLE_AGC,&tmp);
	}

	if (stream->dtmfgen) {
		ms_filter_call_method(stream->dtmfgen,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
		ms_filter_call_method(stream->dtmfgen,MS_FILTER_SET_NCHANNELS,&pt->channels);
	}
	if (stream->dtmfgen_rtp) {
		ms_filter_call_method(stream->dtmfgen_rtp,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
		ms_filter_call_method(stream->dtmfgen_rtp,MS_FILTER_SET_NCHANNELS,&pt->channels);
	}

	/* give the sound filters some properties */
	if (ms_filter_call_method(stream->soundread,MS_FILTER_SET_SAMPLE_RATE,&sample_rate) != 0) {
		/* need to add resampler*/
		if (stream->read_resampler == NULL) stream->read_resampler=ms_filter_new(MS_RESAMPLE_ID);
	}
	ms_filter_call_method(stream->soundread,MS_FILTER_SET_NCHANNELS,&pt->channels);

	if (ms_filter_call_method(stream->soundwrite,MS_FILTER_SET_SAMPLE_RATE,&sample_rate) != 0) {
		/* need to add resampler*/
		if (stream->write_resampler == NULL) stream->write_resampler=ms_filter_new(MS_RESAMPLE_ID);
	}
	ms_filter_call_method(stream->soundwrite,MS_FILTER_SET_NCHANNELS,&pt->channels);

	// Override feature: disable software EC when the caller asked for none
	// or the sound card has a built-in echo canceller.
	if ( ((stream->features & AUDIO_STREAM_FEATURE_EC) && !use_ec) || has_builtin_ec )
		stream->features &=~AUDIO_STREAM_FEATURE_EC;

	/*configure the echo canceller if required */
	if ((stream->features & AUDIO_STREAM_FEATURE_EC) == 0 && stream->ec != NULL) {
		ms_filter_destroy(stream->ec);
		stream->ec=NULL;
	}
	if (stream->ec){
		if (!stream->is_ec_delay_set){
			int delay_ms=ms_snd_card_get_minimal_latency(captcard);
			if (delay_ms!=0){
				ms_message("Setting echo canceller delay with value provided by soundcard: %i ms",delay_ms);
				ms_filter_call_method(stream->ec,MS_ECHO_CANCELLER_SET_DELAY,&delay_ms);
			}
		}
		ms_filter_call_method(stream->ec,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
	}

	/* Mixed recording: tees on both directions feed a mixer + file recorder.
	 * Tees start muted; recording is presumably enabled later. */
	if (stream->features & AUDIO_STREAM_FEATURE_MIXED_RECORDING){
		int val=0;
		int pin=1;
		stream->recorder=ms_filter_new(MS_FILE_REC_ID);
		stream->recorder_mixer=ms_filter_new(MS_AUDIO_MIXER_ID);
		stream->recv_tee=ms_filter_new(MS_TEE_ID);
		stream->send_tee=ms_filter_new(MS_TEE_ID);
		ms_filter_call_method(stream->recorder_mixer,MS_AUDIO_MIXER_ENABLE_CONFERENCE_MODE,&val);
		ms_filter_call_method(stream->recorder_mixer,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
		ms_filter_call_method(stream->recorder_mixer,MS_FILTER_SET_NCHANNELS,&pt->channels);
		ms_filter_call_method(stream->recv_tee,MS_TEE_MUTE,&pin);
		ms_filter_call_method(stream->send_tee,MS_TEE_MUTE,&pin);
		ms_filter_call_method(stream->recorder,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
		ms_filter_call_method(stream->recorder,MS_FILTER_SET_NCHANNELS,&pt->channels);
	}

	/* give the encoder/decoder some parameters*/
	ms_filter_call_method(stream->ms.encoder,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
	ms_message("Payload's bitrate is %i",pt->normal_bitrate);
	if (pt->normal_bitrate>0){
		ms_message("Setting audio encoder network bitrate to %i",pt->normal_bitrate);
		ms_filter_call_method(stream->ms.encoder,MS_FILTER_SET_BITRATE,&pt->normal_bitrate);
	}
	ms_filter_call_method(stream->ms.encoder,MS_FILTER_SET_NCHANNELS,&pt->channels);
	ms_filter_call_method(stream->ms.decoder,MS_FILTER_SET_SAMPLE_RATE,&sample_rate);
	ms_filter_call_method(stream->ms.decoder,MS_FILTER_SET_NCHANNELS,&pt->channels);

	/* Forward payload fmtp parameters (e.g. ptime) to the codec filters. */
	if (pt->send_fmtp!=NULL) {
		char value[16]={0};
		int ptime;
		if (ms_filter_has_method(stream->ms.encoder,MS_AUDIO_ENCODER_SET_PTIME)){
			if (fmtp_get_value(pt->send_fmtp,"ptime",value,sizeof(value)-1)){
				ptime=atoi(value);
				ms_filter_call_method(stream->ms.encoder,MS_AUDIO_ENCODER_SET_PTIME,&ptime);
			}
		}
		ms_filter_call_method(stream->ms.encoder,MS_FILTER_ADD_FMTP, (void*)pt->send_fmtp);
	}
	if (pt->recv_fmtp!=NULL) ms_filter_call_method(stream->ms.decoder,MS_FILTER_ADD_FMTP,(void*)pt->recv_fmtp);

	/*create the equalizer*/
	if ((stream->features & AUDIO_STREAM_FEATURE_EQUALIZER) != 0){
		stream->equalizer=ms_filter_new(MS_EQUALIZER_ID);
		if(stream->equalizer) {
			tmp=stream->eq_active;
			ms_filter_call_method(stream->equalizer,MS_EQUALIZER_SET_ACTIVE,&tmp);
		}
	}else
		stream->equalizer=NULL;

	/*configure resampler if needed*/
	ms_filter_call_method(stream->ms.rtpsend, MS_FILTER_SET_NCHANNELS, &pt->channels);
	ms_filter_call_method(stream->ms.rtprecv, MS_FILTER_SET_NCHANNELS, &pt->channels);
	if (stream->read_resampler){
		audio_stream_configure_resampler(stream->read_resampler,stream->soundread,stream->ms.rtpsend);
	}
	if (stream->write_resampler){
		audio_stream_configure_resampler(stream->write_resampler,stream->ms.rtprecv,stream->soundwrite);
	}

	if (stream->ms.use_rc){
		stream->ms.rc=ms_audio_bitrate_controller_new(stream->ms.session,stream->ms.encoder,0);
	}

	/* Create PLC */
	/* A generic packet-loss-concealment filter is added only when the decoder
	 * does not provide its own PLC. */
	if ((stream->features & AUDIO_STREAM_FEATURE_PLC) != 0) {
		int decoder_have_plc = 0;
		if (ms_filter_has_method(stream->ms.decoder, MS_AUDIO_DECODER_HAVE_PLC)) {
			if (ms_filter_call_method(stream->ms.decoder, MS_AUDIO_DECODER_HAVE_PLC, &decoder_have_plc) != 0) {
				ms_warning("MS_AUDIO_DECODER_HAVE_PLC function error: enable default plc");
			}
		} else {
			ms_warning("MS_DECODER_HAVE_PLC function not implemented by the decoder: enable default plc");
		}
		if (decoder_have_plc == 0) {
			stream->plc = ms_filter_new(MS_GENERIC_PLC_ID);
		}
		if (stream->plc) {
			ms_filter_call_method(stream->plc, MS_FILTER_SET_NCHANNELS, &pt->channels);
			ms_filter_call_method(stream->plc, MS_FILTER_SET_SAMPLE_RATE, &sample_rate);
		}
	} else {
		stream->plc = NULL;
	}

	/* create ticker */
	if (stream->ms.ticker==NULL) start_ticker(&stream->ms);
	else{
		/*we were using the dummy preload graph, destroy it*/
		if (stream->dummy) stop_preload_graph(stream);
	}

	/* and then connect all */
	/* tip: draw yourself the picture if you don't understand */

	/*sending graph*/
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h,stream->soundread,-1,0);
	if (stream->read_resampler)
		ms_connection_helper_link(&h,stream->read_resampler,0,0);
	if (stream->ec)
		ms_connection_helper_link(&h,stream->ec,1,1);
	if (stream->volsend)
		ms_connection_helper_link(&h,stream->volsend,0,0);
	if (stream->dtmfgen_rtp)
		ms_connection_helper_link(&h,stream->dtmfgen_rtp,0,0);
	if (stream->send_tee)
		ms_connection_helper_link(&h,stream->send_tee,0,0);
	ms_connection_helper_link(&h,stream->ms.encoder,0,0);
	ms_connection_helper_link(&h,stream->ms.rtpsend,0,-1);

	/*receiving graph*/
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h,stream->ms.rtprecv,-1,0);
	ms_connection_helper_link(&h,stream->ms.decoder,0,0);
	if (stream->plc)
		ms_connection_helper_link(&h,stream->plc,0,0);
	if (stream->dtmfgen)
		ms_connection_helper_link(&h,stream->dtmfgen,0,0);
	if (stream->volrecv)
		ms_connection_helper_link(&h,stream->volrecv,0,0);
	if (stream->recv_tee)
		ms_connection_helper_link(&h,stream->recv_tee,0,0);
	if (stream->equalizer)
		ms_connection_helper_link(&h,stream->equalizer,0,0);
	if (stream->ec)
		ms_connection_helper_link(&h,stream->ec,0,0);
	if (stream->write_resampler)
		ms_connection_helper_link(&h,stream->write_resampler,0,0);
	ms_connection_helper_link(&h,stream->soundwrite,0,-1);

	/*call recording part, attached to both outgoing and incoming graphs*/
	if (stream->recorder){
		ms_filter_link(stream->send_tee,1,stream->recorder_mixer,0);
		ms_filter_link(stream->recv_tee,1,stream->recorder_mixer,1);
		ms_filter_link(stream->recorder_mixer,0,stream->recorder,0);
	}

	/*to make sure all preprocess are done before processing audio*/
	ms_ticker_attach_multiple(stream->ms.ticker
				,stream->soundread
				,stream->ms.rtprecv
				,NULL);

	stream->ms.start_time=ms_time(NULL);
	stream->ms.is_beginning=TRUE;

	return 0;
}
int v4lGrab (struct vdIn *vd ) { static int frame = 0; int len; int status; int count = 0; int size; int erreur = 0; int jpegsize = 0; int qualite = 1024; struct frame_t *headerframe; double timecourant =0; double temps = 0; timecourant = ms_time(); if (vd->grabMethod) { vd->vmmap.height = vd->hdrheight; vd->vmmap.width = vd->hdrwidth; vd->vmmap.format = vd->formatIn; if (ioctl (vd->fd, VIDIOCSYNC,&vd->vmmap.frame) < 0) { perror ("cvsync err\n"); erreur = -1; } /* Is there someone using the frame */ while((vd->framelock[vd->frame_cour] != 0) && vd->signalquit) usleep(500); pthread_mutex_lock (&vd->grabmutex); /* memcpy (vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t), vd->pFramebuffer + vd->videombuf.offsets[vd->vmmap.frame] , vd->framesizeIn); jpegsize =jpeg_compress(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t),vd->framesizeIn, vd->pFramebuffer + vd->videombuf.offsets[vd->vmmap.frame] ,vd->hdrwidth, vd->hdrheight, qualite); */ temps = ms_time(); jpegsize= convertframe(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t), vd->pFramebuffer + vd->videombuf.offsets[vd->vmmap.frame], vd->hdrwidth,vd->hdrheight,vd->formatIn,qualite); headerframe=(struct frame_t*)vd->ptframe[vd->frame_cour]; snprintf(headerframe->header,5,"%s","SPCA"); headerframe->seqtimes = ms_time(); headerframe->deltatimes=(int)(headerframe->seqtimes-timecourant); headerframe->w = vd->hdrwidth; headerframe->h = vd->hdrheight; headerframe->size = (( jpegsize < 0)?0:jpegsize); headerframe->format = vd->formatIn; headerframe->nbframe = frame++; // printf("compress frame %d times %f\n",frame, headerframe->seqtimes-temps); pthread_mutex_unlock (&vd->grabmutex); /************************************/ if ((ioctl (vd->fd, VIDIOCMCAPTURE, &(vd->vmmap))) < 0) { perror ("cmcapture"); printf (">>cmcapture err %d\n", status); erreur = -1; } vd->vmmap.frame = (vd->vmmap.frame + 1) % vd->videombuf.frames; vd->frame_cour = (vd->frame_cour +1) % OUTFRMNUMB; //printf("frame nb %d\n",vd->vmmap.frame); } else 
{ /* read method */ size = vd->framesizeIn; len = read (vd->fd, vd->pFramebuffer, size); if (len <= 0 ) { printf ("v4l read error\n"); printf ("len %d asked %d \n", len, size); return 0; } /* Is there someone using the frame */ while((vd->framelock[vd->frame_cour] != 0)&& vd->signalquit) usleep(1000); pthread_mutex_lock (&vd->grabmutex); /* memcpy (vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t), vd->pFramebuffer, vd->framesizeIn); jpegsize =jpeg_compress(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t),len, vd->pFramebuffer, vd->hdrwidth, vd->hdrheight, qualite); */ temps = ms_time(); jpegsize= convertframe(vd->ptframe[vd->frame_cour]+ sizeof(struct frame_t), vd->pFramebuffer , vd->hdrwidth,vd->hdrheight,vd->formatIn,qualite); headerframe=(struct frame_t*)vd->ptframe[vd->frame_cour]; snprintf(headerframe->header,5,"%s","SPCA"); headerframe->seqtimes = ms_time(); headerframe->deltatimes=(int)(headerframe->seqtimes-timecourant); headerframe->w = vd->hdrwidth; headerframe->h = vd->hdrheight; headerframe->size = (( jpegsize < 0)?0:jpegsize);; headerframe->format = vd->formatIn; headerframe->nbframe = frame++; // printf("compress frame %d times %f\n",frame, headerframe->seqtimes-temps); vd->frame_cour = (vd->frame_cour +1) % OUTFRMNUMB; pthread_mutex_unlock (&vd->grabmutex); /************************************/ } return erreur; }