Example no. 1
0
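/* Depth-first traversal of the filter graph: marks f as seen, appends it to *filters,
 * then recurses into every connected upstream and downstream filter. A NULL filter, or a
 * filter whose declared outputs are all left unconnected, is reported as a fatal error. */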
static void find_filters(MSList **filters, MSFilter *f ) {
    int i,found;
    MSQueue *link;
    if (f==NULL) ms_fatal("Bad graph.");
    /*ms_message("seeing %s, seen=%i",f->desc->name,f->seen);*/
    if (f->seen) {
        return;
    }
    f->seen=TRUE;
    *filters=ms_list_append(*filters,f);
    /* go upstream */
    for(i=0; i<f->desc->ninputs; i++) {
        link=f->inputs[i];
        if (link!=NULL) find_filters(filters,link->prev.filter);
    }
    /* go downstream */
    for(i=0,found=0; i<f->desc->noutputs; i++) {
        link=f->outputs[i];
        if (link!=NULL) {
            found++;
            find_filters(filters,link->next.filter);
        }
    }
    if (f->desc->noutputs>=1 && found==0) {
        ms_fatal("Bad graph: filter %s has %i outputs, none is connected.",f->desc->name,f->desc->noutputs);
    }
}
Example no. 2
0
	AndroidReaderContext():frame(0),fps(5){
		ms_message("Creating AndroidReaderContext for Android VIDEO capture filter");

		ms_mutex_init(&mutex,NULL);

		JNIEnv *env = ms_get_jni_env();
		managerClass = env->FindClass("org/linphone/core/AndroidCameraRecordManager");
		managerClass = (jclass) env->NewGlobalRef(managerClass);
		if (managerClass == 0) {
			ms_fatal("cannot register android video record manager class\n");
			return;
		}

		jmethodID getInstanceMethod = env->GetStaticMethodID(managerClass,"getInstance", "()Lorg/linphone/core/AndroidCameraRecordManager;");
		if (getInstanceMethod == 0) {
			ms_fatal("cannot find  singleton getter method\n");
			return;
		}

		// Get singleton AndroidCameraRecordManager for the default camera
		recorder = env->CallStaticObjectMethod(managerClass, getInstanceMethod);
		if (recorder == 0) {
			ms_fatal("cannot instantiate  %s\n", recorder);
			return;
		}

		recorder = env->NewGlobalRef(recorder);
		if (recorder == 0) {
			ms_fatal("cannot register  %s\n", recorder);
			return;
		}

	};
Example no. 3
0
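/* Opens the ffmpeg H264 decoder for this decoder instance; failing to find or open
 * the codec is a fatal error. */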
static void dec_open(DecData *d){
	AVCodec *codec;
	int error;
	codec=avcodec_find_decoder(CODEC_ID_H264);
	if (codec==NULL) ms_fatal("Could not find H264 decoder in ffmpeg.");
	avcodec_get_context_defaults(&d->av_context);
	error=avcodec_open(&d->av_context,codec);
	if (error!=0){
		ms_fatal("avcodec_open() failed.");
	}
}
Example no. 4
0
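/* Attaches the graph containing filter f to the ticker: collects every connected filter,
 * runs their preprocess step, then appends the graph's source filters to the ticker's
 * execution list under the ticker lock. */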
int ms_ticker_attach(MSTicker *ticker,MSFilter *f)
{
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;
	
	if (f->ticker!=NULL) {
		ms_message("Filter %s is already being scheduled; nothing to do.",f->desc->name);
		return 0;
	}

	find_filters(&filters,f);
	sources=get_sources(filters);
	if (sources==NULL){
		ms_fatal("No sources found around filter %s",f->desc->name);
		ms_list_free(filters);
		return -1;
	}
	/*run preprocess on each filter: */
	for(it=filters;it!=NULL;it=it->next)
		ms_filter_preprocess((MSFilter*)it->data,ticker);
	ms_mutex_lock(&ticker->lock);
	ticker->execution_list=ms_list_concat(ticker->execution_list,sources);
	ms_mutex_unlock(&ticker->lock);
	ms_list_free(filters);
	return 0;
}
Example no. 5
0
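/* Tester helper: builds a LinphoneCore from an optional factory config file found under
 * 'path', points ring tones, root CA, static picture and the DNS user-hosts file at the
 * resource tree, then enables SAL test features and, if requested, IPv6. */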
LinphoneCore* configure_lc_from(LinphoneCoreVTable* v_table, const char* path, const char* file, void* user_data) {
	LinphoneCore* lc;
	LpConfig* config = NULL;
	char *filepath         = NULL;
	char *ringpath         = NULL;
	char *ringbackpath     = NULL;
	char *rootcapath       = NULL;
	char *dnsuserhostspath = NULL;
	char *nowebcampath     = NULL;

	if (path==NULL) path=".";

	if (file){
		filepath = ms_strdup_printf("%s/%s", path, file);
		if (ortp_file_exist(filepath) != 0) {
			ms_fatal("Could not find file %s in path %s, did you configured resources directory correctly?", file, path);
		}
		config = lp_config_new_with_factory(NULL,filepath);
	}


	// setup dynamic-path assets
	ringpath         = ms_strdup_printf("%s/sounds/oldphone.wav",path);
	ringbackpath     = ms_strdup_printf("%s/sounds/ringback.wav", path);
	nowebcampath     = ms_strdup_printf("%s/images/nowebcamCIF.jpg", path);
	rootcapath       = ms_strdup_printf("%s/certificates/cn/cafile.pem", path);
	dnsuserhostspath = ms_strdup_printf( "%s/%s", path, userhostsfile);


	if( config != NULL ) {
		lp_config_set_string(config, "sound", "remote_ring", ringbackpath);
		lp_config_set_string(config, "sound", "local_ring" , ringpath);
		lp_config_set_string(config, "sip",   "root_ca"    , rootcapath);
		lc = linphone_core_new_with_config(v_table, config, user_data);
	} else {
		lc = linphone_core_new(v_table,NULL,(filepath!=NULL&&filepath[0]!='\0') ? filepath : NULL, user_data);

		linphone_core_set_ring(lc, ringpath);
		linphone_core_set_ringback(lc, ringbackpath);
		linphone_core_set_root_ca(lc,rootcapath);
	}

	sal_enable_test_features(lc->sal,TRUE);
	sal_set_dns_user_hosts_file(lc->sal, dnsuserhostspath);
	linphone_core_set_static_picture(lc,nowebcampath);

	linphone_core_enable_ipv6(lc, liblinphone_tester_ipv6_enabled);

	ms_free(ringpath);
	ms_free(ringbackpath);
	ms_free(nowebcampath);
	ms_free(rootcapath);
	ms_free(dnsuserhostspath);

	if( filepath ) ms_free(filepath);

	if( config ) lp_config_unref(config);

	return lc;
}
Example no. 6
0
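/* Returns the current time in milliseconds, using whichever clock API is available on
 * the target platform (Windows CE, Win32, Mach/BSD or POSIX clock_gettime). */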
static uint64_t get_cur_time(void *unused){
#if defined(_WIN32_WCE)
	DWORD timemillis = GetTickCount();
	return timemillis;
#elif defined(WIN32)
	return timeGetTime() ;
#elif defined(__MACH__) && defined(__GNUC__) && (__GNUC__ >= 3)
	struct timeval tv;
	gettimeofday(&tv, NULL);
	return (tv.tv_sec*1000LL) + (tv.tv_usec/1000LL);
#elif defined(__MACH__)
	struct timespec ts;
	struct timeb time_val;
	
	ftime (&time_val);
	ts.tv_sec = time_val.time;
	ts.tv_nsec = time_val.millitm * 1000000;
	return (ts.tv_sec*1000LL) + (ts.tv_nsec/1000000LL);
#else
	struct timespec ts;
	if (clock_gettime(CLOCK_REALTIME,&ts)<0){
		ms_fatal("clock_gettime() doesn't work: %s",strerror(errno));
	}
	return (ts.tv_sec*1000LL) + (ts.tv_nsec/1000000LL);
#endif
}
Example no. 7
0
    msandroid_sound_write_data() :audio_track_class(0),audio_track(0),write_chunk_size(0),writtenBytes(0),last_sample_date(0) {
        bufferizer = ms_bufferizer_new();
        ms_cond_init(&cond,0);
        JNIEnv *jni_env = NULL;
        JavaVM *jvm = ms_get_jvm();
        if (jvm->AttachCurrentThread(&jni_env, NULL)!=0) {
            ms_fatal("AttachCurrentThread() failed !");
            return;
        }

        //JNIEnv *jni_env = ms_get_jni_env();
        jclass temp_class = 0;
        temp_class = jni_env->FindClass("android/media/AudioTrack");
        audio_track_class = (jclass)jni_env->NewGlobalRef(temp_class);
        if (audio_track_class == 0) {
            ms_error("cannot find  android/media/AudioTrack\n");
            return;
        }
        jmethodID hwrate_id = jni_env->GetStaticMethodID(audio_track_class,"getNativeOutputSampleRate", "(I)I");
        if (hwrate_id == 0) {
            ms_error("cannot find  int AudioRecord.getNativeOutputSampleRate(int streamType)");
            return;
        }
        rate = jni_env->CallStaticIntMethod(audio_track_class,hwrate_id,0 /*STREAM_VOICE_CALL*/);
        ms_message("Hardware sample rate is %i",rate);
        jvm->DetachCurrentThread();
    };
Example no. 8
0
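/* Variadic variant of ms_ticker_attach(): iterates over the NULL-terminated filter list,
 * preprocesses each graph that is not yet scheduled, and appends all discovered source
 * filters to the ticker's execution list in a single locked operation. */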
int ms_ticker_attach_multiple(MSTicker *ticker,MSFilter *f,...)
{
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;
	MSList *total_sources=NULL;
	va_list l;

	va_start(l,f);

	do{
		if (f->ticker==NULL) {
			filters=ms_filter_find_neighbours(f);
			sources=get_sources(filters);
			if (sources==NULL){
				ms_fatal("No sources found around filter %s",f->desc->name);
				ms_list_free(filters);
				break;
			}
			/*run preprocess on each filter: */
			for(it=filters;it!=NULL;it=it->next)
				ms_filter_preprocess((MSFilter*)it->data,ticker);
			ms_list_free(filters);
			total_sources=ms_list_concat(total_sources,sources);			
		}else ms_message("Filter %s is already being scheduled; nothing to do.",f->desc->name);
	}while ((f=va_arg(l,MSFilter*))!=NULL);
	va_end(l);
	if (total_sources){
		ms_mutex_lock(&ticker->lock);
		ticker->execution_list=ms_list_concat(ticker->execution_list,total_sources);
		ms_mutex_unlock(&ticker->lock);
	}
	return 0;
}
Example no. 9
0
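/* Preprocess hook of the Android sound capture filter: starts capture if needed, installs
 * the ticker synchronizer as time source and, when the built-in AEC is requested, fetches
 * the AudioRecord session id to enable the hardware echo canceller. */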
static void sound_read_preprocess(MSFilter *f) {
    msandroid_sound_read_data *d=(msandroid_sound_read_data*)f->data;
    ms_debug("andsnd_read_preprocess");
    if (!d->started)
        sound_read_setup(f);
    ms_ticker_set_time_func(f->ticker,(uint64_t (*)(void*))ms_ticker_synchronizer_get_corrected_time, d->ticker_synchronizer);

    if (d->builtin_aec && d->audio_record) {
        //JNIEnv *env=ms_get_jni_env();

        JNIEnv *env = NULL;
        JavaVM *jvm = ms_get_jvm();
        if (jvm->AttachCurrentThread(&env, NULL)!=0) {
            ms_fatal("AttachCurrentThread() failed !");
            return;
        }
        jmethodID getsession_id=0;
        int sessionId=-1;
        getsession_id = env->GetMethodID(d->audio_record_class,"getAudioSessionId", "()I");
        if(getsession_id==0) {
            ms_error("cannot find AudioRecord.getAudioSessionId() method");
            jvm->DetachCurrentThread();
            return;
        }
        sessionId = env->CallIntMethod(d->audio_record,getsession_id);
        ms_message("AudioRecord.getAudioSessionId() returned %i", sessionId);
        if (sessionId==-1) {
            jvm->DetachCurrentThread();
            return;
        }
        d->aec = enable_hardware_echo_canceller(env, sessionId);
        jvm->DetachCurrentThread();
    }
}
Example no. 10
0
MSScalerContext *ms_scaler_create_context(int src_w, int src_h, MSPixFmt src_fmt,
                                          int dst_w, int dst_h, MSPixFmt dst_fmt, int flags){
	if (scaler_impl)
		return scaler_impl->create_context(src_w,src_h,src_fmt,dst_w,dst_h,dst_fmt, flags);
	ms_fatal("No scaler implementation built-in, please supply one with ms_video_set_scaler_impl ()");
	return NULL;
}
Example no. 11
0
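/* Fills an MSPicture descriptor from a raw frame buffer: planar YUV420P is delegated to
 * ms_yuv_buf_init_from_mblk_with_size(), packed YUV and RGB formats are mapped to a single
 * plane with the appropriate stride, and any other format is a fatal error. */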
int ms_picture_init_from_mblk_with_size(MSPicture *buf, mblk_t *m, MSPixFmt fmt, int w, int h){
	if (m->b_cont!=NULL) m=m->b_cont; /*skip potential video header */
	switch(fmt){
		case MS_YUV420P:
			return ms_yuv_buf_init_from_mblk_with_size(buf,m,w,h);
		break;
		case MS_YUY2:
		case MS_YUYV:
		case MS_UYVY:
			memset(buf,0,sizeof(*buf));
			buf->w=w;
			buf->h=h;
			buf->planes[0]=m->b_rptr;
			buf->strides[0]=w*2;
		break;
		case MS_RGB24:
		case MS_RGB24_REV:
			memset(buf,0,sizeof(*buf));
			buf->w=w;
			buf->h=h;
			buf->planes[0]=m->b_rptr;
			buf->strides[0]=w*3;
		break;
		default:
			ms_fatal("FIXME: unsupported format %i",fmt);
			return -1;
	}
	return 0;
}
Example no. 12
0
void ms_filter_register(MSFilterDesc *desc) {
    if (desc->id==MS_FILTER_NOT_SET_ID) {
        ms_fatal("MSFilterId for %s not set !",desc->name);
    }
    /*lastly registered encoder/decoders may replace older ones*/
    desc_list=ms_list_prepend(desc_list,desc);
}
Example no. 13
0
void msandroid_sound_write_preprocess(MSFilter *f) {
    ms_debug("andsnd_write_preprocess");
    msandroid_sound_write_data *d=(msandroid_sound_write_data*)f->data;
    jmethodID constructor_id=0;

    int rc;
    jmethodID min_buff_size_id;

    //JNIEnv *jni_env = ms_get_jni_env();

    JNIEnv *jni_env = NULL;
    JavaVM *jvm = ms_get_jvm();
    if (jvm->AttachCurrentThread(&jni_env, NULL)!=0) {
        ms_fatal("AttachCurrentThread() failed !");
        return;
    }

    if (d->audio_track_class == 0) {
        jvm->DetachCurrentThread();
        return;
    }

    constructor_id = jni_env->GetMethodID(d->audio_track_class,"<init>", "(IIIIII)V");
    if (constructor_id == 0) {
        ms_error("cannot find  AudioTrack(int streamType, int sampleRateInHz, \
		int channelConfig, int audioFormat, int bufferSizeInBytes, int mode)");
        jvm->DetachCurrentThread();
        return;
    }
Example no. 14
0
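/* Detaches the graph containing filter f from the ticker: removes the graph's source
 * filters from the execution list under the ticker lock, then runs postprocess on every
 * filter of the graph. */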
int ms_ticker_detach(MSTicker *ticker,MSFilter *f){
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;

	if (f->ticker==NULL) {
		ms_message("Filter %s is not scheduled; nothing to do.",f->desc->name);
		return 0;
	}

	ms_mutex_lock(&ticker->lock);

	filters=ms_filter_find_neighbours(f);
	sources=get_sources(filters);
	if (sources==NULL){
		ms_fatal("No sources found around filter %s",f->desc->name);
		ms_list_free(filters);
		ms_mutex_unlock(&ticker->lock);
		return -1;
	}

	for(it=sources;it!=NULL;it=ms_list_next(it)){
		ticker->execution_list=ms_list_remove(ticker->execution_list,it->data);
	}
	ms_mutex_unlock(&ticker->lock);
	ms_list_for_each(filters,(void (*)(void*))ms_filter_postprocess);
	ms_list_free(filters);
	ms_list_free(sources);
	return 0;
}
Example no. 15
0
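/* Process callback of the Android video display filter: takes the most recent frame on
 * input 0, rescales it to RGB565 into the locked Java bitmap and asks the Java video
 * window to redraw; both inputs are flushed before returning. */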
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;

	ms_filter_lock(f);
	if (ad->jbitmap!=0 && !ad->orientation_change_pending){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				MSVideoSize wsize={ad->bmpinfo.width,ad->bmpinfo.height};
				MSVideoSize vsize={pic.w, pic.h};
				MSRect vrect;
				MSPicture dest={0};
				void *pixels=NULL;
				JNIEnv *jenv=ms_get_jni_env();

				if (!ms_video_size_equal(vsize,ad->vsize)){
					ms_message("Video to display has size %ix%i",vsize.width,vsize.height);
					ad->vsize=vsize;
					if (ad->sws){
						ms_scaler_context_free(ad->sws);
						ad->sws=NULL;
					}
					/*select_orientation(ad,wsize,vsize);*/
				}

				ms_layout_compute(wsize,vsize,vsize,-1,0,&vrect, NULL);

				if (ad->sws==NULL){
					ad->sws=ms_scaler_create_context (vsize.width,vsize.height,MS_YUV420P,
					                           vrect.w,vrect.h,MS_RGB565,MS_SCALER_METHOD_BILINEAR);
					if (ad->sws==NULL){
						ms_fatal("Could not obtain sws context !");
					}
				}

				if (sym_AndroidBitmap_lockPixels(jenv,ad->jbitmap,&pixels)==0){

					if (pixels!=NULL){
						dest.planes[0]=(uint8_t*)pixels+(vrect.y*ad->bmpinfo.stride)+(vrect.x*2);
						dest.strides[0]=ad->bmpinfo.stride;
						ms_scaler_process(ad->sws,pic.planes,pic.strides,dest.planes,dest.strides);
					}else ms_warning("Pixels==NULL in android bitmap !");

					sym_AndroidBitmap_unlockPixels(jenv,ad->jbitmap);
				}else{
					ms_error("AndroidBitmap_lockPixels() failed !");
				}

				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->update_id);

			}
		}
	}
	ms_filter_unlock(f);

	ms_queue_flush(f->inputs[0]);
	ms_queue_flush(f->inputs[1]);
}
Example no. 16
0
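/* Returns the connection point that follows f on its first output pin; having nothing
 * connected there is a fatal error. */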
static MSCPoint just_after(MSFilter *f){
	MSQueue *q;
	MSCPoint pnull={0};
	if ((q=f->outputs[0])!=NULL){
		return q->next;
	}
	ms_fatal("No filter after %s",f->desc->name);
	return pnull;
}
Example no. 17
0
int liblinphone_tester_setup() {
	if (manager_count != 0) {
		// Fail if some LinphoneCore managers have not been destroyed: continuing
		// would make CUnit crash, and we should NEVER keep a manager alive.
		ms_fatal("%d linphone core manager still alive!", manager_count);
		return 1;
	}
	return 0;
}
Example no. 18
0
/*returns NULL; destroys the op once its ref count reaches 0*/
void* sal_op_unref(SalOp* op) {
	op->ref--;
	if (op->ref==0) {
		sal_op_release_impl(op);
	}else if (op->ref<0){
		ms_fatal("SalOp [%p]: too many unrefs.",op);
	}
	return NULL;
}
Example no. 19
0
void ms_factory_register_filter(MSFactory* factory, MSFilterDesc* desc ) {
	if (desc->id==MS_FILTER_NOT_SET_ID){
		ms_fatal("MSFilterId for %s not set !",desc->name);
	}
	desc->flags|=MS_FILTER_IS_ENABLED; /*by default a registered filter is enabled*/

	/*lastly registered encoder/decoders may replace older ones*/
	factory->desc_list=ms_list_prepend(factory->desc_list,desc);
}
Example no. 20
0
static MSCPoint just_before(MSFilter *f){
	MSQueue *q;
	MSCPoint pnull={0};
	if ((q=f->inputs[0])!=NULL){
		return q->prev;
	}
	ms_fatal("No filter before %s",f->desc->name);
	return pnull;
}
Example no. 21
0
static int find_free_pin(MSFilter *mixer){
	int i;
	for(i=0;i<mixer->desc->ninputs;++i){
		if (mixer->inputs[i]==NULL){
			return i;
		}
	}
	ms_fatal("No more free pin in mixer filter");
	return -1;
}
Example no. 22
0
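/* Returns the JNIEnv associated with the calling thread, attaching the thread to the JVM
 * and caching the pointer in thread-specific storage on first use; calling it without a
 * registered JVM, or on Windows, is a fatal error. */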
JNIEnv *ms_get_jni_env(void){
	JNIEnv *env=NULL;
	if (ms2_vm==NULL){
		ms_fatal("Calling ms_get_jni_env() while no jvm has been set using ms_set_jvm().");
	}else{
#ifndef _WIN32
		env=(JNIEnv*)pthread_getspecific(jnienv_key);
		if (env==NULL){
			if ((*ms2_vm)->AttachCurrentThread(ms2_vm,&env,NULL)!=0){
				ms_fatal("AttachCurrentThread() failed !");
				return NULL;
			}
			pthread_setspecific(jnienv_key,env);
		}
#else
		ms_fatal("ms_get_jni_env() not implemented on windows.");
#endif
	}
	return env;
}
Example no. 23
0
const char* sal_transport_to_string(SalTransport transport) {
	switch (transport) {
		case SalTransportUDP:return "udp";
		case SalTransportTCP: return "tcp";
		case SalTransportTLS:return "tls";
		case SalTransportDTLS:return "dtls";
		default: {
			ms_fatal("Unexpected transport [%i]",transport);
			return NULL;
		}    
	}
}
Example no. 24
0
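/* Dispatches a method invocation on a filter after checking that the method id belongs
 * either to the base filter interface or to this filter's own id; an id mismatch in the
 * call or in the filter's method table is a fatal error. */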
int ms_filter_call_method(MSFilter *f, unsigned int id, void *arg){
	MSFilterMethod *methods=f->desc->methods;
	int i;
	unsigned int magic=MS_FILTER_METHOD_GET_FID(id);
	if (magic!=MS_FILTER_BASE_ID && magic!=f->desc->id) {
		ms_fatal("Method type checking failed when calling %u on filter %s",id,f->desc->name);
		return -1;
	}
	for(i=0;methods!=NULL && methods[i].method!=NULL; i++){
		unsigned int mm=MS_FILTER_METHOD_GET_FID(methods[i].id);
		if (mm!=f->desc->id && mm!=MS_FILTER_BASE_ID) {
			ms_fatal("Bad method definition on filter %s. fid=%u , mm=%u",f->desc->name,f->desc->id,mm);
			return -1;
		}
		if (methods[i].id==id){
			return methods[i].method(f,arg);
		}
	}
	if (magic!=MS_FILTER_BASE_ID) ms_error("no such method on filter %s",f->desc->name);
	return -1;
}
Example no. 25
0
void kiss_fft_stride(kiss_fft_cfg st,const kiss_fft_cpx *fin,kiss_fft_cpx *fout,int in_stride)
{
    if (fin == fout) 
    {
       ms_fatal("In-place FFT not supported");
       /*CHECKBUF(tmpbuf,ntmpbuf,st->nfft);
       kf_work(tmpbuf,fin,1,in_stride, st->factors,st);
       SPEEX_MOVE(fout,tmpbuf,st->nfft);*/
    } else {
       kf_shuffle( fout, fin, 1,in_stride, st->factors,st);
       kf_work( fout, fin, 1,in_stride, st->factors,st, 1, in_stride, 1);
    }
}
Example no. 26
0
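/* Decodes one queued event: reads the MSEventHeader at rptr (which must be 4-byte
 * aligned), exposes the target filter, event id and argument payload, and returns the
 * total number of bytes consumed. */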
static int parse_event(uint8_t *rptr, MSFilter **f, unsigned int *id, void **data, int *argsize){
	int evsize;
	int header_size = sizeof(MSEventHeader);

	if (((intptr_t)rptr % 4) != 0) ms_fatal("Unaligned access");
	*f = ((MSEventHeader *)rptr)->filter;
	*id = ((MSEventHeader *)rptr)->ev_id;

	*argsize = (*id) & 0xff;
	evsize = round_size((*argsize)) + header_size;
	*data = rptr + header_size;
	return evsize;
}
Example no. 27
0
static void start_adaptive_stream(StreamType type, stream_manager_t ** pmarielle, stream_manager_t ** pmargaux,
	int payload, int initial_bitrate, int target_bw, float loss_rate, int latency, float dup_ratio) {
	OrtpNetworkSimulatorParams params={0};
	params.enabled=TRUE;
	params.loss_rate=loss_rate;
	params.max_bandwidth=target_bw;
	params.latency=latency;
	int pause_time=0;
	MediaStream *marielle_ms,*margaux_ms;
#if VIDEO_ENABLED
	MSWebCam * marielle_webcam=ms_web_cam_manager_get_default_cam (ms_web_cam_manager_get());
#endif
	stream_manager_t *marielle=*pmarielle=stream_manager_new(type);
	stream_manager_t *margaux=*pmargaux=stream_manager_new(type);

	if (type == AudioStreamType){
		marielle_ms=&marielle->audio_stream->ms;
		margaux_ms=&margaux->audio_stream->ms;
	}else{
		marielle_ms=&marielle->video_stream->ms;
		margaux_ms=&margaux->video_stream->ms;
	}

	/* Disable avpf. */
	PayloadType* pt = rtp_profile_get_payload(&rtp_profile, VP8_PAYLOAD_TYPE);
	CU_ASSERT_PTR_NOT_NULL_FATAL(pt);
	payload_type_unset_flag(pt, PAYLOAD_TYPE_RTCP_FEEDBACK_ENABLED);


	media_stream_enable_adaptive_bitrate_control(marielle_ms,TRUE);
	rtp_session_set_duplication_ratio(marielle_ms->sessions.rtp_session, dup_ratio);

	if (marielle->type == AudioStreamType){
		audio_manager_start(marielle,payload,margaux->local_rtp,initial_bitrate,HELLO_16K_1S_FILE,NULL);
		ms_filter_call_method(marielle->audio_stream->soundread,MS_FILE_PLAYER_LOOP,&pause_time);

		audio_manager_start(margaux,payload,marielle->local_rtp,0,NULL,RECORDED_16K_1S_FILE);
	}else{
#if VIDEO_ENABLED
		video_manager_start(marielle,payload,margaux->local_rtp,0,marielle_webcam);
		video_stream_set_direction(margaux->video_stream,VideoStreamRecvOnly);

		video_manager_start(margaux,payload,marielle->local_rtp,0,NULL);
#else
		ms_fatal("Unsupported stream type [%s]",ms_stream_type_to_string(marielle->type));
#endif
	}

	rtp_session_enable_network_simulation(margaux_ms->sessions.rtp_session,&params);
}
Example no. 28
0
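/* Capture thread: attaches itself to the JVM, starts the Java AudioRecord, then loops
 * reading PCM chunks into mblk_t buffers pushed to the shared bufferizer until the filter
 * is stopped; the thread detaches from the JVM before exiting. */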
static void* msandroid_read_cb(msandroid_sound_read_data* d) {
    mblk_t *m;
    int nread;
    jmethodID read_id=0;
    jmethodID record_id=0;

    set_high_prio();

    //JNIEnv *jni_env = ms_get_jni_env();
    JNIEnv *jni_env = NULL;
    JavaVM *jvm = ms_get_jvm();
    if (jvm->AttachCurrentThread(&jni_env, NULL)!=0) {
        ms_fatal("AttachCurrentThread() failed !");
        goto end;
    }
    record_id = jni_env->GetMethodID(d->audio_record_class,"startRecording", "()V");
    if(record_id==0) {
        ms_error("cannot find AudioRecord.startRecording() method");
        goto end;
    }
    //start recording
    ms_message("Start recording");
    jni_env->CallVoidMethod(d->audio_record,record_id);

    // int read (byte[] audioData, int offsetInBytes, int sizeInBytes)
    read_id = jni_env->GetMethodID(d->audio_record_class,"read", "([BII)I");
    if(read_id==0) {
        ms_error("cannot find AudioRecord.read() method");
        goto end;
    }

    while (d->started && (nread=jni_env->CallIntMethod(d->audio_record,read_id,d->read_buff,0, d->read_chunk_size))>0) {
        m = allocb(nread,0);
        jni_env->GetByteArrayRegion(d->read_buff, 0,nread, (jbyte*)m->b_wptr);
        //ms_error("%i octets read",nread);
        m->b_wptr += nread;
        d->read_samples+=nread/(2*d->nchannels);
        compute_timespec(d);
        ms_mutex_lock(&d->mutex);
        ms_bufferizer_put (&d->rb,m);
        ms_mutex_unlock(&d->mutex);
    };

    goto end;
end: {
        jvm->DetachCurrentThread();
        ms_thread_exit(NULL);
        return 0;
    }
}
Example no. 29
0
static void sound_read_postprocess(MSFilter *f) {
    msandroid_sound_read_data *d=(msandroid_sound_read_data*)f->data;
    jmethodID stop_id=0;
    jmethodID release_id=0;

    //JNIEnv *jni_env = ms_get_jni_env();
    JNIEnv *jni_env = NULL;
    JavaVM *jvm = ms_get_jvm();
    if (jvm->AttachCurrentThread(&jni_env, NULL)!=0) {
        ms_fatal("AttachCurrentThread() failed !");
        return;
    }
    ms_ticker_set_time_func(f->ticker,NULL,NULL);
    d->read_samples=0;

    //stop recording
    stop_id = jni_env->GetMethodID(d->audio_record_class,"stop", "()V");
    if(stop_id==0) {
        ms_error("cannot find AudioRecord.stop() method");
        goto end;
    }

    if (d->aec) {
        delete_hardware_echo_canceller(jni_env, d->aec);
        d->aec = NULL;
    }

    d->started = false;
    if (d->thread_id !=0) ms_thread_join(d->thread_id,0);

    if (d->audio_record) {
        jni_env->CallVoidMethod(d->audio_record,stop_id);

        //release recorder
        release_id = jni_env->GetMethodID(d->audio_record_class,"release", "()V");
        if(release_id==0) {
            ms_error("cannot find AudioRecord.release() method");
            goto end;
        }
        jni_env->CallVoidMethod(d->audio_record,release_id);
    }
    goto end;
end: {
        if (d->audio_record) jni_env->DeleteGlobalRef(d->audio_record);
        jni_env->DeleteGlobalRef(d->audio_record_class);
        if (d->read_buff) jni_env->DeleteGlobalRef(d->read_buff);
        jvm->DetachCurrentThread();
        return;
    }
}
Example no. 30
0
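/* Copies a captured frame whose dimensions differ from the requested ones into a new
 * zero-filled buffer sized for s->vsize, cropping or padding as needed; unsupported
 * pixel formats are a fatal error. */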
static mblk_t *crop_or_pad(V4lState *s, mblk_t *pic){
	int size=s->vsize.width*s->vsize.height;
	mblk_t *newpic;
	if (s->pix_fmt==MS_YUV420P) size=size*3/2;
	else if (s->pix_fmt==MS_YUYV) size=size*2;
	else if (s->pix_fmt==MS_UYVY) size=size*2;
	else if (s->pix_fmt==MS_RGB24) size=size*3;
	else ms_fatal("crop_or_pad: unsupported pixel format.");
	newpic=allocb(size,0);
	memset(newpic->b_wptr,0,size);
	pic_copy(newpic->b_wptr, s->vsize.width, s->vsize.height,
		pic->b_rptr,s->got_vsize.width,s->got_vsize.height,s->pix_fmt);
	newpic->b_wptr+=size;
	return newpic;
}