/* API: Destroy factory */
static pj_status_t android_destroy(pjmedia_aud_dev_factory *f)
{
    struct android_aud_factory *pa = (struct android_aud_factory*)f;
    pj_pool_t *pool;
    int err;

    PJ_LOG(4,(THIS_FILE, "Android sound library shutting down.."));

    JNIEnv *jni_env = 0;
    ATTACH_JVM(jni_env);
    jmethodID release_method = 0;

    if(g_audio_record)
    {
        //release recording - we assume the release method exists
        release_method = jni_env->GetMethodID(g_record_class,"release", "()V");
        jni_env->CallVoidMethod(g_audio_record, release_method);

        jni_env->DeleteGlobalRef(g_audio_record);
        jni_env->DeleteGlobalRef(g_record_class);
        g_audio_record = NULL;
        g_record_class = NULL;
        PJ_LOG(3,(THIS_FILE, "zzc ---> Released recorder"));
    }
    else
    {
        PJ_LOG(2,(THIS_FILE, "zzc Nothing to release !!! rec"));
    }

    pool = pa->pool;
    pa->pool = NULL;
    pj_pool_release(pool);

    DETACH_JVM(jni_env);
    return PJ_SUCCESS;
}
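/*
 * The ATTACH_JVM / DETACH_JVM pair used throughout these functions is defined
 * outside this excerpt. A minimal sketch of what such macros could look like,
 * assuming a process-wide "android_jvm" JavaVM pointer saved in JNI_OnLoad();
 * apart from the macro names used above, everything here (variable names, JNI
 * version) is an assumption, not the project's actual header. A C translation
 * unit would use the (*jni_env)-> call form instead.
 */
#include <jni.h>

extern JavaVM *android_jvm;    /* assumed to be stored by JNI_OnLoad() */

#define ATTACH_JVM(jni_env) \
    int attached_here = 0; \
    if (android_jvm->GetEnv((void**)&jni_env, JNI_VERSION_1_4) != JNI_OK) { \
        android_jvm->AttachCurrentThread(&jni_env, NULL); \
        attached_here = 1; \
    }

#define DETACH_JVM(jni_env) \
    if (attached_here) { \
        android_jvm->DetachCurrentThread(); \
    }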
PJ_DECL(pj_status_t) csipsimple_destroy(unsigned flags) {
	destroy_ringback_tone();

#if PJMEDIA_HAS_VIDEO
	unsigned i;
	for (i = 0; i < css_var.extra_vid_codecs_cnt; i++) {
		dynamic_factory *codec = &css_var.extra_vid_codecs_destroy[i];
		pj_status_t (*destroy_factory)() = get_library_factory(codec);
		if(destroy_factory != NULL){
			pj_status_t status = destroy_factory();
			if(status != PJ_SUCCESS) {
				PJ_LOG(2, (THIS_FILE,"Error loading dynamic codec plugin"));
			}
    	}
	}
#endif

	if (css_var.pool) {
		pj_pool_release(css_var.pool);
		css_var.pool = NULL;
	}
	if(css_var.context){
		JNIEnv *jni_env = 0;
		ATTACH_JVM(jni_env);
		(*jni_env)->DeleteGlobalRef(jni_env, css_var.context);
		DETACH_JVM(jni_env);
	}
	return (pj_status_t) pjsua_destroy2(flags);
}
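/*
 * get_library_factory() and the dynamic_factory descriptor are referenced
 * above but defined elsewhere. A minimal sketch, assuming the descriptor only
 * carries a shared-library path plus an exported symbol name and that the
 * helper resolves that symbol with dlopen()/dlsym(); apart from the names
 * already used above, the details are illustrative rather than the project's
 * actual implementation.
 */
#include <dlfcn.h>

typedef struct dynamic_factory {
    pj_str_t shared_lib_path;    /* absolute path of the plugin .so    */
    pj_str_t init_factory_name;  /* exported factory symbol to resolve */
} dynamic_factory;

static void* get_library_factory(dynamic_factory *fact)
{
    char lib_path[256];
    char symbol[128];
    void *handle;
    void *fptr;

    pj_ansi_snprintf(lib_path, sizeof(lib_path), "%.*s",
            (int)fact->shared_lib_path.slen, fact->shared_lib_path.ptr);
    pj_ansi_snprintf(symbol, sizeof(symbol), "%.*s",
            (int)fact->init_factory_name.slen, fact->init_factory_name.ptr);

    handle = dlopen(lib_path, RTLD_LAZY);
    if (handle == NULL) {
        PJ_LOG(1, (THIS_FILE, "Unable to open %s: %s", lib_path, dlerror()));
        return NULL;
    }
    fptr = dlsym(handle, symbol);
    if (fptr == NULL) {
        PJ_LOG(1, (THIS_FILE, "Symbol %s not found: %s", symbol, dlerror()));
        /* The handle is deliberately kept open for the process lifetime. */
    }
    return fptr;
}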
/* API: stop stream. */
static pj_status_t strm_stop(pjmedia_aud_stream *s)
{
	struct android_aud_stream *stream = (struct android_aud_stream*)s;
	int i;
	//We assume that all jni calls are safe ... that's acceptable
	if(stream->quit_flag == 0){
		PJ_LOG(3, (THIS_FILE, "Stopping stream"));
	}else{
		PJ_LOG(2, (THIS_FILE, "Already stopped.... nothing to do here"));
		return PJ_SUCCESS;
	}

	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	jmethodID method_id;

	stream->quit_flag = 1;

	/*
	if (result != 0) {
		PJ_LOG(1, (THIS_FILE, "Not able to attach the jvm"));
		return PJ_ENOMEM;
	}
	*/

	if(stream->record){
		//stop recording
		method_id = jni_env->GetMethodID(stream->record_class, "stop", "()V");
		jni_env->CallVoidMethod(stream->record, method_id);

		if(stream->rec_thread){
			pj_thread_join(stream->rec_thread);
			pj_thread_destroy(stream->rec_thread);
			stream->rec_thread = NULL;
		}
	}


	if(stream->track){
		method_id = jni_env->GetMethodID(stream->track_class,"flush", "()V");
		jni_env->CallVoidMethod(stream->track, method_id);
		method_id = jni_env->GetMethodID(stream->track_class, "stop", "()V");
		jni_env->CallVoidMethod(stream->track, method_id);

		if(stream->play_thread){
			pj_thread_join(stream->play_thread);
			pj_thread_destroy(stream->play_thread);
			stream->play_thread = NULL;
		}
	}



	PJ_LOG(4,(THIS_FILE, "Stopping Done"));

	DETACH_JVM(jni_env);
	return PJ_SUCCESS;

}
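/*
 * struct android_aud_stream is defined outside this excerpt. The sketch below
 * lists the fields implied by the stream functions in this file (strm_stop,
 * strm_destroy, strm_start, android_create_stream and the two thread
 * callbacks); field ordering, the counter types and anything not referenced
 * here are assumptions.
 */
struct android_aud_stream
{
    pjmedia_aud_stream   base;              /* base stream, carries the op table      */
    pj_pool_t           *pool;
    pj_str_t             name;
    pjmedia_dir          dir;
    pjmedia_aud_param    param;
    void                *user_data;
    pjmedia_aud_rec_cb   rec_cb;
    pjmedia_aud_play_cb  play_cb;

    unsigned             samples_per_sec;
    unsigned             samples_per_frame;
    unsigned             bytes_per_sample;
    unsigned             channel_count;

    jclass               record_class;      /* android.media.AudioRecord (global ref) */
    jobject              record;            /* AudioRecord instance (global ref)      */
    jclass               track_class;       /* android.media.AudioTrack (global ref)  */
    jobject              track;             /* AudioTrack instance (global ref)       */

    pj_thread_t         *rec_thread;
    pj_thread_t         *play_thread;
    int                  quit_flag;
    int                  rec_thread_exited;
    int                  play_thread_exited;
};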
/* API: destroy stream. */
static pj_status_t strm_destroy(pjmedia_aud_stream *s)
{

	PJ_LOG(4,(THIS_FILE, "Destroying stream"));

	//Stop the stream
	strm_stop(s);

	struct android_aud_stream *stream = (struct android_aud_stream*)s;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	jmethodID release_method=0;

	if(stream->record){
		//release recording - we assume the release method exists
		release_method = jni_env->GetMethodID(stream->record_class,"release", "()V");
		jni_env->CallVoidMethod(stream->record, release_method);

		jni_env->DeleteGlobalRef(stream->record);
		jni_env->DeleteGlobalRef(stream->record_class);
		stream->record = NULL;
		stream->record_class = NULL;
		PJ_LOG(3,(THIS_FILE, "---> Released recorder"));
	}else{
		PJ_LOG(2,(THIS_FILE, "Nothing to release !!! rec"));
	}

	if(stream->track){
		//release track - we assume the release method exists
		release_method = jni_env->GetMethodID(stream->track_class,"release", "()V");
		jni_env->CallVoidMethod(stream->track, release_method);

		jni_env->DeleteGlobalRef(stream->track);
		jni_env->DeleteGlobalRef(stream->track_class);
		stream->track = NULL;
		stream->track_class = NULL;
		PJ_LOG(3,(THIS_FILE, "---> Released track"));
	}else{
		PJ_LOG(2,(THIS_FILE, "Nothing to release !!! track"));
	}

	//Unset media in call
	on_teardown_audio_wrapper();

//	pj_sem_destroy(stream->audio_launch_sem);
	pj_pool_release(stream->pool);
	PJ_LOG(3,(THIS_FILE, "Stream is destroyed"));

	DETACH_JVM(jni_env);
	return PJ_SUCCESS;
}
pj_status_t set_android_thread_priority(int priority){
	jclass process_class;
	jmethodID set_prio_method;
	jthrowable exc;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	pj_status_t result = PJ_SUCCESS;

	//Get pointer to the java class
	process_class = (jclass)jni_env->NewGlobalRef(jni_env->FindClass("android/os/Process"));
	if (process_class == 0) {
		PJ_LOG(1, (THIS_FILE, "Not able to find os process class"));
		result = PJ_EIGNORED;
		goto on_finish;
	}

	PJ_LOG(4, (THIS_FILE, "We have the class for process"));

	//Get the set priority function
	set_prio_method = jni_env->GetStaticMethodID(process_class, "setThreadPriority", "(I)V");
	if (set_prio_method == 0) {
		PJ_LOG(1, (THIS_FILE, "Not able to find setThreadPriority method"));
		result = PJ_EIGNORED;
		goto on_finish;
	}
	PJ_LOG(4, (THIS_FILE, "We have the method for setThreadPriority"));

	//Call it
	jni_env->CallStaticVoidMethod(process_class, set_prio_method, priority);

	exc = jni_env->ExceptionOccurred();
	if (exc) {
		jni_env->ExceptionDescribe();
		jni_env->ExceptionClear();
		PJ_LOG(2, (THIS_FILE, "Impossible to set priority using java API, fallback to setpriority"));
		setpriority(PRIO_PROCESS, 0, priority);
	}

	on_finish:
		if (process_class) {
			jni_env->DeleteGlobalRef(process_class);
		}
		DETACH_JVM(jni_env);
		return result;

}
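/*
 * THREAD_PRIORITY_URGENT_AUDIO, passed to set_android_thread_priority() by the
 * audio threads below, is defined outside this excerpt. The values below
 * mirror the android.os.Process constants documented in the Android SDK; the
 * macro names are assumptions chosen to match the call sites.
 */
#define THREAD_PRIORITY_LOWEST          19
#define THREAD_PRIORITY_BACKGROUND      10
#define THREAD_PRIORITY_DEFAULT          0
#define THREAD_PRIORITY_AUDIO          (-16)
#define THREAD_PRIORITY_URGENT_AUDIO   (-19)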
/* API: start stream. */
static pj_status_t strm_start(pjmedia_aud_stream *s)
{
	struct android_aud_stream *stream = (struct android_aud_stream*)s;


	PJ_LOG(4,(THIS_FILE, "Starting %s stream..", stream->name.ptr));
	stream->quit_flag = 0;

	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);

	pj_status_t status;

	//Start threads
	if(stream->record){

		status = pj_thread_create(stream->pool, "android_recorder", &AndroidRecorderCallback, stream, 0, 0,  &stream->rec_thread);
		if (status != PJ_SUCCESS) {
			goto on_error;
		}
//		pj_sem_wait(stream->audio_launch_sem);
	}

	if(stream->track){
		status = pj_thread_create(stream->pool, "android_track", &AndroidTrackCallback, stream, 0, 0,  &stream->play_thread);
		if (status != PJ_SUCCESS) {
			goto on_error;
		}
//		pj_sem_wait(stream->audio_launch_sem);
	}

	PJ_LOG(4,(THIS_FILE, "Starting done"));

	status = PJ_SUCCESS;

on_error:
	DETACH_JVM(jni_env);
	if(status != PJ_SUCCESS){
		strm_destroy(&stream->base);
	}
	return status;
}
//Wrap start & stop
PJ_DECL(pj_status_t) csipsimple_init(pjsua_config *ua_cfg,
		pjsua_logging_config *log_cfg, pjsua_media_config *media_cfg,
		csipsimple_config *css_cfg, jobject context) {
	pj_status_t result;
	unsigned i;

	/* Create memory pool for application. */
	if(css_var.pool == NULL){
		css_var.pool = pjsua_pool_create("css", 1000, 1000);
		PJ_ASSERT_RETURN(css_var.pool, PJ_ENOMEM);
	}
	// Finalize configuration
	log_cfg->cb = &pj_android_log_msg;

	// Static cfg
	extern pj_bool_t pjsip_use_compact_form;
	extern pj_bool_t pjsip_include_allow_hdr_in_dlg;
	extern pj_bool_t pjmedia_add_rtpmap_for_static_pt;
	extern pj_bool_t pjmedia_add_bandwidth_tias_in_sdp;
	extern pj_bool_t pjsua_no_update;
	extern pj_bool_t pjmedia_webrtc_use_ns;

	pjsua_no_update = css_cfg->use_no_update ? PJ_TRUE : PJ_FALSE;

	pjsip_use_compact_form =
			css_cfg->use_compact_form_headers ? PJ_TRUE : PJ_FALSE;
	/* do not transmit Allow header */
	pjsip_include_allow_hdr_in_dlg =
			css_cfg->use_compact_form_headers ? PJ_FALSE : PJ_TRUE;
	/* Do not include rtpmap for static payload types (<96) */
	pjmedia_add_rtpmap_for_static_pt =
			css_cfg->use_compact_form_sdp ? PJ_FALSE : PJ_TRUE;
	/* Include bandwidth (TIAS) information in SDP? */
	pjmedia_add_bandwidth_tias_in_sdp =
			css_cfg->add_bandwidth_tias_in_sdp ? PJ_TRUE : PJ_FALSE;
	/* Use noise suppressor ? */
	pjmedia_webrtc_use_ns =
			css_cfg->use_noise_suppressor ? PJ_TRUE : PJ_FALSE;

	css_tcp_keep_alive_interval = css_cfg->tcp_keep_alive_interval;
	css_tls_keep_alive_interval = css_cfg->tls_keep_alive_interval;

	// Transaction timeouts
	pjsip_sip_cfg_var.tsx.t1 = css_cfg->tsx_t1_timeout;
	pjsip_sip_cfg_var.tsx.t2 = css_cfg->tsx_t2_timeout;
	pjsip_sip_cfg_var.tsx.t4 = css_cfg->tsx_t4_timeout;
	pjsip_sip_cfg_var.tsx.td = css_cfg->tsx_td_timeout;
	pjsip_sip_cfg_var.endpt.disable_tcp_switch = css_cfg->disable_tcp_switch;
	pjsip_sip_cfg_var.endpt.disable_rport = css_cfg->disable_rport;

	// Audio codec cfg
	css_var.extra_aud_codecs_cnt = css_cfg->extra_aud_codecs_cnt;
	for (i = 0; i < css_cfg->extra_aud_codecs_cnt; i++) {
		dynamic_factory *css_codec = &css_var.extra_aud_codecs[i];
		dynamic_factory *cfg_codec = &css_cfg->extra_aud_codecs[i];

		pj_strdup_with_null(css_var.pool, &css_codec->shared_lib_path,
				&cfg_codec->shared_lib_path);
		pj_strdup_with_null(css_var.pool, &css_codec->init_factory_name,
				&cfg_codec->init_factory_name);
	}

	// Video codec cfg -- for now only the destroy entries are useful, but
	// hopefully the video codec manager will eventually behave like the audio
	// one, in which case the destroy entries become obsolete.
	css_var.extra_vid_codecs_cnt = css_cfg->extra_vid_codecs_cnt;
	for (i = 0; i < css_cfg->extra_vid_codecs_cnt; i++) {
		dynamic_factory *css_codec = &css_var.extra_vid_codecs[i];
		dynamic_factory *cfg_codec = &css_cfg->extra_vid_codecs[i];

		pj_strdup_with_null(css_var.pool, &css_codec->shared_lib_path,
				&cfg_codec->shared_lib_path);
		pj_strdup_with_null(css_var.pool, &css_codec->init_factory_name,
				&cfg_codec->init_factory_name);


		css_codec = &css_var.extra_vid_codecs_destroy[i];
		cfg_codec = &css_cfg->extra_vid_codecs_destroy[i];

		pj_strdup_with_null(css_var.pool, &css_codec->shared_lib_path,
				&cfg_codec->shared_lib_path);
		pj_strdup_with_null(css_var.pool, &css_codec->init_factory_name,
				&cfg_codec->init_factory_name);

	}

	// ZRTP cfg
	css_var.default_use_zrtp = css_cfg->use_zrtp;
	ua_cfg->cb.on_create_media_transport = &on_transport_created_wrapper;

#if defined(PJMEDIA_HAS_ZRTP) && PJMEDIA_HAS_ZRTP!=0
	pj_ansi_snprintf(css_var.zid_file, sizeof(css_var.zid_file),
			"%.*s/simple.zid", (int)css_cfg->storage_folder.slen,
			css_cfg->storage_folder.ptr);
#endif

	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	css_var.context = (*jni_env)->NewGlobalRef(jni_env, context);
	DETACH_JVM(jni_env);

	result = (pj_status_t) pjsua_init(ua_cfg, log_cfg, media_cfg);
	if (result == PJ_SUCCESS) {
		/* Ringback tone */
	    init_ringback_tone();

		/* Init audio device */
		pj_status_t added_audio = PJ_ENOTFOUND;
		if (css_cfg->audio_implementation.init_factory_name.slen > 0) {
			pjmedia_aud_dev_factory* (*init_factory)(
					pj_pool_factory *pf) = get_library_factory(&css_cfg->audio_implementation);
			if(init_factory != NULL) {
				pjmedia_aud_register_factory(init_factory);
				added_audio = PJ_SUCCESS;
				PJ_LOG(4, (THIS_FILE, "Loaded audio dev"));
			}
		}

		// Fall back to the default audio dev if none was found
		if (added_audio != PJ_SUCCESS) {
			pjmedia_aud_register_factory(&pjmedia_android_factory);
		}

		// Init video device
#if PJMEDIA_HAS_VIDEO
		// load renderer
		if (css_cfg->video_render_implementation.init_factory_name.slen > 0) {
			pjmedia_vid_dev_factory* (*init_factory)(
					pj_pool_factory *pf) = get_library_factory(&css_cfg->video_render_implementation);
			if(init_factory != NULL) {
				pjmedia_vid_register_factory(init_factory, NULL);
				PJ_LOG(4, (THIS_FILE, "Loaded video render dev"));
			}
		}
		// load capture
		if (css_cfg->video_capture_implementation.init_factory_name.slen > 0) {
			pjmedia_vid_dev_factory* (*init_factory)(
								pj_pool_factory *pf) = get_library_factory(&css_cfg->video_capture_implementation);
			if(init_factory != NULL) {
				pjmedia_vid_register_factory(init_factory, NULL);
				PJ_LOG(4, (THIS_FILE, "Loaded video capture dev"));
			}
		}

		// Load ffmpeg converter
		pjmedia_converter_mgr* cvrt_mgr = pjmedia_converter_mgr_instance();
		if(css_cfg->vid_converter.init_factory_name.slen > 0){
			pj_status_t (*init_factory)(pjmedia_converter_mgr* cvrt_mgr) = get_library_factory(&css_cfg->vid_converter);
			if(init_factory != NULL) {
				init_factory(cvrt_mgr);
				PJ_LOG(4, (THIS_FILE, "Loaded video converter"));
			}
		}


		// Load video codecs
		pjmedia_vid_codec_mgr* vid_mgr = pjmedia_vid_codec_mgr_instance();

		for (i = 0; i < css_var.extra_vid_codecs_cnt; i++) {
			dynamic_factory *codec = &css_var.extra_vid_codecs[i];
			pj_status_t (*init_factory)(pjmedia_vid_codec_mgr *mgr,
                    pj_pool_factory *pf) = get_library_factory(codec);
			if(init_factory != NULL){
				pj_status_t status = init_factory(vid_mgr, &pjsua_var.cp.factory);
				if(status != PJ_SUCCESS) {
					PJ_LOG(2, (THIS_FILE,"Error loading dynamic codec plugin"));
				}
			}
		}

#endif
	}

	return result;
}
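/*
 * pjmedia_android_factory, registered above as the fallback audio backend, is
 * the factory entry point whose instance is later handled by android_init(),
 * android_create_stream() and android_destroy(). A minimal sketch assuming the
 * usual pjmedia audio-device factory pattern; the "android_op" op-table name,
 * the pool sizes and the exact struct layout are assumptions, not taken from
 * the project.
 */
struct android_aud_factory
{
    pjmedia_aud_dev_factory  base;   /* base factory, op points to android_op */
    pj_pool_factory         *pf;
    pj_pool_t               *pool;
};

pjmedia_aud_dev_factory* pjmedia_android_factory(pj_pool_factory *pf)
{
    struct android_aud_factory *f;
    pj_pool_t *pool;

    pool = pj_pool_create(pf, "android_aud_dev", 256, 256, NULL);
    f = PJ_POOL_ZALLOC_T(pool, struct android_aud_factory);
    f->pf = pf;
    f->pool = pool;
    f->base.op = &android_op;   /* wires android_init, android_destroy,
                                   android_create_stream, ... */
    return &f->base;
}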
/* API: create stream */
static pj_status_t android_create_stream(pjmedia_aud_dev_factory *f,
		const pjmedia_aud_param *param,
		pjmedia_aud_rec_cb rec_cb,
		pjmedia_aud_play_cb play_cb,
		void *user_data,
		pjmedia_aud_stream **p_aud_strm)
{

	PJ_LOG(4,(THIS_FILE, "Creating stream"));
	struct android_aud_factory *pa = (struct android_aud_factory*)f;
	pj_pool_t *pool;
	struct android_aud_stream *stream;
	pj_status_t status;
	int has_set_in_call = 0;
	int state = 0;

	PJ_ASSERT_RETURN(play_cb && rec_cb && p_aud_strm, PJ_EINVAL);


	// Only mono channel is supported for now
	PJ_ASSERT_RETURN(param->channel_count == 1, PJ_EINVAL);


	pool = pj_pool_create(pa->pf, "sndstream", 1024, 1024, NULL);
	if (!pool) {
		return PJ_ENOMEM;
	}

	stream = PJ_POOL_ZALLOC_T(pool, struct android_aud_stream);
	stream->pool = pool;
	pj_strdup2_with_null(pool, &stream->name, "Android stream");
	stream->dir = PJMEDIA_DIR_CAPTURE_PLAYBACK;
	stream->param = *param;
	stream->user_data = user_data;
	stream->samples_per_sec = param->clock_rate;
	stream->samples_per_frame = param->samples_per_frame;
	stream->bytes_per_sample = param->bits_per_sample / 8;
	stream->channel_count = param->channel_count;
	stream->rec_cb = rec_cb;
	stream->play_cb = play_cb;

	PJ_LOG(3, (THIS_FILE, "Create stream : %d samples/sec, %d samples/frame, %d bytes/sample", stream->samples_per_sec, stream->samples_per_frame, stream->bytes_per_sample));

/*
	if(pj_sem_create(pool, NULL, 0, 2, &stream->audio_launch_sem) != PJ_SUCCESS){
		pj_pool_release(pool);
		return PJ_ENOMEM;
	}
*/

	int inputBuffSize=0, inputBuffSizePlay, inputBuffSizeRec;
	int sampleFormat;

	//TODO : return codes should be better
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	jmethodID constructor_method=0, get_min_buffer_size_method = 0, method_id = 0;


	status = on_setup_audio_wrapper(param->clock_rate);
	if(status != PJ_SUCCESS){
		DETACH_JVM(jni_env);
		pj_pool_release(pool);
		return PJMEDIA_EAUD_INVOP;
	}
	has_set_in_call = 1;

/*
	if (attachResult != 0) {
		PJ_LOG(1, (THIS_FILE, "Not able to attach the jvm"));
		pj_pool_release(pool);
		return PJ_ENOMEM;
	}
*/
	if (param->bits_per_sample == 8) {
		sampleFormat = 3; //ENCODING_PCM_8BIT
	} else if (param->bits_per_sample == 16) {
		sampleFormat = 2; //ENCODING_PCM_16BIT
	} else {
		on_teardown_audio_wrapper();
		DETACH_JVM(jni_env);
		pj_pool_release(pool);
		return PJMEDIA_EAUD_SAMPFORMAT;
	}

	PJ_LOG(3, (THIS_FILE, "Sample format is : %d for %d ", sampleFormat, param->bits_per_sample));



	if (stream->dir & PJMEDIA_DIR_CAPTURE) {
		//Get pointer to the java class
		stream->record_class = (jclass)jni_env->NewGlobalRef(jni_env->FindClass("android/media/AudioRecord"));
		if (stream->record_class == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio record class"));
			goto on_error;
		}

		PJ_LOG(3, (THIS_FILE, "We have the class"));

		//Get the min buffer function
		get_min_buffer_size_method = jni_env->GetStaticMethodID(stream->record_class, "getMinBufferSize", "(III)I");
		if (get_min_buffer_size_method == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio record getMinBufferSize method"));
			goto on_error;
		}
		PJ_LOG(3, (THIS_FILE, "We have the buffer method"));
		//Call it
		inputBuffSizeRec = jni_env->CallStaticIntMethod(stream->record_class, get_min_buffer_size_method,
				param->clock_rate, 2, sampleFormat);

		if(inputBuffSizeRec <= 0){
			PJ_LOG(2, (THIS_FILE, "Min buffer size is not a valid value"));
			goto on_error;
		}

		if(inputBuffSizeRec <= 4096){
			inputBuffSizeRec = 4096 * 3/2;
		}
		int frameSizeInBytes = (param->bits_per_sample == 8) ? 1 : 2;
		if ( inputBuffSizeRec % frameSizeInBytes != 0 ){
			inputBuffSizeRec ++;
		}

		PJ_LOG(3, (THIS_FILE, "Min record buffer %d", inputBuffSizeRec));

		if(inputBuffSizeRec > inputBuffSize){
			inputBuffSize = inputBuffSizeRec;
		}

	}

	if (stream->dir & PJMEDIA_DIR_PLAYBACK) {
		//Get pointer to the java class
		stream->track_class = (jclass)jni_env->NewGlobalRef(jni_env->FindClass("android/media/AudioTrack"));
		if (stream->track_class == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio track class"));
			goto on_error;
		}

		PJ_LOG(3, (THIS_FILE, "We have the track class"));

		//Get the min buffer function
		get_min_buffer_size_method = jni_env->GetStaticMethodID(stream->track_class, "getMinBufferSize", "(III)I");
		if (get_min_buffer_size_method == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio record getMinBufferSize method"));
			goto on_error;
		}
		PJ_LOG(3, (THIS_FILE, "We have the buffer method"));
		//Call it
		inputBuffSizePlay = jni_env->CallStaticIntMethod(stream->track_class, get_min_buffer_size_method,
				param->clock_rate, 2, sampleFormat);

		if(inputBuffSizePlay < 0){
			PJ_LOG(2, (THIS_FILE, "Min buffer size is not a valid value"));
			goto on_error;
		}

		//Not sure that's a good idea

		if(inputBuffSizePlay < 2*2*1024*param->clock_rate/8000){
			inputBuffSizePlay = 2*2*1024*param->clock_rate/8000;
		}

		int frameSizeInBytes = (param->bits_per_sample == 8) ? 1 : 2;
		if ( inputBuffSizePlay % frameSizeInBytes != 0 ){
			inputBuffSizePlay ++;
		}

		//inputBuffSizePlay = inputBuffSizePlay << 1;
		PJ_LOG(3, (THIS_FILE, "Min play buffer %d", inputBuffSizePlay));

		if(inputBuffSizePlay > inputBuffSize){
			inputBuffSize = inputBuffSizePlay;
		}
	}

	PJ_LOG(3, (THIS_FILE, "Min buffer %d", inputBuffSize));



	if (stream->dir & PJMEDIA_DIR_CAPTURE) {
		//Get pointer to the constructor
		constructor_method = jni_env->GetMethodID(stream->record_class,"<init>", "(IIIII)V");
		if (constructor_method == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio record class constructor"));
			goto on_error;
		}

		int mic_source = on_set_micro_source_wrapper();
		if(mic_source == 0){
			mic_source = 1;
			char sdk_version[PROP_VALUE_MAX];
			__system_property_get("ro.build.version.sdk", sdk_version);

			pj_str_t pj_sdk_version = pj_str(sdk_version);
			int sdk_v = pj_strtoul(&pj_sdk_version);
			if(sdk_v >= 10){
				mic_source = 7;
			}
		}
		PJ_LOG(3, (THIS_FILE, "Use micro source : %d", mic_source));

		stream->record =  jni_env->NewObject(stream->record_class, constructor_method,
					mic_source, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
					param->clock_rate,
					2, // CHANNEL_CONFIGURATION_MONO
					sampleFormat,
					inputBuffSizeRec);


		if (stream->record == 0) {
			PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
			goto on_error;
		}
		jthrowable exc = jni_env->ExceptionOccurred();
		if (exc) {
			jni_env->ExceptionDescribe();
			jni_env->ExceptionClear();
			PJ_LOG(2, (THIS_FILE, "The micro source was probably not valid"));
			// Try to fallback on MIC source -- lazy failure
			if(mic_source != 1){
				PJ_LOG(4, (THIS_FILE, "Try default source"));
				stream->record =  jni_env->NewObject(stream->record_class, constructor_method,
							1, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
							param->clock_rate,
							2, // CHANNEL_CONFIGURATION_MONO
							sampleFormat,
							inputBuffSizeRec);
				if (stream->record == 0) {
					PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
					goto on_error;
				}
			}else{
				PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
				goto on_error;
			}
		}
		// Check state
		method_id = jni_env->GetMethodID(stream->record_class,"getState", "()I");
		state = jni_env->CallIntMethod(stream->record, method_id);
		if(state == 0){ /* STATE_UNINITIALIZED */
			// Try to fallback on MIC source -- lazy failure
			if(mic_source != 1){
				PJ_LOG(4, (THIS_FILE, "Try default source"));
				stream->record =  jni_env->NewObject(stream->record_class, constructor_method,
							1, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
							param->clock_rate,
							2, // CHANNEL_CONFIGURATION_MONO
							sampleFormat,
							inputBuffSizeRec);
				if (stream->record == 0) {
					PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
					goto on_error;
				}
			}else{
				PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
				goto on_error;
			}
		}

		stream->record = jni_env->NewGlobalRef(stream->record);

		PJ_LOG(3, (THIS_FILE, "We have capture the instance done"));

	}




	if (stream->dir & PJMEDIA_DIR_PLAYBACK) {

		//Get pointer to the constructor
		constructor_method = jni_env->GetMethodID(stream->track_class,"<init>", "(IIIIII)V");
		if (constructor_method == 0) {
			PJ_LOG(2, (THIS_FILE, "Not able to find audio track class constructor"));
			goto on_error;
		}

		stream->track =  jni_env->NewObject(stream->track_class, constructor_method,
					0, // VOICE_CALL
				//	3, //MUSIC
					param->clock_rate,
					2, // CHANNEL_CONFIGURATION_MONO
					sampleFormat,
					inputBuffSizePlay /**2*/,
					1); // MODE_STREAM


		if (stream->track == 0) {
			PJ_LOG(1, (THIS_FILE, "Not able to instantiate track class"));
			goto on_error;
		}
		stream->track = jni_env->NewGlobalRef(stream->track);

		//TODO check if initialized properly

		PJ_LOG(3, (THIS_FILE, "We have the track instance done"));

	}




	//OK, done
	*p_aud_strm = &stream->base;
	(*p_aud_strm)->op = &android_strm_op;
	DETACH_JVM(jni_env);

	return PJ_SUCCESS;

on_error:

	if(has_set_in_call == 1){
		on_teardown_audio_wrapper();
	}
	DETACH_JVM(jni_env);
	pj_pool_release(pool);
	return PJ_ENOMEM;
}
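/*
 * The raw integers passed to the AudioRecord/AudioTrack constructors and to
 * getMinBufferSize() above map to the Android SDK constants below. The values
 * come from the SDK documentation; the macro names are added here purely for
 * readability, the driver itself passes the literals.
 */
#define AUDIO_SOURCE_MIC                  1   /* MediaRecorder.AudioSource.MIC                       */
#define AUDIO_SOURCE_VOICE_COMMUNICATION  7   /* MediaRecorder.AudioSource.VOICE_COMMUNICATION       */
#define CHANNEL_CONFIGURATION_MONO        2   /* AudioFormat.CHANNEL_CONFIGURATION_MONO (deprecated) */
#define CHANNEL_IN_MONO                  16   /* AudioFormat.CHANNEL_IN_MONO                         */
#define ENCODING_PCM_16BIT                2   /* AudioFormat.ENCODING_PCM_16BIT                      */
#define ENCODING_PCM_8BIT                 3   /* AudioFormat.ENCODING_PCM_8BIT                       */
#define STREAM_VOICE_CALL                 0   /* AudioManager.STREAM_VOICE_CALL                      */
#define MODE_STREAM                       1   /* AudioTrack.MODE_STREAM                              */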
static int PJ_THREAD_FUNC AndroidTrackCallback(void* userData){
	struct android_aud_stream *stream = (struct android_aud_stream*) userData;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	jmethodID write_method=0, play_method=0;
	//jmethodID get_state_method=0;
	pj_status_t status = 0;
	//jint track_state;
	int size =  stream->samples_per_frame * stream->bytes_per_sample;
	int nframes = stream->samples_per_frame / stream->channel_count;
	jbyte* buf;
	jbyteArray outputBuffer;
	pj_timestamp tstamp;

	PJ_LOG(3,(THIS_FILE, "<< Enter player thread"));

	if(!stream->track){
		goto on_break;
	}

	//Get methods ids
	write_method = jni_env->GetMethodID(stream->track_class,"write", "([BII)I");
	play_method = jni_env->GetMethodID(stream->track_class,"play", "()V");
	/*
	get_state_method =  jni_env->GetMethodID(stream->track_class,"getState", "()I");
	if(get_state_method==0) {
		goto on_break;
	}*/

	/*
	track_state = jni_env->CallIntMethod(stream->track, get_state_method);
	PJ_LOG(3,(THIS_FILE, "Player state is now %d", track_state));
	if((int)track_state != 1){
		PJ_LOG(1, (THIS_FILE, "Bad player state !!! %d", track_state));
		goto on_break;
	}*/

	outputBuffer = jni_env->NewByteArray(size);
	if (outputBuffer == 0) {
		PJ_LOG(2, (THIS_FILE, "Not able to allocate a buffer for input play process"));
		goto on_break;
	}

	buf = jni_env->GetByteArrayElements(outputBuffer, 0);

	set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);
	//setpriority(PRIO_PROCESS, 0, -19 /*ANDROID_PRIORITY_URGENT_AUDIO*/);

	//start playing
	jni_env->CallVoidMethod(stream->track, play_method);

	//Init everything
	tstamp.u64 = 0;
	pj_bzero (buf, size);

//	pj_sem_post(stream->audio_launch_sem);

	while ( !stream->quit_flag ) {
		pj_bzero (buf, size);
		pjmedia_frame frame;

		frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
		frame.size = size;
		frame.buf = (void *) buf;
		frame.timestamp.u64 = tstamp.u64;
		frame.bit_info = 0;

		//Fill frame from pj
		status = (*stream->play_cb)(stream->user_data, &frame);
		if (status != PJ_SUCCESS){
			goto on_finish;
		}

		if (frame.type != PJMEDIA_FRAME_TYPE_AUDIO){
			pj_bzero(frame.buf, frame.size);
			PJ_LOG(3, (THIS_FILE, "Hey, not an audio frame !!!"));
			continue;
		}

	//	PJ_LOG(4,(THIS_FILE, "Valid play frame get from network stack"));
		/*
		if(size != frame.size){
			PJ_LOG(2, (THIS_FILE, "Frame size doesn't match : %d vs %d", frame.size, size) );
		}
		*/
		//PJ_LOG(4, (THIS_FILE, "New audio track frame to treat : %d <size : %d>", frame.type, frame.size));

		//Write to the java buffer
		//jni_env->SetByteArrayRegion(outputBuffer, 0, frame.size, (jbyte*)frame.buf);

		//Write to the device output
		status = jni_env->CallIntMethod(stream->track, write_method,
				outputBuffer,
				0,
				frame.size);

		if(status < 0){
			PJ_LOG(1, (THIS_FILE, "Error while writing %d ", status));
			//goto on_finish;
			continue;
		}else if(size != status){
			PJ_LOG(2, (THIS_FILE, "Not everything written"));
		}

	//	PJ_LOG(4,(THIS_FILE, "Valid play frame sent to the audio layer"));

		tstamp.u64 += nframes;
	};

	on_finish:
		jni_env->ReleaseByteArrayElements(outputBuffer, buf, 0);
		jni_env->DeleteLocalRef(outputBuffer);


	on_break:
		DETACH_JVM(jni_env);
//		pj_sem_post(stream->audio_launch_sem);
		PJ_LOG(3,(THIS_FILE, ">> Play thread stopped"));
		stream->play_thread_exited = 1;
		return 0;
}
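/*
 * The player loop above writes PCM into the pointer returned by
 * GetByteArrayElements() and then hands outputBuffer to AudioTrack.write().
 * That only works when the VM returns a direct, non-copied pointer into the
 * Java array, which Dalvik usually does but the JNI spec does not guarantee.
 * The helper below is a defensive sketch that copies each frame explicitly;
 * its name is invented, it is not part of the driver.
 */
static jint write_frame_to_track(JNIEnv *jni_env, jobject track,
                                 jmethodID write_method,
                                 jbyteArray outputBuffer,
                                 const pjmedia_frame *frame)
{
    /* Copy the decoded PCM into the Java byte[] ... */
    jni_env->SetByteArrayRegion(outputBuffer, 0, (jsize)frame->size,
                                (const jbyte*)frame->buf);
    /* ... then hand it to AudioTrack.write(byte[], int, int). */
    return jni_env->CallIntMethod(track, write_method, outputBuffer, 0,
                                  (jint)frame->size);
}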
static int PJ_THREAD_FUNC AndroidRecorderCallback(void* userData){
	struct android_aud_stream *stream = (struct android_aud_stream*) userData;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);

	jmethodID read_method=0, record_method=0;
	int bytesRead;
	int size =  stream->samples_per_frame * stream->bytes_per_sample;
	int nframes = stream->samples_per_frame / stream->channel_count;
	jbyte* buf;
	pj_status_t status = 0;
	jbyteArray inputBuffer;
	pj_timestamp tstamp, now, last_frame;

	int elapsed_time = 0;
	//Frame time in ms
	int frame_time = nframes * 1000 / stream->samples_per_sec;
	int missed_time = frame_time;
	int to_wait = 0;

	PJ_LOG(3,(THIS_FILE, "<< Enter recorder thread"));

	if(!stream->record){
		goto on_break;
	}


	//Get methods ids
	read_method = jni_env->GetMethodID(stream->record_class,"read", "([BII)I");
	record_method = jni_env->GetMethodID(stream->record_class,"startRecording", "()V");
	if(read_method==0 || record_method==0) {
		goto on_break;
	}

	//Create a buffer for frames read
	inputBuffer = jni_env->NewByteArray(size);
	if (inputBuffer == 0) {
		PJ_LOG(2, (THIS_FILE, "Not able to allocate a buffer for input read process"));
		goto on_break;
	}


	//start recording
	//setpriority(PRIO_PROCESS, 0, -19 /*ANDROID_PRIORITY_AUDIO*/);
	// setpriority() alone is probably not enough because it does not change the thread's scheduler group.
	// The temporary solution is to call the Java API to set the thread priority.
	// A nicer solution would be to port (if possible) the Android OS code that handles the set_sched groups.
	set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);

	buf = jni_env->GetByteArrayElements(inputBuffer, 0);

	//Init everything
	tstamp.u64 = 0;
	pj_bzero (buf, size);


	jni_env->CallVoidMethod(stream->record, record_method);
	pj_get_timestamp(&last_frame);

	while ( !stream->quit_flag ) {
		pj_bzero (buf, size);

#if COMPATIBLE_ALSA
		pj_get_timestamp(&now);
		// Time elapsed since the last frame (ms)
		elapsed_time = pj_elapsed_msec(&last_frame, &now);

		pj_get_timestamp(&last_frame);

		//PJ_LOG (4, (THIS_FILE, "Elapsed time is %d | missed time is %d | frame time %d", elapsed_time, missed_time, frame_time));
		//Update missed time
		// Positive if we are late, negative if we are early.
		// Dividing by 2 is an empirical result: on the Nexus One we otherwise
		// get buffer overflows, presumably because the device fills its buffers
		// slightly faster than the nominal frequency.
		missed_time =  missed_time/2 + elapsed_time - frame_time;

		//PJ_LOG (4, (THIS_FILE, "And now :: Elapsed time is %d | missed time is %d", elapsed_time, missed_time));

		// If we are ahead of the buffer filling, wait a bit
		if( missed_time <= 0 ){
			//if(elapsed_time < frame_time){
				to_wait = - missed_time - 2;
				if(to_wait > 0){
			//		PJ_LOG (4, (THIS_FILE, "Wait for %d / %d", to_wait, frame_time));
					pj_thread_sleep(to_wait);
				}
			//}
		}
/*
		//PJ_LOG (4, (THIS_FILE, "Next frame %d", next_frame_in));
		if (next_frame_in-2 > 0) {
			//PJ_LOG (4, (THIS_FILE, "Wait for buffer %d", next_frame_in));
			pj_thread_sleep(next_frame_in-5);
			//Reset the delay we have regarding next frame
			retard = 0;
		}else{
			if(next_frame_in < 0){
				retard += next_frame_in;
			}
		}
*/
#endif

		bytesRead = jni_env->CallIntMethod(stream->record, read_method,
					inputBuffer,
					0,
					size);


		if(bytesRead<=0){
			PJ_LOG (3, (THIS_FILE, "Record thread : error while reading data... is there something we can do here? %d", bytesRead));
			continue;
		}
		if(stream->quit_flag){
			break;
		}
		if(bytesRead != size){
			PJ_LOG(3, (THIS_FILE, "Overrun..."));
			continue;
		}

	//	PJ_LOG(4,(THIS_FILE, "Valid record frame read"));
		//jni_env->GetByteArrayRegion(inputBuffer, 0, size, buf );

		pjmedia_frame frame;

		frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
		frame.size =  size;
		frame.bit_info = 0;
		frame.buf = (void*) buf;
		frame.timestamp.u64 = tstamp.u64;

	//	PJ_LOG(3, (THIS_FILE, "New audio record frame to treat : %d <size : %d>", frame.type, frame.size));

		status = (*stream->rec_cb)(stream->user_data, &frame);
	//	PJ_LOG(4,(THIS_FILE, "Valid record frame sent to network stack"));

		if (status != PJ_SUCCESS){
			PJ_LOG(1, (THIS_FILE, "Error in record callback"));
			goto on_finish;
		}


		//Update for next step
		tstamp.u64 += nframes;
	};


	on_finish:
		jni_env->ReleaseByteArrayElements(inputBuffer, buf, 0);
		jni_env->DeleteLocalRef(inputBuffer);

	on_break:
		DETACH_JVM(jni_env);
		PJ_LOG(3,(THIS_FILE, ">> Record thread stopped"));
//		pj_sem_post(stream->audio_launch_sem);
		stream->rec_thread_exited = 1;
		return 0;
}
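/*
 * The pacing rule used in the recorder loop above, pulled out as a pure helper
 * so the arithmetic is easier to follow. With 16 kHz mono audio and 320
 * samples per frame, frame_time is 320 * 1000 / 16000 = 20 ms. The halving and
 * the ~2 ms safety margin are taken from the loop above; the helper name and
 * its packaging as a function are my own illustration, not project code.
 */
static int compute_wait_ms(int *missed_time, int elapsed_time, int frame_time)
{
    /* Positive when we are running late, negative when we are early; halving
       the previous value is the empirical smoothing used by the loop above. */
    *missed_time = *missed_time / 2 + elapsed_time - frame_time;

    if (*missed_time <= 0) {
        int to_wait = -(*missed_time) - 2;   /* keep a ~2 ms safety margin */
        if (to_wait > 0)
            return to_wait;                  /* caller passes this to pj_thread_sleep() */
    }
    return 0;
}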
/* API: Init factory */
static pj_status_t android_init(pjmedia_aud_dev_factory *f)
{
    int mic_source;
    int state = 0;
    jthrowable exc;

    PJ_UNUSED_ARG(f);

    PJ_LOG(4,(THIS_FILE, "Android sound library initialized"));
    PJ_LOG(4,(THIS_FILE, "Sound device count=%d", android_get_dev_count(f)));

    JNIEnv *jni_env = 0;
    ATTACH_JVM(jni_env);
    jmethodID constructor_method = 0, method_id = 0;

    g_record_class = (jclass)jni_env->NewGlobalRef(jni_env->FindClass("android/media/AudioRecord"));
    if (g_record_class == 0)
    {
        PJ_LOG(2, (THIS_FILE, "zzc Not able to find audio record class"));
        goto on_error;
    }
    //Get pointer to the constructor
    constructor_method = jni_env->GetMethodID(g_record_class,"<init>", "(IIIII)V");
    if (constructor_method == 0)
    {
        PJ_LOG(2, (THIS_FILE, "zzc Not able to find audio record class constructor"));
        goto on_error;
    }

    mic_source = on_set_micro_source_wrapper();
    if(mic_source == 0)
    {
        mic_source = 1;
        char sdk_version[PROP_VALUE_MAX];
        __system_property_get("ro.build.version.sdk", sdk_version);

        pj_str_t pj_sdk_version = pj_str(sdk_version);
        int sdk_v = pj_strtoul(&pj_sdk_version);
        if(sdk_v >= 10)
        {
            mic_source = 7;
        }
    }
    PJ_LOG(3, (THIS_FILE, "zzc Use micro source : %d", mic_source));

    g_audio_record = jni_env->NewObject(g_record_class, constructor_method,
        mic_source, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
        16000,
        //2, // CHANNEL_CONFIGURATION_MONO
        16, // lxd CHANNEL_IN_MONO
        2, // ENCODING_PCM_16BIT
        //6144
        16000//lxd buffer size in bytes
        );
    if (g_audio_record == 0)
    {
        PJ_LOG(1, (THIS_FILE, "zzc Not able to instantiate record class"));
        goto on_error;
    }
    exc = jni_env->ExceptionOccurred();
    if (exc)
    {
        jni_env->ExceptionDescribe();
        jni_env->ExceptionClear();
        PJ_LOG(2, (THIS_FILE, "zzc The micro source was probably not valid"));
        // Try to fallback on MIC source -- lazy failure
        if(mic_source != 1)
        {
            PJ_LOG(4, (THIS_FILE, "zzc Try default source"));
            g_audio_record = jni_env->NewObject(g_record_class, constructor_method,
            		1, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
					16000,
					//2, // CHANNEL_CONFIGURATION_MONO
					16, // lxd CHANNEL_IN_MONO
					2,
					//6144
					16000//lxd
					);
            if (g_audio_record == 0)
            {
                PJ_LOG(1, (THIS_FILE, "zzc Not able to instantiate record class"));
                goto on_error;
            }
        }
        else
        {
            PJ_LOG(1, (THIS_FILE, "zzc Not able to instantiate record class"));
            goto on_error;
        }
    }
    // Check state
    method_id = jni_env->GetMethodID(g_record_class,"getState", "()I");
    state = jni_env->CallIntMethod(g_audio_record, method_id);
    if(state == 0){ /* STATE_UNINITIALIZED */
        // Try to fallback on MIC source -- lazy failure
        if(mic_source != 1){
            PJ_LOG(4, (THIS_FILE, "Try default source"));
            g_audio_record =  jni_env->NewObject(g_record_class, constructor_method,
            		1, // Mic input source:  1 = MIC / 7 = VOICE_COMMUNICATION
					16000,
					//2, // CHANNEL_CONFIGURATION_MONO
					16, // lxd CHANNEL_IN_MONO
					2,
					//6144
					16000//lxd
					);
            if (g_audio_record == 0) {
                PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
                goto on_error;
            }
        }else{
            PJ_LOG(1, (THIS_FILE, "Not able to instantiate record class"));
            goto on_error;
        }
    }

    g_audio_record = jni_env->NewGlobalRef(g_audio_record);

    DETACH_JVM(jni_env);
    return PJ_SUCCESS;

on_error:

    on_teardown_audio_wrapper();
    DETACH_JVM(jni_env);
    return PJMEDIA_ESNDINDEVID;
}
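/*
 * The g_record_class / g_audio_record globals used by android_init() and
 * android_destroy() are declared outside this excerpt. A minimal sketch of the
 * assumed declarations: file-scope globals holding JNI global references so
 * the recorder created at init time survives until the factory is destroyed.
 */
static jclass  g_record_class = NULL;   /* global ref to android.media.AudioRecord       */
static jobject g_audio_record = NULL;   /* global ref to the shared AudioRecord instance */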