Example #1
0
static void video_capture_uninit(MSFilter *f) {
	ms_message("Uninit of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();
	env->DeleteGlobalRef(d->helperClass);
	delete d;
}
uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *codec, size_t idx, size_t *out_size){
	JNIEnv *env = ms_get_jni_env();
	jobject object;
	uint8_t *buf = NULL;
	jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
	if (mediaCodecClass == NULL){
		ms_error("Couldn't find android/media/MediaCodec class !");
		env->ExceptionClear();
		return NULL;
	}

	jmethodID methodID = env->GetMethodID(mediaCodecClass,"getOutputBuffers","()[Ljava/nio/ByteBuffer;");
	if (methodID != NULL){
		object = env->CallObjectMethod(codec->jcodec, methodID);
		handle_java_exception();
		if(object != NULL){
			jobjectArray jbuffers = reinterpret_cast<jobjectArray>(object);
			jobject jbuf = env->GetObjectArrayElement(jbuffers,idx);
			buf = (uint8_t *) env->GetDirectBufferAddress(jbuf);
			if (out_size != NULL) *out_size = (size_t) env->GetDirectBufferCapacity(jbuf);
			env->DeleteLocalRef(jbuf);
			env->DeleteLocalRef(object);
		}
	} else {
		ms_error("getOutputBuffers() not found in class MediaCodec !");
		env->ExceptionClear();
		env->DeleteLocalRef(mediaCodecClass);
		return NULL;
	}
	env->DeleteLocalRef(mediaCodecClass);
	return buf;
}
	AndroidReaderContext():frame(0),fps(5){
		ms_message("Creating AndroidReaderContext for Android VIDEO capture filter");

		ms_mutex_init(&mutex,NULL);

		JNIEnv *env = ms_get_jni_env();
		managerClass = env->FindClass("org/linphone/core/AndroidCameraRecordManager");
		managerClass = (jclass) env->NewGlobalRef(managerClass);
		if (managerClass == 0) {
			ms_fatal("cannot register android video record manager class\n");
			return;
		}

		jmethodID getInstanceMethod = env->GetStaticMethodID(managerClass,"getInstance", "()Lorg/linphone/core/AndroidCameraRecordManager;");
		if (getInstanceMethod == 0) {
			ms_fatal("cannot find singleton getter method\n");
			return;
		}

		// Get singleton AndroidCameraRecordManager for the default camera
		recorder = env->CallStaticObjectMethod(managerClass, getInstanceMethod);
		if (recorder == 0) {
			ms_fatal("cannot instantiate AndroidCameraRecordManager\n");
			return;
		}

		recorder = env->NewGlobalRef(recorder);
		if (recorder == 0) {
			ms_fatal("cannot create global reference on AndroidCameraRecordManager instance\n");
			return;
		}

	};
Example #4
0
static void video_capture_init(MSFilter *f) {
	AndroidReaderContext* d = new AndroidReaderContext(f, 0);
	ms_message("Init of Android VIDEO capture filter (%p)", d);
	JNIEnv *env = ms_get_jni_env();
	d->helperClass = getHelperClassGlobalRef(env);
	f->data = d;
}
AMediaCodec* AMediaCodec_createEncoderByType(const char *mime_type){
	AMediaCodec *codec=ms_new0(AMediaCodec,1);
	JNIEnv *env = ms_get_jni_env();
	jobject jcodec = NULL;

	jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
	if (mediaCodecClass == NULL){
		ms_error("Couldn't find android/media/MediaCodec class !");
		env->ExceptionClear();
		ms_free(codec);
		return NULL;
	}

	jmethodID methodID = env->GetStaticMethodID(mediaCodecClass, "createEncoderByType", "(Ljava/lang/String;)Landroid/media/MediaCodec;");
	if (methodID != NULL){
		jstring msg = env->NewStringUTF(mime_type);
		jcodec = env->CallStaticObjectMethod(mediaCodecClass, methodID, msg);
		handle_java_exception();
		if (jcodec){
			jcodec=env->NewGlobalRef(jcodec);
			ms_message("Codec %s successfully created.", mime_type);
		}else{
			ms_error("Failed to create codec !");
			env->DeleteLocalRef(msg);
			env->DeleteLocalRef(mediaCodecClass);
			ms_free(codec);
			return NULL;
		}
		env->DeleteLocalRef(msg);
	}else{
		ms_error("createEncoderByType() not found in class MediaCodec !");
		env->ExceptionClear();
		env->DeleteLocalRef(mediaCodecClass);
		ms_free(codec);
		return NULL;
	}
	env->DeleteLocalRef(mediaCodecClass);
	codec->jcodec = jcodec;
	return codec;
}
bool AMediaFormat_getInt32(AMediaFormat *format, const char *name, int32_t *out){
	JNIEnv *env = ms_get_jni_env();
	if (format == NULL) {
		ms_error("Format is null");
		return false;
	}
	jclass mediaFormatClass = env->FindClass("android/media/MediaFormat");
	if (mediaFormatClass==NULL){
		ms_error("Couldn't find android/media/MediaFormat class !");
		env->ExceptionClear(); //very important.
		return false;
	}
	jmethodID getIntegerID = env->GetMethodID(mediaFormatClass,"getInteger","(Ljava/lang/String;)I");
	if (getIntegerID != NULL){
		jstring jkey = env->NewStringUTF(name);
		jint jout = env->CallIntMethod(format->jformat,getIntegerID,jkey);
		handle_java_exception();
		*out = jout;
		env->DeleteLocalRef(jkey);
	} else {
		ms_error("getInteger() not found in class MediaFormat !");
		env->ExceptionClear(); //very important.
		env->DeleteLocalRef(mediaFormatClass);
		return false;
	}
	env->DeleteLocalRef(mediaFormatClass);
	return true;
}
//STUB
AMediaFormat *AMediaFormat_new(){
	AMediaFormat *format=ms_new0(AMediaFormat,1);
	JNIEnv *env = ms_get_jni_env();
	jobject jformat = NULL;
	jclass mediaFormatClass = env->FindClass("android/media/MediaFormat");
	if (mediaFormatClass==NULL){
		ms_error("Couldn't find android/media/MediaFormat class !");
		env->ExceptionClear(); //very important.
		ms_free(format);
		return NULL;
	}

	jmethodID createID = env->GetStaticMethodID(mediaFormatClass,"createVideoFormat","(Ljava/lang/String;II)Landroid/media/MediaFormat;");
	if (createID!=NULL){
		jstring msg = env->NewStringUTF("video/avc");
		jformat=env->CallStaticObjectMethod(mediaFormatClass,createID,msg,240,320);
		if (jformat){
			ms_message("format successfully created.");
		}else{
			ms_error("Failed to create format !");
		}
		env->DeleteLocalRef(msg);
	}else{
		ms_error("createVideoFormat() not found in class MediaFormat !");
		env->ExceptionClear(); //very important.
	}

	format->jformat = jformat;
	env->DeleteLocalRef(mediaFormatClass);
	return format;
}
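/*
 * Illustrative sketch, not part of the original sources: shows how the JNI-backed
 * AMediaCodec/AMediaFormat shims from these examples could be combined to create
 * and parameterize an H.264 encoder. encoder_format_sketch() is a hypothetical
 * helper; the string keys ("bitrate", "frame-rate", "i-frame-interval", "width")
 * are standard android.media.MediaFormat keys.
 */
static void encoder_format_sketch(void){
	AMediaCodec *codec = AMediaCodec_createEncoderByType("video/avc");
	if (codec == NULL) return;

	/* The AMediaFormat_new() stub above already creates a "video/avc" 240x320 format. */
	AMediaFormat *format = AMediaFormat_new();
	if (format == NULL) return;

	AMediaFormat_setInt32(format, "bitrate", 500000);
	AMediaFormat_setInt32(format, "frame-rate", 15);
	AMediaFormat_setInt32(format, "i-frame-interval", 10);

	int32_t width = 0;
	if (AMediaFormat_getInt32(format, "width", &width)) {
		ms_message("Encoder format width is %i", width);
	}
	/* A real filter would now configure and start the codec with this format,
	 * and release both objects when done. */
}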
media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec *codec, size_t idx, bool render){
	JNIEnv *env = ms_get_jni_env();
	jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
	if (mediaCodecClass==NULL){
		ms_error("Couldn't find android/media/MediaCodec class !");
		env->ExceptionClear(); //very important.
		return AMEDIA_ERROR_BASE;
	}

	jmethodID methodID = env->GetMethodID(mediaCodecClass,"releaseOutputBuffer","(IZ)V");
	if (methodID != NULL){
		env->CallVoidMethod(codec->jcodec,methodID,(jint)idx,(jboolean)render);
		handle_java_exception();
	} else {
		ms_error("releaseOutputBuffer() not found in class MediaCodec !");
		env->ExceptionClear(); //very important.
		env->DeleteLocalRef(mediaCodecClass);
		return AMEDIA_ERROR_BASE;
	}
	env->DeleteLocalRef(mediaCodecClass);
	return AMEDIA_OK;
}
static void video_capture_postprocess(MSFilter *f){
	ms_message("Postprocessing of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	JNIEnv *env = ms_get_jni_env();
	jmethodID stopMethod = env->GetMethodID(d->managerClass,"invalidateParameters", "()V");
	env->CallVoidMethod(d->recorder, stopMethod);
}
Example #10
0
static int android_display_set_window(MSFilter *f, void *arg){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	unsigned long id=*(unsigned long*)arg;
	int err;
	JNIEnv *jenv=ms_get_jni_env();
	jobject window=(jobject)id;

	ms_filter_lock(f);
	if (window!=NULL)
		ad->jbitmap=(*jenv)->CallObjectMethod(jenv,window,ad->get_bitmap_id);
	else
		ad->jbitmap=NULL;
	ad->android_video_window=window;
	if (ad->jbitmap!=NULL){
		err=sym_AndroidBitmap_getInfo(jenv,ad->jbitmap,&ad->bmpinfo);
		if (err!=0){
			ms_error("AndroidBitmap_getInfo() failed.");
			ad->jbitmap=0;
			ms_filter_unlock(f);
			return -1;
		}
	}
	if (ad->sws){
		ms_scaler_context_free(ad->sws);
		ad->sws=NULL;
	}
	ad->orientation_change_pending=FALSE;
	ms_filter_unlock(f);
	if (ad->jbitmap!=NULL) ms_message("New java bitmap given with w=%i,h=%i,stride=%i,format=%i",
	           ad->bmpinfo.width,ad->bmpinfo.height,ad->bmpinfo.stride,ad->bmpinfo.format);
	return 0;
}
static int android_display_set_window(MSFilter *f, void *arg){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	unsigned long id=*(unsigned long*)arg;
	JNIEnv *jenv=ms_get_jni_env();
	jobject window=(jobject)id;

	ms_filter_lock(f);
	
	if (window) {
		unsigned int ptr = (unsigned int)ad->ogl;
		ms_message("Sending opengles_display pointer as int: %p -> %u", ad->ogl, ptr);
		(*jenv)->CallVoidMethod(jenv,window,ad->set_opengles_display_id, ptr);
		ad->ogl_free_ready = FALSE;
	} else {
		if (window != ad->android_video_window) {
			ms_message("Clearing opengles_display (%p : %d)", ad->ogl, ad->ogl_free_ready);
			/* when context is lost GL resources are freed by Android */
			ogl_display_uninit(ad->ogl, FALSE);
			if (ad->ogl_free_ready) {
				ms_free(ad->ogl);
				ad->ogl = 0;
			} else {
				ad->ogl_free_ready = TRUE;
			}
			/* clear native ptr, to prevent rendering to occur now that ptr is invalid */
			(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->set_opengles_display_id, 0);
		}
	}

	ad->android_video_window=window;

	ms_filter_unlock(f);

	return 0;
}
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;

	ms_filter_lock(f);
	if (ad->android_video_window){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				/* schedule display of frame */
				if (ad->ogl && !ad->ogl_free_ready) {
					/* m is dupb'ed inside ogl_display */
					ogl_display_set_yuv_to_display(ad->ogl, m);
				} else {
					ms_warning("%s: opengldisplay not ready (%p)", __FUNCTION__, ad->ogl);
				}
				
				JNIEnv *jenv=ms_get_jni_env();
				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->request_render_id);
			}
		}
	}
	ms_filter_unlock(f);

	ms_queue_flush(f->inputs[0]);
	if (f->inputs[1] != NULL)
		ms_queue_flush(f->inputs[1]);
}
static unsigned int get_supported_rate(unsigned int prefered_rate) {
	JNIEnv *jni_env = ms_get_jni_env();
	jclass audio_record_class = jni_env->FindClass("android/media/AudioRecord");
	int size = jni_env->CallStaticIntMethod(audio_record_class
											,jni_env->GetStaticMethodID(audio_record_class,"getMinBufferSize", "(III)I")
											,prefered_rate
											,2/*CHANNEL_CONFIGURATION_MONO*/
											,2/*  ENCODING_PCM_16BIT */);


	if (size > 0) {
		return prefered_rate;
	} else {
		ms_warning("Cannot configure recorder with rate [%i]",prefered_rate);
		if (prefered_rate>48000) {
			return get_supported_rate(48000);
		}
		switch (prefered_rate) {
		case 12000:
		case 24000: return get_supported_rate(48000);
		case 48000: return get_supported_rate(44100);
		case 44100: return get_supported_rate(16000);
		case 16000: return get_supported_rate(8000);
		default:
			ms_error("This Android sound card doesn't support any standard sample rate");
			return 0;
		}
	}

}
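/*
 * Illustrative usage, not part of the original sources: pick a capture rate by
 * walking the fallback chain implemented in get_supported_rate() above.
 * pick_capture_rate_sketch() is a hypothetical helper.
 */
static unsigned int pick_capture_rate_sketch(void){
	unsigned int rate = get_supported_rate(44100);
	if (rate == 0) {
		ms_error("No standard AudioRecord sample rate is usable on this device");
	} else {
		ms_message("Using capture rate %u Hz", rate);
	}
	return rate;
}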
Example #14
0
static void video_capture_detect(MSWebCamManager *obj){
	ms_message("Detecting Android VIDEO cards");
	JNIEnv *env = ms_get_jni_env();
	jclass helperClass = getHelperClassGlobalRef(env);

	// create 3 int arrays - assuming 2 webcams at most
	jintArray indexes = (jintArray)env->NewIntArray(2);
	jintArray frontFacing = (jintArray)env->NewIntArray(2);
	jintArray orientation = (jintArray)env->NewIntArray(2);

	jmethodID method = env->GetStaticMethodID(helperClass,"detectCameras", "([I[I[I)I");

	int count = env->CallStaticIntMethod(helperClass, method, indexes, frontFacing, orientation);

	ms_message("%d cards detected", count);
	for(int i=0; i<count; i++) {
		MSWebCam *cam = ms_web_cam_new(&ms_android_video_capture_desc);
		AndroidWebcamConfig* c = new AndroidWebcamConfig();
		env->GetIntArrayRegion(indexes, i, 1, &c->id);
		env->GetIntArrayRegion(frontFacing, i, 1, &c->frontFacing);
		env->GetIntArrayRegion(orientation, i, 1, &c->orientation);
		cam->data = c;
		cam->name = ms_strdup("Android video name");
		char* idstring = (char*) malloc(15);
		snprintf(idstring, 15, "Android%d", c->id);
		cam->id = idstring;
		ms_web_cam_manager_add_cam(obj,cam);
		ms_message("camera created: id=%d frontFacing=%d orientation=%d [msid:%s]\n", c->id, c->frontFacing, c->orientation, idstring);
	}

	env->DeleteGlobalRef(helperClass);
	ms_message("Detection of Android VIDEO cards done");
}
AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec *codec){
	AMediaFormat *format=ms_new0(AMediaFormat,1);
	JNIEnv *env = ms_get_jni_env();
	jobject jformat = NULL;
	jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
	if (mediaCodecClass==NULL){
		ms_error("Couldn't find android/media/MediaCodec class !");
		env->ExceptionClear(); //very important.
		ms_free(format);
		return NULL;
	}

	jmethodID methodID = env->GetMethodID(mediaCodecClass,"getOutputFormat","()Landroid/media/MediaFormat;");
	if (methodID!=NULL){
		jformat=env->CallObjectMethod(codec->jcodec,methodID);
		handle_java_exception();
		if (jformat == NULL){
			ms_error("Failed to get the output format !");
			env->DeleteLocalRef(mediaCodecClass);
			ms_free(format);
			return NULL;
		}
	}else{
		ms_error("getOutputFormat() not found in class MediaCodec !");
		env->ExceptionClear(); //very important.
	}
	env->DeleteLocalRef(mediaCodecClass);
	format->jformat = jformat;
	return format;
}
Example #16
0
void video_capture_preprocess(MSFilter *f){
	ms_message("Preprocessing of Android VIDEO capture filter");

	AndroidReaderContext *d = getContext(f);
	ms_mutex_lock(&d->mutex);

	snprintf(d->fps_context, sizeof(d->fps_context), "Captured mean fps=%%f, expected=%f", d->fps);
	ms_video_init_framerate_controller(&d->fpsControl, d->fps);
	ms_video_init_average_fps(&d->averageFps, d->fps_context);

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;");

	ms_message("Starting Android camera '%d' (rotation:%d)", ((AndroidWebcamConfig*)d->webcam->data)->id, d->rotation);
	jobject cam = env->CallStaticObjectMethod(d->helperClass, method,
			((AndroidWebcamConfig*)d->webcam->data)->id,
			d->hwCapableSize.width,
			d->hwCapableSize.height,
			(jint)d->fps,
			d->rotationSavedDuringVSize,
			(jlong)d);
	d->androidCamera = env->NewGlobalRef(cam);

	if (d->previewWindow) {
		method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");
		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, d->previewWindow);
	}
	ms_message("Preprocessing of Android VIDEO capture filter done");
	ms_mutex_unlock(&d->mutex);
}
Example #17
0
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;

	ms_filter_lock(f);
	if (ad->jbitmap!=0 && !ad->orientation_change_pending){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				MSVideoSize wsize={ad->bmpinfo.width,ad->bmpinfo.height};
				MSVideoSize vsize={pic.w, pic.h};
				MSRect vrect;
				MSPicture dest={0};
				void *pixels=NULL;
				JNIEnv *jenv=ms_get_jni_env();

				if (!ms_video_size_equal(vsize,ad->vsize)){
					ms_message("Video to display has size %ix%i",vsize.width,vsize.height);
					ad->vsize=vsize;
					if (ad->sws){
						ms_scaler_context_free(ad->sws);
						ad->sws=NULL;
					}
					/*select_orientation(ad,wsize,vsize);*/
				}

				ms_layout_compute(wsize,vsize,vsize,-1,0,&vrect, NULL);

				if (ad->sws==NULL){
					ad->sws=ms_scaler_create_context (vsize.width,vsize.height,MS_YUV420P,
					                           vrect.w,vrect.h,MS_RGB565,MS_SCALER_METHOD_BILINEAR);
					if (ad->sws==NULL){
						ms_fatal("Could not obtain sws context !");
					}
				}

				if (sym_AndroidBitmap_lockPixels(jenv,ad->jbitmap,&pixels)==0){

					if (pixels!=NULL){
						dest.planes[0]=(uint8_t*)pixels+(vrect.y*ad->bmpinfo.stride)+(vrect.x*2);
						dest.strides[0]=ad->bmpinfo.stride;
						ms_scaler_process(ad->sws,pic.planes,pic.strides,dest.planes,dest.strides);
					}else ms_warning("Pixels==NULL in android bitmap !");

					sym_AndroidBitmap_unlockPixels(jenv,ad->jbitmap);
				}else{
					ms_error("AndroidBitmap_lockPixels() failed !");
				}

				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->update_id);

			}
		}
	}
	ms_filter_unlock(f);

	ms_queue_flush(f->inputs[0]);
	if (f->inputs[1] != NULL)
		ms_queue_flush(f->inputs[1]);
}
void msandroid_sound_init(MSSndCard *card){
	/*get running sdk version*/
	JNIEnv *jni_env = ms_get_jni_env();
	jclass version_class = jni_env->FindClass("android/os/Build$VERSION");
	jfieldID fid = jni_env->GetStaticFieldID(version_class, "SDK_INT", "I");
	sdk_version=jni_env->GetStaticIntField(version_class, fid);
	ms_message("SDK version [%i] detected",sdk_version);
}
Example #19
0
static int video_capture_set_autofocus(MSFilter *f, void* data){
	JNIEnv *env = ms_get_jni_env();
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	jmethodID method = env->GetStaticMethodID(d->helperClass,"activateAutoFocus", "(Ljava/lang/Object;)V");
	env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);
	
	return 0;
}
int handle_java_exception(){
	JNIEnv *env = ms_get_jni_env();
	if (env->ExceptionCheck()) {
		env->ExceptionDescribe();
		env->ExceptionClear();
		return -1;
	}
	return 0;
}
	~msandroid_sound_write_data() {
		ms_bufferizer_flush(bufferizer);
		ms_bufferizer_destroy(bufferizer);
		ms_cond_destroy(&cond);
		if (audio_track_class!=0){
			JNIEnv *env = ms_get_jni_env();
			env->DeleteGlobalRef(audio_track_class);
		}
	}
Example #22
0
	~AndroidReaderContext(){
		ms_mutex_destroy(&mutex);
		JNIEnv *env = ms_get_jni_env();
		env->DeleteGlobalRef(recorder);
		env->DeleteGlobalRef(managerClass);

		if (frame != 0) {
			freeb(frame);
		}
	};
Example #23
0
static void select_orientation(AndroidDisplay *ad, MSVideoSize wsize, MSVideoSize vsize){
	int wo,vo;
	JNIEnv *jenv=ms_get_jni_env();
	wo=vsize_get_orientation(wsize);
	vo=vsize_get_orientation(vsize);
	if (wo!=vo){
		ms_message("Requesting orientation change !");
		(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->request_orientation_id,vo);
		ad->orientation_change_pending=TRUE;
	}
}
Example #24
0
// Java will give us a pointer to capture preview surface.
static int video_set_native_preview_window(MSFilter *f, void *arg) {
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	ms_mutex_lock(&d->mutex);

	jobject w = *((jobject*)arg);

	if (w == d->previewWindow) {
		ms_mutex_unlock(&d->mutex);
		return 0;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");

	if (d->androidCamera) {
		if (d->previewWindow == 0) {
			ms_message("Preview capture window set for the 1st time (win: %p rotation:%d)\n", w, d->rotation);
		} else {
			ms_message("Preview capture window changed (oldwin: %p newwin: %p rotation:%d)\n", d->previewWindow, w, d->rotation);

			env->CallStaticVoidMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V"),
						d->androidCamera);
			env->DeleteGlobalRef(d->androidCamera);
			d->androidCamera = env->NewGlobalRef(
				env->CallStaticObjectMethod(d->helperClass,
						env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;"),
						((AndroidWebcamConfig*)d->webcam->data)->id,
						d->hwCapableSize.width,
						d->hwCapableSize.height,
						(jint)d->fps,
						(d->rotation != UNDEFINED_ROTATION) ? d->rotation:0,
						(jlong)d));

		}
		// if previewWindow AND camera are valid => set preview window
		if (w && d->androidCamera)
			env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, w);
	} else {
		ms_message("Preview capture window set but camera not created yet; remembering it for later use\n");
	}
	if (d->previewWindow) {
		ms_message("Deleting previous preview window %p", d->previewWindow);
		env->DeleteGlobalRef(d->previewWindow);
	}
	d->previewWindow = w;

	ms_mutex_unlock(&d->mutex);
	return 0;
}
void AMediaFormat_setInt32(AMediaFormat *format, const char* name, int32_t value){
	JNIEnv *env = ms_get_jni_env();
	jclass mediaFormatClass = env->FindClass("android/media/MediaFormat");
	if (mediaFormatClass == NULL){
		ms_error("Couldn't find android/media/MediaFormat class !");
		env->ExceptionClear(); //very important.
		return;
	}
	jmethodID setIntegerID = env->GetMethodID(mediaFormatClass,"setInteger","(Ljava/lang/String;I)V");
	if (setIntegerID != NULL){
		jstring jkey = env->NewStringUTF(name);
		env->CallVoidMethod(format->jformat,setIntegerID,jkey,value);
		handle_java_exception();
		env->DeleteLocalRef(jkey);
	} else {
		ms_error("setInteger() not found in class MediaFormat !");
		env->ExceptionClear(); //very important.
	}
	env->DeleteLocalRef(mediaFormatClass);
}
Example #26
0
static void android_display_init(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)ms_new0(AndroidDisplay,1);
	JNIEnv *jenv=NULL;
	jclass wc;

	jenv=ms_get_jni_env();
	wc=(*jenv)->FindClass(jenv,"org/linphone/mediastream/video/AndroidVideoWindowImpl");
	if (wc==0){
		ms_fatal("Could not find org/linphone/mediastream/video/AndroidVideoWindowImpl class !");
	}
	ad->get_bitmap_id=(*jenv)->GetMethodID(jenv,wc,"getBitmap", "()Landroid/graphics/Bitmap;");
	ad->update_id=(*jenv)->GetMethodID(jenv,wc,"update","()V");
	ad->request_orientation_id=(*jenv)->GetMethodID(jenv,wc,"requestOrientation","(I)V");
	(*jenv)->DeleteLocalRef(jenv,wc);
	f->data=ad;
}
Example #27
0
static void video_capture_postprocess(MSFilter *f){
	ms_message("Postprocessing of Android VIDEO capture filter");
	AndroidReaderContext* d = getContext(f);
	ms_mutex_lock(&d->mutex);
	JNIEnv *env = ms_get_jni_env();

	if (d->androidCamera) {
		jmethodID method = env->GetStaticMethodID(d->helperClass,"stopRecording", "(Ljava/lang/Object;)V");

		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera);
		env->DeleteGlobalRef(d->androidCamera);
	}
	d->androidCamera = 0;
	d->previewWindow = 0;
	ms_mutex_unlock(&d->mutex);
}
static void android_display_uninit(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	JNIEnv *jenv=ms_get_jni_env();
	ms_message("%s %p %p", __FUNCTION__, f, ad->ogl);
	
	if (ad->ogl) {
		/* clear native ptr, to prevent rendering to occur now that ptr is invalid */
		if (ad->android_video_window)
			(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->set_opengles_display_id, 0);
		ogl_display_uninit(ad->ogl,FALSE);
		ms_free(ad->ogl);
	}
	if (ad->android_video_window) (*jenv)->DeleteGlobalRef(jenv, ad->android_video_window);

	ms_free(ad);
}
	msandroid_sound_write_data() :audio_track_class(0),audio_track(0),write_chunk_size(0),writtenBytes(0),last_sample_date(0){
		bufferizer = ms_bufferizer_new();
		ms_cond_init(&cond,0);
		JNIEnv *jni_env = ms_get_jni_env();
		audio_track_class = (jclass)jni_env->NewGlobalRef(jni_env->FindClass("android/media/AudioTrack"));
		if (audio_track_class == 0) {
			ms_error("cannot find android/media/AudioTrack\n");
			return;
		}
		jmethodID hwrate_id = jni_env->GetStaticMethodID(audio_track_class,"getNativeOutputSampleRate", "(I)I");
		if (hwrate_id == 0) {
			ms_error("cannot find int AudioTrack.getNativeOutputSampleRate(int streamType)");
			return;
		}
		rate = jni_env->CallStaticIntMethod(audio_track_class,hwrate_id,0 /*STREAM_VOICE_CALL*/);
		ms_message("Hardware sample rate is %i",rate);
	};
ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec *codec, AMediaCodecBufferInfo *info, int64_t timeoutUs) {
	JNIEnv *env = ms_get_jni_env();
	jint jindex=-1;
	jobject jinfo = NULL;

	jfieldID size;
	jfieldID offset;
	jfieldID flags;
	jclass mediaBufferInfoClass = env->FindClass("android/media/MediaCodec$BufferInfo");
	if (mediaBufferInfoClass == NULL){
		ms_error("Couldn't find android/media/MediaCodec$BufferInfo class !");
		env->ExceptionClear();
		return -1;
	}
	jmethodID methodID = env->GetMethodID(mediaBufferInfoClass,"<init>","()V");
	if (methodID != NULL){
		jinfo = env->NewObject(mediaBufferInfoClass,methodID);
		size = env->GetFieldID(mediaBufferInfoClass, "size" , "I");
		flags = env->GetFieldID(mediaBufferInfoClass, "flags" , "I");
		offset = env->GetFieldID(mediaBufferInfoClass, "offset" , "I");
		handle_java_exception();
	} else {
		ms_error("<init>() not found in class MediaCodec$BufferInfo !");
		env->ExceptionClear();
		env->DeleteLocalRef(mediaBufferInfoClass);
		return -1;
	}
	env->DeleteLocalRef(mediaBufferInfoClass);

	jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
	methodID = env->GetMethodID(mediaCodecClass,"dequeueOutputBuffer","(Landroid/media/MediaCodec$BufferInfo;J)I");
	if (methodID != NULL){
		jindex = env->CallIntMethod(codec->jcodec, methodID ,jinfo, timeoutUs);
		if (env->ExceptionCheck()) {
			env->ExceptionDescribe();
			env->ExceptionClear();
			ms_error("Exception in MediaCodec.dequeueOutputBuffer()");
		}
	} else {
		ms_error("dequeueOutputBuffer() not found in class MediaCodec !");
		env->ExceptionClear(); //very important.
		env->DeleteLocalRef(mediaCodecClass);
		env->DeleteLocalRef(jinfo);
		return -1;
	}

	info->size = env->GetIntField(jinfo,size);
	info->offset = env->GetIntField(jinfo,offset);
	info->flags = env->GetIntField(jinfo,flags);
	env->DeleteLocalRef(mediaCodecClass);
	env->DeleteLocalRef(jinfo);
	return (ssize_t) jindex;
}
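/*
 * Illustrative drain loop, not part of the original sources: pulls encoded buffers
 * out of a codec with the JNI-backed shims above. drain_encoder_sketch() is a
 * hypothetical helper; negative dequeue results (nothing ready, format-change
 * notifications) are simply treated as "stop draining" here, and the payload is
 * only logged instead of being packetized.
 */
static void drain_encoder_sketch(AMediaCodec *codec){
	AMediaCodecBufferInfo info;
	ssize_t idx;
	while ((idx = AMediaCodec_dequeueOutputBuffer(codec, &info, 0 /*do not wait*/)) >= 0) {
		size_t bufsize = 0;
		uint8_t *buf = AMediaCodec_getOutputBuffer(codec, (size_t)idx, &bufsize);
		if (buf != NULL) {
			/* The encoded data starts at buf + info.offset and is info.size bytes long. */
			ms_message("Output buffer %i: %i bytes at offset %i (flags=%i)",
				(int)idx, info.size, info.offset, info.flags);
		}
		AMediaCodec_releaseOutputBuffer(codec, (size_t)idx, false);
	}
}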