/* API: Destroy stream.
 * Stops capture if still running, drops the Java-side camera object,
 * then frees converter and pool. */
static pj_status_t and_stream_destroy(pjmedia_vid_dev_stream *s)
{
    and_stream *strm = (and_stream*)s;
    pj_bool_t attached;
    JNIEnv *env;

    PJ_ASSERT_RETURN(strm != NULL, PJ_EINVAL);

    attached = jni_get_env(&env);

    /* Make sure capture is halted before tearing anything down. */
    if (strm->is_running)
        and_stream_stop(s);

    /* Drop the global reference to the PjCamera Java object, if any. */
    if (strm->jcam) {
        (*env)->DeleteGlobalRef(env, strm->jcam);
        strm->jcam = NULL;
    }

    jni_detach_env(attached);

    pjmedia_vid_dev_conv_destroy_converter(&strm->conv);

    /* Releasing the pool frees strm itself as well. */
    if (strm->pool)
        pj_pool_release(strm->pool);

    PJ_LOG(4, (THIS_FILE, "Android camera stream destroyed"));
    return PJ_SUCCESS;
}
/* API: Start stream.
 * Invokes PjCamera.Start() on the Java side; marks the stream running
 * on success. */
static pj_status_t and_stream_start(pjmedia_vid_dev_stream *s)
{
    and_stream *strm = (and_stream*)s;
    pj_status_t status = PJ_SUCCESS;
    pj_bool_t attached;
    JNIEnv *env;
    jint rc;

    PJ_LOG(4, (THIS_FILE, "Starting Android camera stream"));

    attached = jni_get_env(&env);

    /* Call PjCamera::Start() method */
    rc = (*env)->CallIntMethod(env, strm->jcam, jobjs.cam.m_start);
    if (rc < 0) {
        PJ_LOG(3, (THIS_FILE, "Failed to start camera (err=%d)", rc));
        status = PJMEDIA_EVID_SYSERR;
    } else {
        strm->is_running = PJ_TRUE;
    }

    jni_detach_env(attached);
    return status;
}
static void VLCJniObject_eventCallback(const libvlc_event_t *ev, void *data) { vlcjni_object *p_obj = data; java_event jevent; JNIEnv *env = NULL; jevent.type = -1; jevent.arg1 = jevent.arg2 = 0; if (!p_obj->p_owner->pf_event_cb(p_obj, ev, &jevent)) return; if (!(env = jni_get_env(THREAD_NAME))) return; if (p_obj->p_owner->weak) (*env)->CallVoidMethod(env, p_obj->p_owner->weak, fields.VLCObject.dispatchEventFromNativeID, jevent.type, jevent.arg1, jevent.arg2); else (*env)->CallStaticVoidMethod(env, fields.VLCObject.clazz, fields.VLCObject.dispatchEventFromWeakNativeID, p_obj->p_owner->weakCompat, jevent.type, jevent.arg1, jevent.arg2); }
/* Bind a Java Surface to an android_window, releasing any native window
 * handle obtained from a previous (different) surface.
 * Returns 0 on success, -1 when a native handle is needed but cannot be
 * acquired. */
static int AndroidWindow_SetSurface(vout_display_sys_t *sys,
                                    android_window *p_window, jobject jsurf)
{
    /* A different surface invalidates the current native handle:
     * disconnect the private handle first, then release the window. */
    if (p_window->p_handle != NULL && jsurf != p_window->jsurf) {
        if (p_window->p_handle_priv != NULL) {
            sys->anwp.disconnect(p_window->p_handle_priv);
            p_window->p_handle_priv = NULL;
        }
        sys->anw.winRelease(p_window->p_handle);
        p_window->p_handle = NULL;
    }

    p_window->jsurf = jsurf;

    /* Opaque windows never need a native handle; an already-valid handle
     * is kept as-is. */
    if (p_window->b_opaque || p_window->p_handle != NULL)
        return 0;

    JNIEnv *env = jni_get_env(THREAD_NAME);
    if (env == NULL)
        return -1;

    p_window->p_handle = sys->anw.winFromSurface(env, p_window->jsurf);
    return (p_window->p_handle != NULL) ? 0 : -1;
}
/* Release the global class references acquired by jni_init_ids(). */
static void jni_deinit_ids()
{
    JNIEnv *env;
    pj_bool_t attached = jni_get_env(&env);

    if (jobjs.cam.cls) {
        (*env)->DeleteGlobalRef(env, jobjs.cam.cls);
        jobjs.cam.cls = NULL;
    }

    if (jobjs.cam_info.cls) {
        (*env)->DeleteGlobalRef(env, jobjs.cam_info.cls);
        jobjs.cam_info.cls = NULL;
    }

    jni_detach_env(attached);
}
/* API: Stop stream.
 * Invokes PjCamera.Stop() on the Java side and clears the running flag.
 * Always returns PJ_SUCCESS (the Java Stop() method returns void). */
static pj_status_t and_stream_stop(pjmedia_vid_dev_stream *s)
{
    and_stream *strm = (and_stream*)s;
    pj_bool_t attached;
    JNIEnv *env;

    PJ_ASSERT_RETURN(strm != NULL, PJ_EINVAL);

    PJ_LOG(4, (THIS_FILE, "Stopping Android camera stream"));

    attached = jni_get_env(&env);

    /* Call PjCamera::Stop() method */
    (*env)->CallVoidMethod(env, strm->jcam, jobjs.cam.m_stop);
    strm->is_running = PJ_FALSE;

    jni_detach_env(attached);
    return PJ_SUCCESS;
}
static void vlc_event_callback(const libvlc_event_t *ev, void *data) { JNIEnv *env; bool isAttached = false; if (eventHandlerInstance == NULL) return; if (jni_get_env(&env) < 0) { if (jni_attach_thread(&env, THREAD_NAME) < 0) return; isAttached = true; } /* Creating the bundle in C allows us to subscribe to more events * and get better flexibility for each event. For example, we can * have totally different types of data for each event, instead of, * for example, only an integer and/or string. */ jclass clsBundle = (*env)->FindClass(env, "android/os/Bundle"); jmethodID clsCtor = (*env)->GetMethodID(env, clsBundle, "<init>", "()V" ); jobject bundle = (*env)->NewObject(env, clsBundle, clsCtor); jmethodID putInt = (*env)->GetMethodID(env, clsBundle, "putInt", "(Ljava/lang/String;I)V" ); jmethodID putLong = (*env)->GetMethodID(env, clsBundle, "putLong", "(Ljava/lang/String;J)V" ); jmethodID putFloat = (*env)->GetMethodID(env, clsBundle, "putFloat", "(Ljava/lang/String;F)V" ); jmethodID putString = (*env)->GetMethodID(env, clsBundle, "putString", "(Ljava/lang/String;Ljava/lang/String;)V" ); if (ev->type == libvlc_MediaPlayerPositionChanged) { jstring sData = (*env)->NewStringUTF(env, "data"); (*env)->CallVoidMethod(env, bundle, putFloat, sData, ev->u.media_player_position_changed.new_position); (*env)->DeleteLocalRef(env, sData); } else if (ev->type == libvlc_MediaPlayerTimeChanged) { jstring sData = (*env)->NewStringUTF(env, "data"); (*env)->CallVoidMethod(env, bundle, putLong, sData, ev->u.media_player_time_changed.new_time); (*env)->DeleteLocalRef(env, sData); } else if(ev->type == libvlc_MediaPlayerVout) { /* For determining the vout/ES track change */ jstring sData = (*env)->NewStringUTF(env, "data"); (*env)->CallVoidMethod(env, bundle, putInt, sData, ev->u.media_player_vout.new_count); (*env)->DeleteLocalRef(env, sData); } else if(ev->type == libvlc_MediaListItemAdded || ev->type == libvlc_MediaListItemDeleted ) { jstring item_uri = (*env)->NewStringUTF(env, 
"item_uri"); jstring item_index = (*env)->NewStringUTF(env, "item_index"); char* mrl = libvlc_media_get_mrl( ev->type == libvlc_MediaListItemAdded ? ev->u.media_list_item_added.item : ev->u.media_list_item_deleted.item ); jstring item_uri_value = (*env)->NewStringUTF(env, mrl); jint item_index_value; if(ev->type == libvlc_MediaListItemAdded) item_index_value = ev->u.media_list_item_added.index; else item_index_value = ev->u.media_list_item_deleted.index; (*env)->CallVoidMethod(env, bundle, putString, item_uri, item_uri_value); (*env)->CallVoidMethod(env, bundle, putInt, item_index, item_index_value); (*env)->DeleteLocalRef(env, item_uri); (*env)->DeleteLocalRef(env, item_uri_value); (*env)->DeleteLocalRef(env, item_index); free(mrl); } /* Get the object class */ jclass cls = (*env)->GetObjectClass(env, eventHandlerInstance); if (!cls) { LOGE("EventHandler: failed to get class reference"); goto end; } /* Find the callback ID */ jmethodID methodID = (*env)->GetMethodID(env, cls, "callback", "(ILandroid/os/Bundle;)V"); if (methodID) { (*env)->CallVoidMethod(env, eventHandlerInstance, methodID, ev->type, bundle); } else { LOGE("EventHandler: failed to get the callback method"); } end: (*env)->DeleteLocalRef(env, bundle); if (isAttached) jni_detach_thread(); }
/* API: set capability.
 * Supports two capabilities:
 *  - PJMEDIA_VID_DEV_CAP_SWITCH: switch to another capture device via
 *    PjCamera.SwitchDevice(), then re-applies the current orientation.
 *  - PJMEDIA_VID_DEV_CAP_ORIENTATION: store the orientation, lazily
 *    create the rotation converter, and program the effective rotation
 *    (mirrored for the back-facing camera). */
static pj_status_t and_stream_set_cap(pjmedia_vid_dev_stream *s,
                                      pjmedia_vid_dev_cap cap,
                                      const void *pval)
{
    and_stream *strm = (and_stream*)s;
    JNIEnv *jni_env;
    pj_bool_t with_attach;
    pj_status_t status = PJ_SUCCESS;

    PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);

    switch (cap) {
    case PJMEDIA_VID_DEV_CAP_SWITCH:
    {
        pjmedia_vid_dev_switch_param *p = (pjmedia_vid_dev_switch_param*)pval;
        and_dev_info *adi;
        int res;

        /* Just return if current and target device are the same */
        if (strm->param.cap_id == p->target_id)
            return PJ_SUCCESS;

        /* Verify target capture ID */
        if (p->target_id < 0 || p->target_id >= strm->factory->dev_count)
            return PJ_EINVAL;

        /* Ok, let's do the switch */
        adi = &strm->factory->dev_info[p->target_id];
        PJ_LOG(4, (THIS_FILE, "Switching camera to %s..", adi->info.name));

        /* Call PjCamera::SwitchDevice() method */
        with_attach = jni_get_env(&jni_env);
        res = (*jni_env)->CallIntMethod(jni_env, strm->jcam,
                                        jobjs.cam.m_switch, adi->dev_idx);
        if (res < 0) {
            PJ_LOG(3, (THIS_FILE, "Failed to switch camera (err=%d)", res));
            status = PJMEDIA_EVID_SYSERR;
        } else {
            strm->param.cap_id = p->target_id;

            /* If successful, set the orientation as well */
            and_stream_set_cap(s, PJMEDIA_VID_DEV_CAP_ORIENTATION,
                               &strm->param.orient);
        }
        jni_detach_env(with_attach);
        break;
    }

    case PJMEDIA_VID_DEV_CAP_ORIENTATION:
    {
        pjmedia_orient orient = *(pjmedia_orient *)pval;
        pjmedia_orient eff_ori;
        and_dev_info *adi;

        pj_assert(orient >= PJMEDIA_ORIENT_UNKNOWN &&
                  orient <= PJMEDIA_ORIENT_ROTATE_270DEG);

        if (orient == PJMEDIA_ORIENT_UNKNOWN)
            return PJ_EINVAL;

        pj_memcpy(&strm->param.orient, pval, sizeof(strm->param.orient));

        /* Lazily create the rotation converter on first use. */
        if (!strm->conv.conv) {
            status = pjmedia_vid_dev_conv_create_converter(
                                        &strm->conv, strm->pool,
                                        &strm->param.fmt,
                                        strm->cam_size,
                                        strm->param.fmt.det.vid.size,
                                        PJ_TRUE,
                                        MAINTAIN_ASPECT_RATIO);
            if (status != PJ_SUCCESS)
                return status;
        }

        eff_ori = strm->param.orient;
        adi = &strm->factory->dev_info[strm->param.cap_id];
        /* Normalize the orientation for back-facing camera:
         * swap 90 and 270 degrees (facing == 0 means back camera). */
        if (!adi->facing) {
            if (eff_ori == PJMEDIA_ORIENT_ROTATE_90DEG)
                eff_ori = PJMEDIA_ORIENT_ROTATE_270DEG;
            else if (eff_ori == PJMEDIA_ORIENT_ROTATE_270DEG)
                eff_ori = PJMEDIA_ORIENT_ROTATE_90DEG;
        }
        pjmedia_vid_dev_conv_set_rotation(&strm->conv, eff_ori);

        PJ_LOG(4, (THIS_FILE, "Video capture orientation set to %d",
                              strm->param.orient));
        break;
    }

    default:
        status = PJMEDIA_EVID_INVCAP;
        break;
    }

    return status;
}
/* API: create stream */ static pj_status_t and_factory_create_stream( pjmedia_vid_dev_factory *ff, pjmedia_vid_dev_param *param, const pjmedia_vid_dev_cb *cb, void *user_data, pjmedia_vid_dev_stream **p_vid_strm) { and_factory *f = (and_factory*)ff; pj_pool_t *pool; and_stream *strm; and_dev_info *adi; const pjmedia_video_format_detail *vfd; const pjmedia_video_format_info *vfi; pjmedia_video_apply_fmt_param vafp; pj_uint32_t and_fmt; unsigned convert_to_i420 = 0; pj_status_t status = PJ_SUCCESS; JNIEnv *jni_env; pj_bool_t with_attach; jobject jcam; PJ_ASSERT_RETURN(f && param && p_vid_strm, PJ_EINVAL); PJ_ASSERT_RETURN(param->fmt.type == PJMEDIA_TYPE_VIDEO && param->fmt.detail_type == PJMEDIA_FORMAT_DETAIL_VIDEO && param->dir == PJMEDIA_DIR_CAPTURE, PJ_EINVAL); pj_bzero(&vafp, sizeof(vafp)); adi = &f->dev_info[param->cap_id]; vfd = pjmedia_format_get_video_format_detail(¶m->fmt, PJ_TRUE); vfi = pjmedia_get_video_format_info(NULL, param->fmt.id); if (param->fmt.id == PJMEDIA_FORMAT_I420 && adi->forced_i420) { /* Not really support I420, need to convert it from YV12/NV21 */ if (adi->has_nv21) { and_fmt = pj_fmt_to_and(PJMEDIA_FORMAT_NV21); convert_to_i420 = 1; } else if (adi->has_yv12) { and_fmt = pj_fmt_to_and(PJMEDIA_FORMAT_YV12); convert_to_i420 = 2; } else pj_assert(!"Bug!"); } else { and_fmt = pj_fmt_to_and(param->fmt.id); } if (!vfi || !and_fmt) return PJMEDIA_EVID_BADFORMAT; vafp.size = vfd->size; if (vfi->apply_fmt(vfi, &vafp) != PJ_SUCCESS) return PJMEDIA_EVID_BADFORMAT; /* Create and Initialize stream descriptor */ pool = pj_pool_create(f->pf, "and-dev", 512, 512, NULL); PJ_ASSERT_RETURN(pool != NULL, PJ_ENOMEM); strm = PJ_POOL_ZALLOC_T(pool, and_stream); pj_memcpy(&strm->param, param, sizeof(*param)); strm->pool = pool; strm->factory = f; pj_memcpy(&strm->vid_cb, cb, sizeof(*cb)); strm->user_data = user_data; pj_memcpy(&strm->vafp, &vafp, sizeof(vafp)); strm->ts_inc = PJMEDIA_SPF2(param->clock_rate, &vfd->fps, 1); /* Allocate buffer for YV12 -> I420 
conversion */ if (convert_to_i420) { pj_assert(vfi->plane_cnt > 1); strm->convert_to_i420 = convert_to_i420; strm->convert_buf = pj_pool_alloc(pool, vafp.plane_bytes[1]); } /* Native preview */ if (param->flags & PJMEDIA_VID_DEV_CAP_INPUT_PREVIEW) { } with_attach = jni_get_env(&jni_env); /* Instantiate PjCamera */ strm->cam_size.w = (vfd->size.w > vfd->size.h? vfd->size.w: vfd->size.h); strm->cam_size.h = (vfd->size.w > vfd->size.h? vfd->size.h: vfd->size.w); jcam = (*jni_env)->NewObject(jni_env, jobjs.cam.cls, jobjs.cam.m_init, adi->dev_idx, /* idx */ strm->cam_size.w, /* w */ strm->cam_size.h, /* h */ and_fmt, /* fmt */ vfd->fps.num*1000/ vfd->fps.denum, /* fps */ (jlong)(intptr_t)strm, /* user data */ NULL /* SurfaceView */ ); if (jcam == NULL) { PJ_LOG(3, (THIS_FILE, "Unable to create PjCamera instance")); status = PJMEDIA_EVID_SYSERR; goto on_return; } strm->jcam = (jobject)(*jni_env)->NewGlobalRef(jni_env, jcam); (*jni_env)->DeleteLocalRef(jni_env, jcam); if (strm->jcam == NULL) { PJ_LOG(3, (THIS_FILE, "Unable to create global ref to PjCamera")); status = PJMEDIA_EVID_SYSERR; goto on_return; } /* Video orientation. * If we send in portrait, we need to set up orientation converter * as well. */ if ((param->flags & PJMEDIA_VID_DEV_CAP_ORIENTATION) || (vfd->size.h > vfd->size.w)) { if (param->orient == PJMEDIA_ORIENT_UNKNOWN) param->orient = PJMEDIA_ORIENT_NATURAL; and_stream_set_cap(&strm->base, PJMEDIA_VID_DEV_CAP_ORIENTATION, ¶m->orient); } on_return: jni_detach_env(with_attach); /* Success */ if (status == PJ_SUCCESS) { strm->base.op = &stream_op; *p_vid_strm = &strm->base; } return status; }
/* API: refresh the list of devices.
 * Queries PjCameraInfo for each camera, collects supported sizes and
 * formats (synthesizing I420 entries when only YV12/NV21 are native),
 * promotes the first front-facing camera to index 0, then logs the
 * result.
 *
 * Fixes vs. previous version:
 *  - tmp_str was logged uninitialized when a device had no recognized
 *    format (fmt_cnt == 0); it is now NUL-terminated up front;
 *  - bogus (jintArray*) casts corrected to (jintArray). */
static pj_status_t and_factory_refresh(pjmedia_vid_dev_factory *ff)
{
    and_factory *f = (and_factory*)ff;
    pj_status_t status = PJ_SUCCESS;
    JNIEnv *jni_env;
    pj_bool_t with_attach, found_front = PJ_FALSE;
    int i, dev_count = 0;

    /* Clean up device info and pool */
    f->dev_count = 0;
    pj_pool_reset(f->dev_pool);

    with_attach = jni_get_env(&jni_env);

    /* dev_count = PjCameraInfo::GetCameraCount() */
    dev_count = (*jni_env)->CallStaticIntMethod(jni_env, jobjs.cam_info.cls,
                                                jobjs.cam_info.m_get_cnt);
    if (dev_count < 0) {
        PJ_LOG(3, (THIS_FILE, "Failed to get camera count"));
        status = PJMEDIA_EVID_SYSERR;
        goto on_return;
    }

    /* Start querying device info */
    f->dev_info = (and_dev_info*)
                  pj_pool_calloc(f->dev_pool, dev_count,
                                 sizeof(and_dev_info));

    for (i = 0; i < dev_count; i++) {
        and_dev_info *adi = &f->dev_info[f->dev_count];
        pjmedia_vid_dev_info *vdi = &adi->info;
        jobject jdev_info;
        jobject jtmp;
        int facing, max_fmt_cnt = PJMEDIA_VID_DEV_INFO_FMT_CNT;

        /* jdev_info = PjCameraInfo::GetCameraInfo(i) */
        jdev_info = (*jni_env)->CallStaticObjectMethod(
                                        jni_env,
                                        jobjs.cam_info.cls,
                                        jobjs.cam_info.m_get_info,
                                        i);
        if (jdev_info == NULL)
            continue;

        /* Get camera facing: 0=back 1=front */
        facing = (*jni_env)->GetIntField(jni_env, jdev_info,
                                         jobjs.cam_info.f_facing);
        if (facing < 0)
            goto on_skip_dev;

        /* Set device ID, direction, and has_callback info */
        adi->dev_idx = i;
        vdi->id = f->dev_count;
        vdi->dir = PJMEDIA_DIR_CAPTURE;
        vdi->has_callback = PJ_TRUE;
        vdi->caps = PJMEDIA_VID_DEV_CAP_SWITCH |
                    PJMEDIA_VID_DEV_CAP_ORIENTATION;

        /* Set driver & name info */
        pj_ansi_strncpy(vdi->driver, "Android", sizeof(vdi->driver));
        adi->facing = facing;
        if (facing == 0) {
            pj_ansi_strncpy(vdi->name, "Back camera", sizeof(vdi->name));
        } else {
            pj_ansi_strncpy(vdi->name, "Front camera", sizeof(vdi->name));
        }

        /* Get supported sizes (flat int array: w0,h0,w1,h1,...) */
        jtmp = (*jni_env)->GetObjectField(jni_env, jdev_info,
                                          jobjs.cam_info.f_sup_size);
        if (jtmp) {
            jintArray jiarray = (jintArray)jtmp;
            jint *sizes;
            jsize cnt, j;

            cnt = (*jni_env)->GetArrayLength(jni_env, jiarray);
            sizes = (*jni_env)->GetIntArrayElements(jni_env, jiarray, 0);
            adi->sup_size_cnt = cnt/2;
            adi->sup_size = pj_pool_calloc(f->dev_pool, adi->sup_size_cnt,
                                           sizeof(adi->sup_size[0]));
            for (j = 0; j < adi->sup_size_cnt; j++) {
                adi->sup_size[j].w = sizes[j*2];
                adi->sup_size[j].h = sizes[j*2+1];
            }
            (*jni_env)->ReleaseIntArrayElements(jni_env, jiarray, sizes, 0);
            (*jni_env)->DeleteLocalRef(jni_env, jtmp);
        } else {
            goto on_skip_dev;
        }

        /* Get supported formats */
        jtmp = (*jni_env)->GetObjectField(jni_env, jdev_info,
                                          jobjs.cam_info.f_sup_fmt);
        if (jtmp) {
            jintArray jiarray = (jintArray)jtmp;
            jint *fmts;
            jsize cnt, j;
            pj_bool_t has_i420 = PJ_FALSE;

            cnt = (*jni_env)->GetArrayLength(jni_env, jiarray);
            fmts = (*jni_env)->GetIntArrayElements(jni_env, jiarray, 0);
            for (j = 0; j < cnt; j++) {
                int k;
                pjmedia_format_id fmt = and_fmt_to_pj((pj_uint32_t)fmts[j]);

                /* Check for any duplicate */
                for (k = 0; k < vdi->fmt_cnt; k++) {
                    if (fmt == 0 || fmt == vdi->fmt[k].id) {
                        fmt = 0;
                        break;
                    }
                }

                /* Make sure we recognize this format */
                if (fmt == 0)
                    continue;

                /* Check formats for I420 conversion */
                if (fmt == PJMEDIA_FORMAT_I420) has_i420 = PJ_TRUE;
                else if (fmt == PJMEDIA_FORMAT_YV12) adi->has_yv12 = PJ_TRUE;
                else if (fmt == PJMEDIA_FORMAT_NV21) adi->has_nv21 = PJ_TRUE;

                for (k = 0; k < adi->sup_size_cnt &&
                            vdi->fmt_cnt < max_fmt_cnt-1; k++)
                {
                    /* Landscape video */
                    pjmedia_format_init_video(&vdi->fmt[vdi->fmt_cnt++],
                                              fmt,
                                              adi->sup_size[k].w,
                                              adi->sup_size[k].h,
                                              DEFAULT_FPS, 1);
                    /* Portrait video */
                    pjmedia_format_init_video(&vdi->fmt[vdi->fmt_cnt++],
                                              fmt,
                                              adi->sup_size[k].h,
                                              adi->sup_size[k].w,
                                              DEFAULT_FPS, 1);
                }
            }
            (*jni_env)->ReleaseIntArrayElements(jni_env, jiarray, fmts,
                                                JNI_ABORT);
            (*jni_env)->DeleteLocalRef(jni_env, jtmp);

            /* Pretend to support I420/IYUV, only if we support YV12/NV21 */
            if (!has_i420 && (adi->has_yv12 || adi->has_nv21) &&
                vdi->fmt_cnt < PJ_ARRAY_SIZE(vdi->fmt))
            {
                int k;

                adi->forced_i420 = PJ_TRUE;
                for (k = 0; k < adi->sup_size_cnt &&
                            vdi->fmt_cnt < max_fmt_cnt-1; k++)
                {
                    pjmedia_format_init_video(&vdi->fmt[vdi->fmt_cnt++],
                                              PJMEDIA_FORMAT_I420,
                                              adi->sup_size[k].w,
                                              adi->sup_size[k].h,
                                              DEFAULT_FPS, 1);
                    pjmedia_format_init_video(&vdi->fmt[vdi->fmt_cnt++],
                                              PJMEDIA_FORMAT_I420,
                                              adi->sup_size[k].h,
                                              adi->sup_size[k].w,
                                              DEFAULT_FPS, 1);
                }
            }
        } else {
            goto on_skip_dev;
        }

        /* If this is front camera, set it as first/default (if not yet) */
        if (facing == 1) {
            if (!found_front && f->dev_count > 0) {
                /* Swap this front cam info with one whose idx==0 */
                and_dev_info tmp_adi;
                pj_memcpy(&tmp_adi, &f->dev_info[0], sizeof(tmp_adi));
                pj_memcpy(&f->dev_info[0], adi, sizeof(tmp_adi));
                pj_memcpy(adi, &tmp_adi, sizeof(tmp_adi));
                f->dev_info[0].info.id = 0;
                f->dev_info[f->dev_count].info.id = f->dev_count;
            }
            found_front = PJ_TRUE;
        }

        f->dev_count++;

    on_skip_dev:
        (*jni_env)->DeleteLocalRef(jni_env, jdev_info);
    }

    PJ_LOG(4, (THIS_FILE,
               "Android video capture initialized with %d device(s):",
               f->dev_count));
    for (i = 0; i < f->dev_count; i++) {
        and_dev_info *adi = &f->dev_info[i];
        char tmp_str[2048], *p;
        int j, plen, slen;

        PJ_LOG(4, (THIS_FILE, "%2d: %s", i, f->dev_info[i].info.name));

        /* Print supported formats */
        p = tmp_str;
        plen = sizeof(tmp_str);
        tmp_str[0] = '\0'; /* fix: was read uninitialized when fmt_cnt==0 */
        for (j = 0; j < adi->info.fmt_cnt; j++) {
            char tmp_str2[5];
            const pjmedia_video_format_detail *vfd =
                pjmedia_format_get_video_format_detail(&adi->info.fmt[j], 0);

            pjmedia_fourcc_name(adi->info.fmt[j].id, tmp_str2);
            slen = pj_ansi_snprintf(p, plen, "%s/%dx%d ",
                                    tmp_str2, vfd->size.w, vfd->size.h);
            if (slen < 0 || slen >= plen) break;
            plen -= slen;
            p += slen;
        }
        PJ_LOG(4, (THIS_FILE, " supported format = %s", tmp_str));
    }

on_return:
    jni_detach_env(with_attach);
    return status;
}
/* Get Java object IDs (via FindClass, GetMethodID, GetFieldID, etc). * Note that this function should be called from library-loader thread, * otherwise FindClass, etc, may fail, see: * http://developer.android.com/training/articles/perf-jni.html#faq_FindClass */ static pj_status_t jni_init_ids() { JNIEnv *jni_env; pj_status_t status = PJ_SUCCESS; pj_bool_t with_attach = jni_get_env(&jni_env); #define GET_CLASS(class_path, class_name, cls) \ cls = (*jni_env)->FindClass(jni_env, class_path); \ if (cls == NULL || (*jni_env)->ExceptionCheck(jni_env)) { \ (*jni_env)->ExceptionClear(jni_env); \ PJ_LOG(3, (THIS_FILE, "[JNI] Unable to find class '" \ class_name "'")); \ status = PJMEDIA_EVID_SYSERR; \ goto on_return; \ } else { \ jclass tmp = cls; \ cls = (jclass)(*jni_env)->NewGlobalRef(jni_env, tmp); \ (*jni_env)->DeleteLocalRef(jni_env, tmp); \ if (cls == NULL) { \ PJ_LOG(3, (THIS_FILE, "[JNI] Unable to get global ref for " \ "class '" class_name "'")); \ status = PJMEDIA_EVID_SYSERR; \ goto on_return; \ } \ } #define GET_METHOD_ID(cls, class_name, method_name, signature, id) \ id = (*jni_env)->GetMethodID(jni_env, cls, method_name, signature); \ if (id == 0) { \ PJ_LOG(3, (THIS_FILE, "[JNI] Unable to find method '" method_name \ "' in class '" class_name "'")); \ status = PJMEDIA_EVID_SYSERR; \ goto on_return; \ } #define GET_SMETHOD_ID(cls, class_name, method_name, signature, id) \ id = (*jni_env)->GetStaticMethodID(jni_env, cls, method_name, signature); \ if (id == 0) { \ PJ_LOG(3, (THIS_FILE, "[JNI] Unable to find static method '" \ method_name "' in class '" class_name "'")); \ status = PJMEDIA_EVID_SYSERR; \ goto on_return; \ } #define GET_FIELD_ID(cls, class_name, field_name, signature, id) \ id = (*jni_env)->GetFieldID(jni_env, cls, field_name, signature); \ if (id == 0) { \ PJ_LOG(3, (THIS_FILE, "[JNI] Unable to find field '" field_name \ "' in class '" class_name "'")); \ status = PJMEDIA_EVID_SYSERR; \ goto on_return; \ } /* PjCamera class info */ 
GET_CLASS(PJ_CAMERA_CLASS_PATH, "PjCamera", jobjs.cam.cls); GET_METHOD_ID(jobjs.cam.cls, "PjCamera", "<init>", "(IIIIIJLandroid/view/SurfaceView;)V", jobjs.cam.m_init); GET_METHOD_ID(jobjs.cam.cls, "PjCamera", "Start", "()I", jobjs.cam.m_start); GET_METHOD_ID(jobjs.cam.cls, "PjCamera", "Stop", "()V", jobjs.cam.m_stop); GET_METHOD_ID(jobjs.cam.cls, "PjCamera", "SwitchDevice", "(I)I", jobjs.cam.m_switch); /* PjCameraInfo class info */ GET_CLASS(PJ_CAMERA_INFO_CLASS_PATH, "PjCameraInfo", jobjs.cam_info.cls); GET_SMETHOD_ID(jobjs.cam_info.cls, "PjCameraInfo", "GetCameraCount", "()I", jobjs.cam_info.m_get_cnt); GET_SMETHOD_ID(jobjs.cam_info.cls, "PjCameraInfo", "GetCameraInfo", "(I)L" PJ_CAMERA_INFO_CLASS_PATH ";", jobjs.cam_info.m_get_info); GET_FIELD_ID(jobjs.cam_info.cls, "PjCameraInfo", "facing", "I", jobjs.cam_info.f_facing); GET_FIELD_ID(jobjs.cam_info.cls, "PjCameraInfo", "orient", "I", jobjs.cam_info.f_orient); GET_FIELD_ID(jobjs.cam_info.cls, "PjCameraInfo", "supportedSize", "[I", jobjs.cam_info.f_sup_size); GET_FIELD_ID(jobjs.cam_info.cls, "PjCameraInfo", "supportedFormat", "[I", jobjs.cam_info.f_sup_fmt); GET_FIELD_ID(jobjs.cam_info.cls, "PjCameraInfo", "supportedFps1000", "[I", jobjs.cam_info.f_sup_fps); #undef GET_CLASS_ID #undef GET_METHOD_ID #undef GET_SMETHOD_ID #undef GET_FIELD_ID /* Register native function */ { JNINativeMethod m = { "PushFrame", "([BIJ)V", (void*)&OnGetFrame }; if ((*jni_env)->RegisterNatives(jni_env, jobjs.cam.cls, &m, 1)) { PJ_LOG(3, (THIS_FILE, "[JNI] Failed in registering native " "function 'OnGetFrame()'")); status = PJMEDIA_EVID_SYSERR; } } on_return: jni_detach_env(with_attach); return status; }
/* init all jni fields.
 * Done only one time during the first initialisation.
 * Resolves and caches every AudioTrack/AudioTimestamp/AudioSystem/
 * AudioFormat/AudioManager class, method, field and constant this module
 * needs, under a static mutex.
 * i_init_state: -1 = never run, 0 = failed, 1 = succeeded.
 * Returns true only when all critical lookups succeeded.
 * NOTE: several has_* feature flags are derived from the 'field' variable
 * left behind by the most recent GET_CONST_INT — the lookup order below
 * is therefore load-bearing. */
static bool InitJNIFields( audio_output_t *p_aout )
{
    static vlc_mutex_t lock = VLC_STATIC_MUTEX;
    static int i_init_state = -1;
    bool ret;
    jclass clazz;
    jfieldID field;
    JNIEnv* env = NULL;

    vlc_mutex_lock( &lock );

    /* Already initialized (successfully or not): just report the result. */
    if( i_init_state != -1 )
        goto end;

    if (!(env = jni_get_env(THREAD_NAME)))
    {
        i_init_state = 0;
        goto end;
    }

/* Log + clear a pending Java exception; abort init when 'critical'. */
#define CHECK_EXCEPTION( what, critical ) do { \
    if( (*env)->ExceptionOccurred( env ) ) \
    { \
        msg_Err( p_aout, "%s failed", what ); \
        (*env)->ExceptionClear( env ); \
        if( (critical) ) \
        { \
            i_init_state = 0; \
            goto end; \
        } \
    } \
} while( 0 )
#define GET_CLASS( str, critical ) do { \
    clazz = (*env)->FindClass( env, (str) ); \
    CHECK_EXCEPTION( str, critical ); \
} while( 0 )
#define GET_ID( get, id, str, args, critical ) do { \
    jfields.id = (*env)->get( env, clazz, (str), (args) ); \
    CHECK_EXCEPTION( #get, critical ); \
} while( 0 )
/* Read a static int constant; leaves 'field' NULL when not found. */
#define GET_CONST_INT( id, str, critical ) do { \
    field = NULL; \
    field = (*env)->GetStaticFieldID( env, clazz, (str), "I" ); \
    CHECK_EXCEPTION( #id, critical ); \
    if( field ) \
    { \
        jfields.id = (*env)->GetStaticIntField( env, clazz, field ); \
        CHECK_EXCEPTION( #id, critical ); \
    } \
} while( 0 )

    /* AudioTrack class init */
    GET_CLASS( "android/media/AudioTrack", true );
    jfields.AudioTrack.clazz = (jclass) (*env)->NewGlobalRef( env, clazz );
    CHECK_EXCEPTION( "NewGlobalRef", true );
    GET_ID( GetMethodID, AudioTrack.ctor, "<init>", "(IIIIII)V", true );
    GET_ID( GetMethodID, AudioTrack.release, "release", "()V", true );
    GET_ID( GetMethodID, AudioTrack.getState, "getState", "()I", true );
    GET_ID( GetMethodID, AudioTrack.play, "play", "()V", true );
    GET_ID( GetMethodID, AudioTrack.stop, "stop", "()V", true );
    GET_ID( GetMethodID, AudioTrack.flush, "flush", "()V", true );
    GET_ID( GetMethodID, AudioTrack.pause, "pause", "()V", true );

    /* API 21+ ByteBuffer write; fall back to the byte[] write otherwise. */
    GET_ID( GetMethodID, AudioTrack.writeV21, "write",
            "(Ljava/nio/ByteBuffer;II)I", false );
    if( jfields.AudioTrack.writeV21 )
    {
        GET_CONST_INT( AudioTrack.WRITE_NON_BLOCKING, "WRITE_NON_BLOCKING",
                       true );
#ifdef AUDIOTRACK_USE_FLOAT
        GET_ID( GetMethodID, AudioTrack.writeFloat, "write", "([FIII)I",
                true );
#endif
    } else
        GET_ID( GetMethodID, AudioTrack.write, "write", "([BII)I", true );

#ifdef AUDIOTRACK_HW_LATENCY
    GET_ID( GetMethodID, AudioTrack.getTimestamp, "getTimestamp",
            "(Landroid/media/AudioTimestamp;)Z", false );
#endif
    GET_ID( GetMethodID, AudioTrack.getPlaybackHeadPosition,
            "getPlaybackHeadPosition", "()I", true );

    GET_ID( GetStaticMethodID, AudioTrack.getMinBufferSize,
            "getMinBufferSize", "(III)I", true );
#ifdef AUDIOTRACK_NATIVE_SAMPLERATE
    GET_ID( GetStaticMethodID, AudioTrack.getNativeOutputSampleRate,
            "getNativeOutputSampleRate", "(I)I", true );
#endif
    GET_CONST_INT( AudioTrack.STATE_INITIALIZED, "STATE_INITIALIZED", true );
    GET_CONST_INT( AudioTrack.MODE_STREAM, "MODE_STREAM", true );
    GET_CONST_INT( AudioTrack.ERROR, "ERROR", true );
    GET_CONST_INT( AudioTrack.ERROR_BAD_VALUE , "ERROR_BAD_VALUE", true );
    GET_CONST_INT( AudioTrack.ERROR_INVALID_OPERATION,
                   "ERROR_INVALID_OPERATION", true );

    /* AudioTimestamp class init (if any) */
    if( jfields.AudioTrack.getTimestamp )
    {
        GET_CLASS( "android/media/AudioTimestamp", true );
        jfields.AudioTimestamp.clazz = (jclass) (*env)->NewGlobalRef( env,
                                                                      clazz );
        CHECK_EXCEPTION( "NewGlobalRef", true );

        GET_ID( GetMethodID, AudioTimestamp.ctor, "<init>", "()V", true );
        GET_ID( GetFieldID, AudioTimestamp.framePosition, "framePosition",
                "J", true );
        GET_ID( GetFieldID, AudioTimestamp.nanoTime, "nanoTime", "J", true );
    }
#ifdef AUDIOTRACK_HW_LATENCY
    /* AudioSystem class init */
    GET_CLASS( "android/media/AudioSystem", false );
    if( clazz )
    {
        jfields.AudioSystem.clazz = (jclass) (*env)->NewGlobalRef( env,
                                                                   clazz );
        GET_ID( GetStaticMethodID, AudioSystem.getOutputLatency,
                "getOutputLatency", "(I)I", false );
    }
#endif

    /* AudioFormat class init */
    GET_CLASS( "android/media/AudioFormat", true );
    GET_CONST_INT( AudioFormat.ENCODING_PCM_8BIT, "ENCODING_PCM_8BIT", true );
    GET_CONST_INT( AudioFormat.ENCODING_PCM_16BIT, "ENCODING_PCM_16BIT",
                   true );
#ifdef AUDIOTRACK_USE_FLOAT
    GET_CONST_INT( AudioFormat.ENCODING_PCM_FLOAT, "ENCODING_PCM_FLOAT",
                   false );
    /* Float output needs both the constant and the float write method. */
    jfields.AudioFormat.has_ENCODING_PCM_FLOAT = field != NULL &&
                                                 jfields.AudioTrack.writeFloat;
#else
    jfields.AudioFormat.has_ENCODING_PCM_FLOAT = false;
#endif
    GET_CONST_INT( AudioFormat.ENCODING_AC3, "ENCODING_AC3", false );
    if( field != NULL )
    {
        GET_CONST_INT( AudioFormat.ENCODING_E_AC3, "ENCODING_E_AC3", false );
        /* AC3 passthrough is advertised only when E_AC3 exists too. */
        jfields.AudioFormat.has_ENCODING_AC3 = field != NULL;
    } else
        jfields.AudioFormat.has_ENCODING_AC3 = false;

    GET_CONST_INT( AudioFormat.CHANNEL_OUT_MONO, "CHANNEL_OUT_MONO", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_STEREO, "CHANNEL_OUT_STEREO",
                   true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_FRONT_LEFT,
                   "CHANNEL_OUT_FRONT_LEFT", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_FRONT_RIGHT,
                   "CHANNEL_OUT_FRONT_RIGHT", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_5POINT1, "CHANNEL_OUT_5POINT1",
                   true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_BACK_LEFT,
                   "CHANNEL_OUT_BACK_LEFT", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_BACK_RIGHT,
                   "CHANNEL_OUT_BACK_RIGHT", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_FRONT_CENTER,
                   "CHANNEL_OUT_FRONT_CENTER", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_LOW_FREQUENCY,
                   "CHANNEL_OUT_LOW_FREQUENCY", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_BACK_CENTER,
                   "CHANNEL_OUT_BACK_CENTER", true );
    GET_CONST_INT( AudioFormat.CHANNEL_OUT_SIDE_LEFT,
                   "CHANNEL_OUT_SIDE_LEFT", false );
    if( field != NULL )
    {
        GET_CONST_INT( AudioFormat.CHANNEL_OUT_SIDE_RIGHT,
                       "CHANNEL_OUT_SIDE_RIGHT", true );
        jfields.AudioFormat.has_CHANNEL_OUT_SIDE = true;
    } else
        jfields.AudioFormat.has_CHANNEL_OUT_SIDE = false;

    /* AudioManager class init */
    GET_CLASS( "android/media/AudioManager", true );
    GET_CONST_INT( AudioManager.ERROR_DEAD_OBJECT, "ERROR_DEAD_OBJECT",
                   false );
    jfields.AudioManager.has_ERROR_DEAD_OBJECT = field != NULL;
    GET_CONST_INT( AudioManager.STREAM_MUSIC, "STREAM_MUSIC", true );

#undef CHECK_EXCEPTION
#undef GET_CLASS
#undef GET_ID
#undef GET_CONST_INT

    i_init_state = 1;
end:
    ret = i_init_state == 1;
    if( !ret )
        msg_Err( p_aout, "AudioTrack jni init failed" );
    vlc_mutex_unlock( &lock );
    return ret;
}