Example no. 1
JNIEXPORT void JNICALL Java_org_dolphinemu_dolphinemu_NativeLibrary_Run(JNIEnv *env, jobject obj, jobject _surf)
{
	surf = ANativeWindow_fromSurface(env, _surf);
	// Install our callbacks
	OSD::AddCallback(OSD::OSD_INIT, ButtonManager::Init);
	OSD::AddCallback(OSD::OSD_SHUTDOWN, ButtonManager::Shutdown);

	LogManager::Init();
	SConfig::Init();
	VideoBackend::PopulateList();
	VideoBackend::ActivateBackend(SConfig::GetInstance().m_LocalCoreStartupParameter.m_strVideoBackend);
	WiimoteReal::LoadSettings();

	// Load our Android specific settings
	IniFile ini;
	bool onscreencontrols = true;
	ini.Load(File::GetUserPath(D_CONFIG_IDX) + std::string("Dolphin.ini"));
	ini.Get("Android", "ScreenControls", &onscreencontrols, true);

	if (onscreencontrols)
		OSD::AddCallback(OSD::OSD_ONFRAME, ButtonManager::DrawButtons);

	// No use running the loop when booting fails
	if ( BootManager::BootCore( g_filename.c_str() ) )
		while (PowerPC::GetState() != PowerPC::CPU_POWERDOWN)
			updateMainFrameEvent.Wait();

	WiimoteReal::Shutdown();
	VideoBackend::ClearList();
	SConfig::Shutdown();
	LogManager::Shutdown();
}
Example no. 2
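// Cache the new native window under the mutex and bump the generation counter;
// AndroidWindow is presumably a smart-pointer alias whose deleter releases the
// previously held ANativeWindow when it is replaced.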
static void onSurfaceCreated(JNIEnv *env, jobject /*object*/, jobject surface)
{
    s_currentNativeWindowMutex.lock();
    s_currentNativeWindow = AndroidWindow(ANativeWindow_fromSurface(env, surface), [](ANativeWindow *w) { ANativeWindow_release(w); });
    ++s_currentNativeWindowId;
    s_currentNativeWindowMutex.unlock();
}
Example no. 3
// Render an NV21 frame, passed in as a Java byte[], into the given Surface
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_renderingSurface
(JNIEnv * env, jobject thiz,jobject javaSurface,jbyteArray pixelsArray,jint w,jint h,jint size) {
	ANativeWindow* window = ANativeWindow_fromSurface(env, javaSurface);
	if(window!=NULL)
	{
		ANativeWindow_setBuffersGeometry(window,w,h,COLOR_FORMAT_NV21);
		ANativeWindow_Buffer buffer;
		if (ANativeWindow_lock(window, &buffer, NULL) == 0) {
			unsigned char *pixels = (unsigned char*)(*env)->GetByteArrayElements(env,pixelsArray, 0);
			if(buffer.width==buffer.stride){
				memcpy(buffer.bits, pixels,  size);
			}else{
				int height = h*3/2;
				int width = w;
				int i=0;
				for(;i<height;++i)
					memcpy(buffer.bits +  buffer.stride * i
						, pixels + width * i
						, width);
			}
			(*env)->ReleaseByteArrayElements(env,pixelsArray,pixels,JNI_ABORT);
			ANativeWindow_unlockAndPost(window);
		}
		ANativeWindow_release(window);
	}
	return;
}
Example no. 4
extern "C" JNIEXPORT void JNICALL Java_org_skia_viewer_ViewerActivity_onSurfaceChanged(
    JNIEnv* env, jobject activity, jlong handle, jobject surface) {
    auto skiaAndroidApp = (SkiaAndroidApp*)handle;
    Message message(kSurfaceChanged);
    message.fNativeWindow = ANativeWindow_fromSurface(env, surface);
    skiaAndroidApp->postMessage(message);
}
Example no. 5
static void
com_stainberg_MediaPlayer_MediaPlayer_setSurface(JNIEnv* env, jobject thiz, jobject surface) {
//	if(surface != NULL) {
//		if(spWindow != NULL) {
//			ANativeWindow_release(spWindow);
//			spWindow = NULL;
//		}
//		spWindow = ANativeWindow_fromSurface(env, surface);
//		if(spMediaPlayer != NULL) {
//			spMediaPlayer->initSurface(spWindow);
//		}
//	}
	if(spWindow != NULL) {
		ANativeWindow_release(spWindow);
		spWindow = NULL;
	}
	if(spMediaPlayer != NULL) {
		spMediaPlayer->initSurface(NULL);
	}
	if(surface != NULL) {
		spWindow = ANativeWindow_fromSurface(env, surface);
		if(spMediaPlayer != NULL) {
			spMediaPlayer->initSurface(spWindow);
		}
	}
}
Example no. 6
/*
 * Class:     com_mcxiaoke_ndk_Native
 * Method:    initNW
 * Signature: (JLandroid/view/Surface;)V
 */
JNIEXPORT void JNICALL Java_com_mcxiaoke_ndk_Native_initNW
(JNIEnv *env, jclass clazz, jlong avi, jobject surface)
{
    // Get the native window from the surface
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(
                                      env, surface);
    if (0 == nativeWindow)
    {
        ThrowException(env, "java/lang/RuntimeException",
                       "Unable to get native window from surface.");
        goto exit;
    }

    // Set the buffers geometry to AVI movie frame dimensions
    // If these are different than the window's physical size
    // then the buffer will be scaled to match that size.
    if (0 > ANativeWindow_setBuffersGeometry(nativeWindow,
            AVI_video_width((avi_t*) avi),
            AVI_video_height((avi_t*) avi),
            WINDOW_FORMAT_RGB_565))
    {
        ThrowException(env, "java/lang/RuntimeException",
                       "Unable to set buffers geometry.");
    }

    // Release the native window
    ANativeWindow_release(nativeWindow);
    nativeWindow = 0;

exit:
    return;
}
Example no. 7
static void onSurfaceChanged(JNIEnv *env, jobject /*object*/, jobject surface, int /*format*/, int /*width*/, int /*height*/)
{
    s_currentNativeWindowMutex.lock();
    s_currentNativeWindow = AndroidWindow(ANativeWindow_fromSurface(env, surface), [](ANativeWindow *w) { ANativeWindow_release(w); });
    ++s_currentNativeWindowId;
    s_currentNativeWindowMutex.unlock();
}
Example no. 8
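// Acquire the window only long enough to query its dimensions, then release it again.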
extern void GetPlatformWindowDimensions(void* nativeHandle, int* width, int* height)
{
    ANativeWindow* w = ANativeWindow_fromSurface(Jni, (jobject)nativeHandle);
    VERBOSE("Got native window for measurements %p", w);
    *width = ANativeWindow_getWidth(w);
    *height = ANativeWindow_getHeight(w);
    ANativeWindow_release(w);
}
Example no. 9
JNIEXPORT jlong JNICALL Java_com_gaulois94_samples_ZombieRenderer_createZombieRenderer(JNIEnv* jenv, jclass jcls, jlong parent, jobject surface)
{
	ZombieRenderer* renderer = new ZombieRenderer((Updatable*)parent);
	ANativeWindow* window = ANativeWindow_fromSurface(jenv, surface);
	renderer->initializeContext(window);

	return (jlong)renderer;
}
Example no. 10
	JNIEXPORT void JNICALL nativeSetSurface(JNIEnv* jenv, jobject obj, jobject surface) {
		if(surface != 0) {
			nativeWindow = ANativeWindow_fromSurface(jenv, surface);
		} else {
			ANativeWindow_release(nativeWindow);
			nativeWindow = 0;  // clear the stale handle so it is not reused after release
		}
		return;
	}
Example no. 11
// When the SurfaceView that backs the playback window is created on the Android side, get a reference to its surface in the native layer
int Java_info_sodapanda_sodaplayer_FFmpegVideoView_setupsurface(JNIEnv* env,jobject thiz,jobject pSurface,int pwidth,int pheight,jlong ptr){
	playInstance *instance = (playInstance *)ptr;
	instance->window = ANativeWindow_fromSurface(env,pSurface);
	if(instance->display_width !=0){
		setAndroidWindowPix(instance->display_width,instance->display_height,instance);
	}
	instance->disable_video=0;
	return 0;
}
Example no. 12
extern "C" JNIEXPORT void JNICALL
Java_com_android_cts_opengl_primitive_GLPrimitiveActivity_setupContextSwitchBenchmark(
        JNIEnv* env, jclass clazz, jobject surface, jboolean offscreen, jint workload) {
    if (workload <= 8) {
        // This test uses 8 iterations, so workload can't be more than 8.
        gRenderer = new ContextSwitchRenderer(
                ANativeWindow_fromSurface(env, surface), offscreen, workload);
    }
}
Example no. 13
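// Wrap the (optional) preview Surface in a native window and pass it, together with
// the frame dimensions, to the decoder; hgs_init presumably keeps the reference and
// releases it later, as the inline ANativeWindow_release note suggests.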
extern "C" JNIEXPORT void JNICALL
Java_com_huazhen_barcode_MainActivity_JNIinitDecoder(JNIEnv *env, jclass cls, jint w, jint h, jobject surface) {
    ANativeWindow* window = 0;
    if (surface) {
        window = ANativeWindow_fromSurface(env, surface); // ANativeWindow_release
        ERR_EXIT_IF(!window, "ANativeWindow_fromSurface: %p %p", env, surface);
    }
    hgs_init( w,h, window);
}
Example no. 14
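// Ask the Java activity for its current Surface, wrap it in an ANativeWindow, and drop
// the local Surface reference; the caller owns (and must eventually release) the window.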
ANativeWindow *Android_JNI_GetNativeWindow(void) {
	ANativeWindow *anw;
	jobject s;
	JNIEnv *env = Android_JNI_GetEnv();

	s = (*env)->CallStaticObjectMethod(env, mActivityClass, midGetNativeSurface);
	anw = ANativeWindow_fromSurface(env, s);
	(*env)->DeleteLocalRef(env, s);

	return anw;
}
Example no. 15
// set the surface
void Java_com_example_nativecodec_NativeCodec_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
   // obtain a native window from a Java surface
   if (data.window)
   {
      ANativeWindow_release(data.window);
      data.window = NULL;
   }
   data.window = ANativeWindow_fromSurface(env, surface);
   LOGV("@@@ setsurface %p", data.window);
}
Example no. 16
void naSetSurface(JNIEnv *pEnv, jobject pObj, jobject pSurface) {
	if (0 != pSurface) {
		// get the native window reference
		window = ANativeWindow_fromSurface(pEnv, pSurface);
		// set format and size of window buffer
		ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
	} else {
		// release the native window
		ANativeWindow_release(window);
	}
}
Example no. 17
// Surface Handling
JNIEXPORT void JNICALL Java_org_dolphinemu_dolphinemu_NativeLibrary_SurfaceChanged(JNIEnv* env,
                                                                                   jobject obj,
                                                                                   jobject surf)
{
  s_surf = ANativeWindow_fromSurface(env, surf);
  if (s_surf == nullptr)
    __android_log_print(ANDROID_LOG_ERROR, DOLPHIN_TAG, "Error: Surface is null.");

  if (g_renderer)
    g_renderer->ChangeSurface(s_surf);
}
Example no. 18
/*
 * Class:     com_mcxiaoke_ndk_Native
 * Method:    renderNW
 * Signature: (JLandroid/view/Surface;)Z
 */
JNIEXPORT jboolean JNICALL Java_com_mcxiaoke_ndk_Native_renderNW
(JNIEnv *env, jclass clazz, jlong avi, jobject surface)
{
    jboolean isFrameRead = JNI_FALSE;

    long frameSize = 0;
    int keyFrame = 0;

    // Get the native window from the surface
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(
                                      env, surface);
    if (0 == nativeWindow)
    {
        ThrowException(env, "java/io/RuntimeException",
                       "Unable to get native window from surface.");
        goto exit;
    }

    // Lock the native window and get access to raw buffer
    ANativeWindow_Buffer windowBuffer;
    if (0 > ANativeWindow_lock(nativeWindow, &windowBuffer, 0))
    {
        ThrowException(env, "java/io/RuntimeException",
                       "Unable to lock native window.");
        goto release;
    }

    // Read AVI frame bytes to raw buffer
    frameSize = AVI_read_frame((avi_t*) avi,
                               (char*) windowBuffer.bits,
                               &keyFrame);

    // Check if frame is successfully read
    if (0 < frameSize)
    {
        isFrameRead = JNI_TRUE;
    }

    // Unlock and post the buffer for displaying
    if (0 > ANativeWindow_unlockAndPost(nativeWindow))
    {
        ThrowException(env, "java/io/RuntimeException",
                       "Unable to unlock and post to native window.");
        goto release;
    }

release:
    // Release the native window
    ANativeWindow_release(nativeWindow);
    nativeWindow = 0;

exit:
    return isFrameRead;
}
Example no. 19
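// Drop the player's old surface and hand it the new window; if wrapping the Surface
// failed, the null window is still propagated and the loading-error path is taken.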
static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
    CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
    if (!data) return;
    ANativeWindow *new_native_window = ANativeWindow_fromSurface(env, surface);
    GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);

    data->player->releaseSurface();
    data->player->setSurface(new_native_window);

    if (!new_native_window)
        handle_stream_loading_error();
}
Example no. 20
JNIEXPORT void JNICALL Java_com_orcchg_openglsample_NativeInterface_setSurface
  (JNIEnv *jenv, jobject, jlong descriptor, jobject surface) {
  NativeInterface* ptr = (NativeInterface*) descriptor;
  if (surface == nullptr) {
    ANativeWindow_release(ptr->window);
    ptr->window = nullptr;
  } else {
    ptr->window = ANativeWindow_fromSurface(jenv, surface);
    ptr->render->setSurface(ptr->window);
    ptr->render->draw();
  }
}
Example no. 21
static jint nativeSetCaptureDisplay(JNIEnv *env, jobject thiz,
	ID_TYPE id_camera, jobject jSurface) {

	jint result = JNI_ERR;
	ENTER();
	UVCCamera *camera = reinterpret_cast<UVCCamera *>(id_camera);
	if (LIKELY(camera)) {
		ANativeWindow *capture_window = jSurface ? ANativeWindow_fromSurface(env, jSurface) : NULL;
		result = camera->setCaptureDisplay(capture_window);
	}
	RETURN(result, jint);
}
Example no. 22
void QAndroidPlatformOpenGLWindow::createEgl(EGLConfig config)
{
    clearEgl();
    QJNIEnvironmentPrivate env;
    m_nativeWindow = ANativeWindow_fromSurface(env, m_androidSurfaceObject.object());
    m_androidSurfaceObject = QJNIObjectPrivate();
    m_eglSurface = eglCreateWindowSurface(m_eglDisplay, config, m_nativeWindow, NULL);
    if (m_eglSurface == EGL_NO_SURFACE) {
        EGLint error = eglGetError();
        eglTerminate(m_eglDisplay);
        qFatal("EGL Error : Could not create the egl surface: error = 0x%x\n", error);
    }
}
Example no. 23
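// On the first call there is no GS handler yet, so one is created around the newly
// acquired window; on later calls the existing handler is simply retargeted to it.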
extern "C" JNIEXPORT void JNICALL Java_com_virtualapplications_play_NativeInterop_setupGsHandler(JNIEnv* env, jobject obj, jobject surface)
{
	auto nativeWindow = ANativeWindow_fromSurface(env, surface);
	auto gsHandler = g_virtualMachine->GetGSHandler();
	if(gsHandler == nullptr)
	{
		g_virtualMachine->CreateGSHandler(CGSH_OpenGLAndroid::GetFactoryFunction(nativeWindow));
	}
	else
	{
		static_cast<CGSH_OpenGLAndroid*>(gsHandler)->SetWindow(nativeWindow);
	}
}
Example no. 24
JNIEXPORT void JNICALL Java_com_example_rogelio_opengl_NativeEglExample_nativeSetSurface(JNIEnv* jenv, jobject obj, jobject surface)
{
    if (surface != 0) {
        window = ANativeWindow_fromSurface(jenv, surface);
        LOG_INFO("Got window %p", window);
        renderer->setWindow(window);
    } else {
        LOG_INFO("Releasing window");
        ANativeWindow_release(window);
    }

    return;
}
Example no. 25
static void setSurface(JNIEnv *env, jobject /*thiz*/, jobject jSurface)
{
#ifndef ANDROID_PLUGIN_OPENGL
    if (m_surface)
        env->DeleteGlobalRef(m_surface);
    m_surface = env->NewGlobalRef(jSurface);
#else
    m_surfaceMutex.lock();
    EGLNativeWindowType nativeWindow = ANativeWindow_fromSurface(env, jSurface);
    bool sameNativeWindow = (nativeWindow != 0 && nativeWindow == m_nativeWindow);

    m_nativeWindow = nativeWindow;
    if (m_waitForWindow)
        m_waitForWindowSemaphore.release();

    if (m_androidPlatformIntegration) {
        // Use the desktop size.
        // On some devices, the getters for the native window size give wrong values.
        QSize size = QAndroidPlatformIntegration::defaultDesktopSize();

        QPlatformScreen *screen = m_androidPlatformIntegration->screen();
        QRect geometry(QPoint(0, 0), size);
        if (screen) {
            QWindowSystemInterface::handleScreenAvailableGeometryChange(screen->screen(), geometry);
            QWindowSystemInterface::handleScreenGeometryChange(screen->screen(), geometry);
        }

        if (!sameNativeWindow) {
            m_surfaceMutex.unlock();
            m_androidPlatformIntegration->surfaceChanged();
        } else {
            // Resize all top level windows, since they share the same surface
            foreach (QWindow *w, QGuiApplication::topLevelWindows()) {
                QAndroidOpenGLPlatformWindow *window =
                        static_cast<QAndroidOpenGLPlatformWindow *>(w->handle());

                if (window != 0) {
                    window->lock();
                    window->scheduleResize(size);

                    QWindowSystemInterface::handleExposeEvent(window->window(),
                                                              QRegion(window->window()->geometry()));
                    window->unlock();
                }
            }

            m_surfaceMutex.unlock();
        }

    } else {
Example no. 26
DEFINE_JNI(jlong, setsurface) (JNIEnv* env, jclass clazz, jlong info, jobject surface)
{
    FFmpegInfo* pInfo = (FFmpegInfo*) info;
    if (!pInfo || !pInfo->binit) return -1;

    LOGI("get native window ...");
    ANativeWindow* pNativeWindow = ANativeWindow_fromSurface(env, surface);
    if (!pNativeWindow)
        return -1;

    pInfo->window = pNativeWindow;
    LOGI("setsurface ok");
    return 0;
}
Example no. 27
    jint Java_ws_websca_benchscaw_MainActivity_directRender( JNIEnv* env, jobject thiz, jobject surface ) {
        //__android_log_write(ANDROID_LOG_DEBUG, "Benchscaw JNI native nextFrame cpu flags:", "flags");
        //__android_log_write(ANDROID_LOG_DEBUG, "Benchscaw JNI native nextFrame", "av_read_frame");
        int done = av_read_frame(pFormatCtx, &packet);
        if(done>=0) {
            // Is this a packet from the video stream?
            if(packet.stream_index==videoStream) {
                // Decode video frame
                //__android_log_write(ANDROID_LOG_DEBUG, "Benchscaw JNI native nextFrame", "avcodec_decode_video2");
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

                // Did we get a video frame?
                if(frameFinished) {
                    ANativeWindow* window = ANativeWindow_fromSurface(env, surface);
                    ANativeWindow_Buffer buffer;
                    if (ANativeWindow_lock(window, &buffer, NULL) == 0) {
                        // Convert the image from its native format to RGB
                        AVPicture pict;
                        pict.data[0] = (uint8_t*)buffer.bits;
                        pict.linesize[0] = buffer.stride*4;
                        sws_scale
                        (
                            sws_ctx,
                            (uint8_t const * const *)pFrame->data,
                            pFrame->linesize,
                            0,
                            pCodecCtx->height,
                            pict.data,
                            pict.linesize
                        );

                        //char str[200];
                        //sprintf(str, "%i", buffer.width);
                        ///__android_log_write(ANDROID_LOG_DEBUG, "width", str);
                        ANativeWindow_unlockAndPost(window);
                    }
                    ANativeWindow_release(window);
                }
            }

            // Free the packet that was allocated by av_read_frame
            //__android_log_write(ANDROID_LOG_DEBUG, "Benchscaw JNI native nextFrame", "av_free_packet");
            av_free_packet(&packet);
        }
        return done;
    }
Example no. 28
	void Java_com_test_surfaceview_TestNative_Init(JNIEnv* env,jobject obj
			,jobject surface,jobject assetManager)
	{
		g_nativeWindow =  ANativeWindow_fromSurface(env,surface);
		if(g_nativeWindow == NULL)
		{
			LOGE("Get AAssetManager ERROR!");
		}
		//ANativeWindow_acquire(g_nativeWindow);
		g_assetManager = AAssetManager_fromJava(env, assetManager);
		if(g_assetManager == NULL)
		{
			LOGE("Get AAssetManager ERROR!");
		}
	}
Example no. 29
extern "C" JNIEXPORT void JNICALL Java_com_virtualapplications_play_NativeInterop_setupGsHandler(JNIEnv* env, jobject obj, jobject surface)
{
	auto nativeWindow = ANativeWindow_fromSurface(env, surface);
	auto gsHandler = g_virtualMachine->GetGSHandler();
	if(gsHandler == nullptr)
	{
		g_virtualMachine->CreateGSHandler(CGSH_OpenGLAndroid::GetFactoryFunction(nativeWindow));
		g_virtualMachine->m_ee->m_gs->OnNewFrame.connect(
			boost::bind(&CStatsManager::OnNewFrame, &CStatsManager::GetInstance(), _1));
	}
	else
	{
		static_cast<CGSH_OpenGLAndroid*>(gsHandler)->SetWindow(nativeWindow);
	}
}
Example no. 30
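// Create the native worker, bind it to the Java object, acquire the window from the
// Surface, copy the two path strings out of the JVM, and start the worker with them;
// the worker presumably takes ownership of the ANativeWindow reference.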
JNIEXPORT jboolean JNICALL Java_com_android_video_worker_EFEditorWorker_createEditorWorker(JNIEnv* env, jobject obj, jobject surface, jstring rootfolder, jstring videopath)
{
	EFEditorWorker* editorWorker = new EFEditorWorker();
	setJavaNativeObject(env, obj, editorWorker);

	ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);

	const char* pRootPath = env->GetStringUTFChars(rootfolder, NULL) ;
	std::string strRootPath = pRootPath ;
	env->ReleaseStringUTFChars(rootfolder, pRootPath);

	const char* pVideoFile = env->GetStringUTFChars(videopath, NULL) ;
	std::string strVideoFile = pVideoFile ;
	env->ReleaseStringUTFChars(videopath, pVideoFile);

	return editorWorker->createEditorWorker(nativeWindow, strRootPath.c_str(), strVideoFile.c_str()) ;
}