void DirectRender::InitForCurrentSurface( JNIEnv * jni, bool wantFrontBuffer_, int buildVersionSDK_ )
{
	LOG( "%p DirectRender::InitForCurrentSurface(%s)", this, wantFrontBuffer_ ? "true" : "false" );

	wantFrontBuffer = wantFrontBuffer_;

	display = eglGetDisplay( EGL_DEFAULT_DISPLAY );
	context = eglGetCurrentContext();
	windowSurface = eglGetCurrentSurface( EGL_DRAW );

	// NOTE: On Mali as well as under Android-L, we need to perform
	// an initial swapbuffers in order for the front-buffer extension
	// to work.
	// TODO: In previous KitKat development binaries, applying the initial
	// swapbuffers on Adreno would result in poor performance. We should
	// review if the issue still exists on the final production binary
	// and if not, remove the check for KitKat and always perform the
	// initial swapbuffers.
	static const int KITKAT_WATCH = 20;
	const GpuType gpuType = EglGetGpuType();
	if ( ( buildVersionSDK_ > KITKAT_WATCH ) ||	// if the SDK is Lollipop or higher
		 ( gpuType & GPU_TYPE_MALI ) != 0 )		// or the GPU is Mali
	{
		LOG( "Performing an initial swapbuffers for Mali and/or Android-L" );
		eglSwapBuffers( display, windowSurface );
		// The swapbuffers call handles the buffer queue/dequeue internally;
		// the surface is now ready to have its usage set.
	}

	// Get the surface size.
	eglQuerySurface( display, windowSurface, EGL_WIDTH, &width );
	eglQuerySurface( display, windowSurface, EGL_HEIGHT, &height );
	LOG( "surface size: %i x %i", width, height );

	if ( !wantFrontBuffer_ )
	{
		LOG( "Running without front buffer" );
	}
	else
	{
		surfaceMgr.Init( jni );

		gvrFrontbufferExtension = surfaceMgr.SetFrontBuffer( windowSurface, true );
		LOG( "gvrFrontbufferExtension = %s", ( gvrFrontbufferExtension ) ? "TRUE" : "FALSE" );

		if ( ( gpuType & GPU_TYPE_MALI ) != 0 )
		{
			LOG( "Mali GPU" );
			tilerControl = FB_MALI;
		}
		else if ( ( gpuType & GPU_TYPE_ADRENO ) != 0 )
		{
			// Query the number of samples on the display
			EGLint configID;
			if ( !eglQueryContext( display, context, EGL_CONFIG_ID, &configID ) )
			{
				FAIL( "eglQueryContext EGL_CONFIG_ID failed" );
			}
			EGLConfig eglConfig = EglConfigForConfigID( display, configID );
			if ( eglConfig == NULL )
			{
				FAIL( "EglConfigForConfigID failed" );
			}
			EGLint samples = 0;
			eglGetConfigAttrib( display, eglConfig, EGL_SAMPLES, &samples );

			if ( gpuType == GPU_TYPE_ADRENO_330 )
			{
				LOG( "Adreno 330 GPU" );
				tilerControl = FB_TILED_RENDERING;
			}
			else
			{
				LOG( "Adreno GPU" );

				// NOTE: On KitKat, only tiled render mode will continue to work
				// with multisamples set on the frame buffer (at a performance
				// loss). On Lollipop, having multisamples set on the frame buffer
				// is an error for all render modes and will result in a black screen.
				if ( samples != 0 )
				{
					// TODO: We may want to make this a FATAL ERROR.
					LOG( "**********************************************" );
					LOG( "ERROR: frame buffer uses MSAA - turn off MSAA!" );
					LOG( "**********************************************" );
					tilerControl = FB_TILED_RENDERING;
				}
				else
				{
					// NOTE: Currently (2014-11-19) the memory controller
					// clock is not fixed when running with fixed CPU/GPU levels.
					// For direct render mode, the fluctuation may cause significant
					// performance issues.
					// FIXME: Enable tiled render mode for now until we are able
					// to run with fixed memory clock.
#if 0
					tilerControl = FB_BINNING_CONTROL;

					// 2014-09-28: Qualcomm is moving to a new extension with
					// the next driver. In order for the binning control to
					// work for both the current and next driver, we add the
					// following call, which should happen before any calls to
					// glHint( GL_* ). This causes a gl error on current drivers,
					// but will be needed for the new driver.
					GL_CheckErrors( "Before enabling Binning Control" );
					LOG( "Enable GL_BINNING_CONTROL_HINT_QCOM - may cause a GL_ERROR on current driver" );
					glEnable( GL_BINNING_CONTROL_HINT_QCOM );
					GL_CheckErrors( "Expected on current driver" );
#else
					tilerControl = FB_TILED_RENDERING;
#endif
				}
			}
		}

		// draw stuff to the screen without swapping to see if it is working
		// ExerciseFrontBuffer();
	}
}
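// Usage sketch (illustrative, not from the original source): InitForCurrentSurface() reads
// the current EGL context and EGL_DRAW surface, so the window surface is assumed to already
// be current on the calling thread. The directRender, display, windowSurface, context and
// buildVersionSDK names below are hypothetical.
//
//	if ( eglMakeCurrent( display, windowSurface, windowSurface, context ) == EGL_FALSE )
//	{
//		FAIL( "eglMakeCurrent window surface failed: %s", EglErrorString() );
//	}
//	// buildVersionSDK would typically come from android.os.Build.VERSION.SDK_INT via JNI.
//	directRender.InitForCurrentSurface( jni, true /* wantFrontBuffer */, buildVersionSDK );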
eglSetup_t EglSetup( const EGLContext shareContext, const int requestedGlEsVersion,
		const int redBits, const int greenBits, const int blueBits,
		const int depthBits, const int multisamples, const GLuint contextPriority )
{
	LOG( "EglSetup: requestGlEsVersion(%d), redBits(%d), greenBits(%d), blueBits(%d), depthBits(%d), multisamples(%d), contextPriority(%d)",
			requestedGlEsVersion, redBits, greenBits, blueBits, depthBits, multisamples, contextPriority );

	eglSetup_t egl = {};

	// Get the built in display
	// TODO: check for external HDMI displays
	egl.display = eglGetDisplay( EGL_DEFAULT_DISPLAY );

	// Initialize EGL
	EGLint majorVersion;
	EGLint minorVersion;
	eglInitialize( egl.display, &majorVersion, &minorVersion );
	LOG( "eglInitialize gives majorVersion %i, minorVersion %i", majorVersion, minorVersion );

	const char * eglVendorString = eglQueryString( egl.display, EGL_VENDOR );
	LOG( "EGL_VENDOR: %s", eglVendorString );
	const char * eglClientApisString = eglQueryString( egl.display, EGL_CLIENT_APIS );
	LOG( "EGL_CLIENT_APIS: %s", eglClientApisString );
	const char * eglVersionString = eglQueryString( egl.display, EGL_VERSION );
	LOG( "EGL_VERSION: %s", eglVersionString );
	const char * eglExtensionString = eglQueryString( egl.display, EGL_EXTENSIONS );
	LOG( "EGL_EXTENSIONS:" );
	LogStringWords( eglExtensionString );

	// We do NOT want to use eglChooseConfig, because the Android EGL code pushes in
	// multisample flags behind our back if the user has selected the "force 4x MSAA"
	// option in developer settings, and that is completely wasted for our warp target.
	egl.config = ChooseColorConfig( egl.display, redBits, greenBits, blueBits, depthBits,
			multisamples, true /* pBuffer compatible */ );
	if ( egl.config == 0 )
	{
		FAIL( "No acceptable EGL color configs." );
		return egl;
	}

	// The EGLContext is created with the EGLConfig.
	// Try to get an OpenGL ES 3.0 context first, which is required to do
	// MSAA to framebuffer objects on Adreno.
	for ( int version = requestedGlEsVersion ; version >= 2 ; version-- )
	{
		LOG( "Trying for a EGL_CONTEXT_CLIENT_VERSION %i context shared with %p:", version, shareContext );
		// We want the application context to be lower priority than the TimeWarp context.
		EGLint contextAttribs[] =
		{
			EGL_CONTEXT_CLIENT_VERSION, version,
			EGL_NONE, EGL_NONE,
			EGL_NONE
		};
		// Don't set EGL_CONTEXT_PRIORITY_LEVEL_IMG at all if set to EGL_CONTEXT_PRIORITY_MEDIUM_IMG;
		// it is the caller's responsibility to use that if the driver doesn't support it.
		if ( contextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG )
		{
			contextAttribs[2] = EGL_CONTEXT_PRIORITY_LEVEL_IMG;
			contextAttribs[3] = contextPriority;
		}
		egl.context = eglCreateContext( egl.display, egl.config, shareContext, contextAttribs );
		if ( egl.context != EGL_NO_CONTEXT )
		{
			LOG( "Succeeded." );
			egl.glEsVersion = version;

			EGLint configIDReadback;
			if ( !eglQueryContext( egl.display, egl.context, EGL_CONFIG_ID, &configIDReadback ) )
			{
				WARN( "eglQueryContext EGL_CONFIG_ID failed" );
			}
			EGLConfig configCheck = EglConfigForConfigID( egl.display, configIDReadback );

			LOG( "Created context with config %i, query returned ID %i = config %i",
					(int)egl.config, configIDReadback, (int)configCheck );
			break;
		}
	}
	if ( egl.context == EGL_NO_CONTEXT )
	{
		WARN( "eglCreateContext failed: %s", EglErrorString() );
		return egl;
	}

	if ( contextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG )
	{
		// See what context priority we actually got
		EGLint actualPriorityLevel;
		eglQueryContext( egl.display, egl.context, EGL_CONTEXT_PRIORITY_LEVEL_IMG, &actualPriorityLevel );
		switch ( actualPriorityLevel )
		{
			case EGL_CONTEXT_PRIORITY_HIGH_IMG:		LOG( "Context is EGL_CONTEXT_PRIORITY_HIGH_IMG" );		break;
			case EGL_CONTEXT_PRIORITY_MEDIUM_IMG:	LOG( "Context is EGL_CONTEXT_PRIORITY_MEDIUM_IMG" );	break;
			case EGL_CONTEXT_PRIORITY_LOW_IMG:		LOG( "Context is EGL_CONTEXT_PRIORITY_LOW_IMG" );		break;
			default:								LOG( "Context has unknown priority level" );			break;
		}
	}

	// Because EGL_KHR_surfaceless_context is not widespread (only on Tegra as of
	// September 2013), we need to create a tiny pbuffer surface to make the context
	// current.
	//
	// It is necessary to use a config with the same characteristics that the
	// context was created with, plus the pbuffer flag, or we will get an
	// EGL_BAD_MATCH error on the eglMakeCurrent() call.
	//
	// This is necessary to support 565 framebuffers, which may be important
	// for higher refresh rate displays.
	const EGLint attrib_list[] =
	{
		EGL_WIDTH, 16,
		EGL_HEIGHT, 16,
		EGL_NONE
	};
	egl.pbufferSurface = eglCreatePbufferSurface( egl.display, egl.config, attrib_list );
	if ( egl.pbufferSurface == EGL_NO_SURFACE )
	{
		WARN( "eglCreatePbufferSurface failed: %s", EglErrorString() );
		eglDestroyContext( egl.display, egl.context );
		egl.context = EGL_NO_CONTEXT;
		return egl;
	}

	if ( eglMakeCurrent( egl.display, egl.pbufferSurface, egl.pbufferSurface, egl.context ) == EGL_FALSE )
	{
		WARN( "eglMakeCurrent pbuffer failed: %s", EglErrorString() );
		eglDestroySurface( egl.display, egl.pbufferSurface );
		eglDestroyContext( egl.display, egl.context );
		egl.context = EGL_NO_CONTEXT;
		return egl;
	}

	const char * glVendorString = (const char *)glGetString( GL_VENDOR );
	LOG( "GL_VENDOR: %s", glVendorString );
	const char * glRendererString = (const char *)glGetString( GL_RENDERER );
	LOG( "GL_RENDERER: %s", glRendererString );
	const char * glVersionString = (const char *)glGetString( GL_VERSION );
	LOG( "GL_VERSION: %s", glVersionString );
	const char * glSlVersionString = (const char *)glGetString( GL_SHADING_LANGUAGE_VERSION );
	LOG( "GL_SHADING_LANGUAGE_VERSION: %s", glSlVersionString );

	egl.gpuType = EglGetGpuType();

	return egl;
}
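// Usage sketch (illustrative, not from the original source): a time warp thread might ask
// EglSetup() for a GLES 3.0 context with an 8-bit color buffer, no depth buffer, no MSAA,
// and high priority so it runs ahead of the lower-priority application context. The exact
// parameter values below are assumptions for the example.
//
//	const eglSetup_t warpEgl = EglSetup( EGL_NO_CONTEXT,	// no shared context
//			3,												// request OpenGL ES 3.0, falls back to 2.x
//			8, 8, 8,										// redBits, greenBits, blueBits
//			0,												// depthBits: no depth buffer needed for warp
//			0,												// multisamples: MSAA is wasted on the warp target
//			EGL_CONTEXT_PRIORITY_HIGH_IMG );				// contextPriority
//	if ( warpEgl.context == EGL_NO_CONTEXT )
//	{
//		FAIL( "EglSetup failed" );
//	}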