bool GrGLContextInfo::initialize(const GrGLInterface* interface) { this->reset(); // We haven't validated the GrGLInterface yet, so check for GetString // function pointer if (interface->fFunctions.fGetString) { const GrGLubyte* verUByte; GR_GL_CALL_RET(interface, verUByte, GetString(GR_GL_VERSION)); const char* ver = reinterpret_cast<const char*>(verUByte); const GrGLubyte* rendererUByte; GR_GL_CALL_RET(interface, rendererUByte, GetString(GR_GL_RENDERER)); const char* renderer = reinterpret_cast<const char*>(rendererUByte); if (interface->validate()) { fGLVersion = GrGLGetVersionFromString(ver); if (GR_GL_INVALID_VER == fGLVersion) { return false; } if (!GrGetGLSLGeneration(interface, &fGLSLGeneration)) { return false; } fVendor = GrGLGetVendor(interface); /* * Qualcomm drivers have a horrendous bug with some drivers. Though they claim to * support GLES 3.00, some perfectly valid GLSL300 shaders will only compile with * #version 100, and will fail to compile with #version 300 es. In the long term, we * need to lock this down to a specific driver version. */ if (kQualcomm_GrGLVendor == fVendor) { fGLSLGeneration = k110_GrGLSLGeneration; } fRenderer = GrGLGetRendererFromString(renderer); fIsMesa = GrGLIsMesaFromVersionString(ver); fIsChromium = GrGLIsChromiumFromRendererString(renderer); // This must occur before caps init. fInterface.reset(SkRef(interface)); return fGLCaps->init(*this, interface); } } return false; }
/**
 * Queries GR_GL_RENDERER on the given interface and maps the returned
 * driver string to a GrGLRenderer enum value.
 */
GrGLRenderer GrGLGetRenderer(const GrGLInterface* gl) {
    const GrGLubyte* v;
    GR_GL_CALL_RET(gl, v, GetString(GR_GL_RENDERER));
    // Prefer reinterpret_cast over a C-style cast, matching how the
    // GetString results are converted elsewhere in this file.
    return GrGLGetRendererFromString(reinterpret_cast<const char*>(v));
}