void GraphicsManager::setFullScreen(bool fullScreen) { if (_fullScreen == fullScreen) // Nothing to do return; if (!Common::isMainThread()) { // Not the main thread, send a request instead RequestMan.dispatchAndWait(RequestMan.fullscreen(fullScreen)); return; } destroyContext(); // Save the flags uint32 flags = _screen->flags; // Now try to change modes _screen = SDL_SetVideoMode(0, 0, 0, flags ^ SDL_FULLSCREEN); // If we could not go full screen, revert back. if (!_screen) _screen = SDL_SetVideoMode(0, 0, 0, flags); else _fullScreen = fullScreen; // There's no reason how this could possibly fail, but ok... if (!_screen) throw Common::Exception("Failed going to fullscreen and then failed reverting."); rebuildContext(); }
// Tears down every EGL object still owned by this display. Contexts,
// images, and surfaces are destroyed via while-not-empty loops because
// each destroyX() call removes the object from its set (plain iteration
// would invalidate the iterator).
void Display::terminate()
{
    // Unbind anything current before destroying it.
    makeCurrent(nullptr, nullptr, nullptr);

    while (!mContextSet.empty())
    {
        destroyContext(*mContextSet.begin());
    }

    while (!mImageSet.empty())
    {
        destroyImage(*mImageSet.begin());
    }

    while (!mImplementation->getSurfaceSet().empty())
    {
        destroySurface(*mImplementation->getSurfaceSet().begin());
    }

    mConfigSet.clear();

    // Only delete the device when it has an owning display, i.e. it was
    // created internally.
    if (mDevice != nullptr && mDevice->getOwningDisplay() != nullptr)
    {
        // Don't delete the device if it was created externally using eglCreateDeviceANGLE
        // We also shouldn't set it to null in case eglInitialize() is called again later
        SafeDelete(mDevice);
    }

    mImplementation->terminate();

    mInitialized = false;

    // Never de-init default platform.. terminate is not that final.
}
void GraphicsManager::setFullScreen(bool fullScreen) { if (_fullScreen == fullScreen) // Nothing to do return; // Force calling it from the main thread if (!Common::isMainThread()) { Events::MainThreadFunctor<void> functor(boost::bind(&GraphicsManager::setFullScreen, this, fullScreen)); return RequestMan.callInMainThread(functor); } destroyContext(); // uint32 flags = SDL_GetWindowFlags(_screen); // Now try to change modes SDL_SetWindowFullscreen(_screen, SDL_WINDOW_FULLSCREEN); // If we could not go full screen, revert back. if (!_screen) SDL_SetWindowFullscreen(_screen, 0); else _fullScreen = fullScreen; // There's no reason how this could possibly fail, but ok... if (!_screen) throw Common::Exception("Failed going to fullscreen and then failed reverting."); rebuildContext(); }
// Destructor: makes the context current (activate), clears pending
// buffers — presumably this needs an active context; deactivates, and
// finally destroys the context. The call order looks deliberate; do not
// reorder without checking the activate/deactivate contract.
HeadlessView::~HeadlessView() {
    activate();
    clearBuffers();
    deactivate();
    destroyContext();
}
///////////////////////////////////////////////////////// // Destructor // ///////////////////////////////////////////////////////// GemWindow :: ~GemWindow() { if(m_pimpl) { m_pimpl->mycontext=destroyContext(m_pimpl->mycontext); delete m_pimpl; m_pimpl=NULL; } }
void Window::fullScreen() { if (WindowMode != m_mode) return; GLFWmonitor * monitor = glfwGetPrimaryMonitor(); if (!monitor) return; m_windowedModeSize = size(); const GLFWvidmode * mode = glfwGetVideoMode(monitor); int w = mode->width; int h = mode->height; ContextFormat format = m_context->format(); finalizeEventHandler(); WindowEventDispatcher::deregisterWindow(this); destroyContext(); if (createContext(format, w, h, monitor)) { WindowEventDispatcher::registerWindow(this); initializeEventHandler(); m_mode = FullScreenMode; } }
// Shuts the window down: unhooks the event handler, destroys the GL
// context, and — when this window is flagged quit-on-destroy — terminates
// the whole application with exit code 0.
void Window::destroy()
{
    finalizeEventHandler();
    destroyContext();

    if (m_quitOnDestroy)
        Application::quit(0);
}
// Ends the current session by destroying its context.
// Returns true if a live context was torn down, false when there was no
// session to end.
bool DataSourceInterface::endSession()
{
    if (context == NULL)
        return false;

    destroyContext(context);
    context = NULL;
    return true;
}
///////////////////////////////////////////////////////// // Destructor // ///////////////////////////////////////////////////////// GemWindow :: ~GemWindow() { if(m_pimpl) { m_pimpl->mycontext=destroyContext(m_pimpl->mycontext); delete m_pimpl; m_pimpl=0; } GemWindow::PIMPL::s_contexts.erase(this); }
// Destroys this Gem window's rendering context. First broadcasts a
// "context destroyed" message to the __gemBase receiver (presumably so
// bound objects can drop context-dependent resources — confirm against
// sendContextDestroyedMsg), then destroys the context, stops the pimpl's
// dispatcher, and mirrors the resulting handle into m_context.
void GemWindow::destroyGemWindow(void){
  // tell all objects that this context is vanishing
  sendContextDestroyedMsg(gensym("__gemBase")->s_thing);

  // do the rest
  m_pimpl->mycontext=destroyContext(m_pimpl->mycontext);
  m_pimpl->undispatch();
  m_context=m_pimpl->mycontext;
}
// Frees everything the renderer owns: every loaded model, both shader
// programs, the GL context, and finally the SDL subsystems.
Renderer::~Renderer()
{
    for (auto * model : models)
        delete model;

    delete program;
    delete guiprogram;

    destroyContext();
    SDL_Quit();
}
// Destroys the underlying EGL context, clears the "current context"
// bookkeeping if this object was recorded as the current GL or VG
// context, and drops this object's reference on the context tracker.
QEglContext::~QEglContext()
{
    destroyContext();

    if (currentGLContext == this)
        currentGLContext = 0;
    if (currentVGContext == this)
        currentVGContext = 0;
    QEglContextTracker::deref();
}
/**
 * Stops playback: pauses the OpenSL ES audio player, marks the playing
 * state stopped, waits ~50ms for the player to settle, then tears down
 * the audio context.
 *
 * @return SL_RESULT_SUCCESS on success, or the error code returned by
 *         setAudioPlayerStatePaused() on failure.
 */
SLresult AudioOutput::stop() {
    ALOGI("Set the audio player state paused");
    // Set the audio player state paused
    SLresult result = setAudioPlayerStatePaused();
    if (SL_RESULT_SUCCESS != result) {
        return result;
    }
    playingState = PLAYING_STATE_STOPPED;
    // Brief settle delay (0.05 s) before destroying the context.
    usleep(0.05 * 1000000);
    ALOGI("destroyContext...");
    destroyContext();
    // BUG FIX: the function previously fell off the end without returning
    // a value — undefined behavior for a non-void function. At this point
    // result == SL_RESULT_SUCCESS.
    return result;
}
void GraphicsManager::setScreenSize(int width, int height) { // Force calling it from the main thread if (!Common::isMainThread()) { Events::MainThreadFunctor<void> functor(boost::bind(&GraphicsManager::setScreenSize, this, width, height)); return RequestMan.callInMainThread(functor); } // Save properties // uint32 flags = SDL_GetWindowFlags(_screen); destroyContext(); SDL_DisplayMode displayMode; // Now try to change modes if (!_fullScreen) { SDL_SetWindowSize(_screen, width, height); } else { SDL_SetWindowFullscreen(_screen, 0); displayMode.w = width; displayMode.h = height; displayMode.driverdata = 0; displayMode.refresh_rate = 0; displayMode.format = 0; SDL_SetWindowDisplayMode(_screen, &displayMode); SDL_SetWindowFullscreen(_screen, SDL_WINDOW_FULLSCREEN); } if (!_screen) { // Could not change mode, revert back. if (!_fullScreen) SDL_SetWindowSize(_screen, _width, _height); else { displayMode.w = _width; displayMode.h = _height; SDL_SetWindowDisplayMode(_screen, &displayMode); } // There's no reason how this could possibly fail, but ok... if (!_screen) throw Common::Exception("Failed changing the resolution and then failed reverting."); return; } _width = width; _height = height; rebuildContext(); // Let the NotificationManager notify the Notifyables that the resolution changed NotificationMan.resized(_width, _height, width, height); }
// Destructor: releases the GL context, frees the cached resolution list,
// and clears the global pointer to this window implementation.
LLWindowMacOSX::~LLWindowMacOSX()
{
	destroyContext();

	// delete[] on NULL is a no-op, so no explicit check is needed.
	delete [] mSupportedResolutions;

	gWindowImplementation = NULL;
}
// Builds a WGL OpenGL context with the requested framebuffer
// configuration, GL version (major.minor), profile, and renderer,
// optionally sharing objects with shareContext. Because Windows lets a
// window's pixel format be set only once, the helper window and context
// may be destroyed and re-created a second time once WGL extensions are
// loadable through the first context.
// Throws std::runtime_error when the helper window cannot be created.
OSGLContext_win::OSGLContext_win(const FramebufferConfig& pixelFormatAttrs,
                                 int major,
                                 int minor,
                                 bool coreProfile,
                                 const GLRendererID &rendererID,
                                 const OSGLContext_win* shareContext)
    : _dc(0)
    , _handle(0)
    , _interval(0)
    , _windowHandle(0)
{
    if ( !createWindow(&_windowHandle) ) {
        throw std::runtime_error("WGL: Failed to create window");
    }

    createGLContext(pixelFormatAttrs, major, minor, coreProfile, rendererID, shareContext);

    if ( analyzeContextWGL(pixelFormatAttrs, major, minor) ) {
        // Some window hints require us to re-create the context using WGL
        // extensions retrieved through the current context, as we cannot
        // check for WGL extensions or retrieve WGL entry points before we
        // have a current context (actually until we have implicitly loaded
        // the vendor ICD)

        // Yes, this is strange, and yes, this is the proper way on WGL

        // As Windows only allows you to set the pixel format once for
        // a window, we need to destroy the current window and create a new
        // one to be able to use the new pixel format

        // Technically, it may be possible to keep the old window around if
        // we're just creating an OpenGL 3.0+ context with the same pixel
        // format, but it's not worth the added code complexity

        // First we clear the current context (the one we just created)
        // This is usually done by glfwDestroyWindow, but as we're not doing
        // full GLFW window destruction, it's duplicated here
        makeContextCurrent(NULL);

        // Next destroy the Win32 window and WGL context (without resetting
        // or destroying the GLFW window object)
        destroyContext();
        destroyWindow();

        // ...and then create them again, this time with better APIs
        if ( !createWindow(&_windowHandle) ) {
            throw std::runtime_error("WGL: Failed to create window");
        }
        createGLContext(pixelFormatAttrs, major, minor, coreProfile, rendererID, shareContext);
    }
}
// close() destroys all OS-specific code associated with a window. // Usually called from LLWindowManager::destroyWindow() void LLWindowMacOSX::close() { // Is window is already closed? // if (!mWindow) // { // return; // } // Make sure cursor is visible and we haven't mangled the clipping state. setMouseClipping(FALSE); showCursor(); destroyContext(); }
// Changes the full-scene anti-aliasing level (SDL 1.2 path). Must run on
// the main thread; other threads reschedule themselves. On failure the
// previous FSAA settings and video mode are restored.
// Returns true when the requested level is active afterwards.
bool GraphicsManager::setFSAA(int level) {
	// Force calling it from the main thread
	if (!Common::isMainThread()) {
		Events::MainThreadFunctor<bool> functor(boost::bind(&GraphicsManager::setFSAA, this, level));

		return RequestMan.callInMainThread(functor);
	}

	if (_fsaa == level)
		// Nothing to do
		return true;

	// Check if we have the support for that level
	if (level > _fsaaMax)
		return false;

	// Backup the old level and set the new level
	int oldFSAA = _fsaa;
	_fsaa = level;

	destroyContext();

	// Set the multisample level (must happen before SDL_SetVideoMode)
	SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, (_fsaa > 0) ? 1 : 0);
	SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, _fsaa);

	uint32 flags = _screen->flags;

	// Now try to change the screen
	_screen = SDL_SetVideoMode(0, 0, 0, flags);

	if (!_screen) {
		// Failed changing, back up
		_fsaa = oldFSAA;

		// Set the multisample level
		SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, (_fsaa > 0) ? 1 : 0);
		SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, _fsaa);

		_screen = SDL_SetVideoMode(0, 0, 0, flags);

		// There's no reason how this could possibly fail, but ok...
		if (!_screen)
			throw Common::Exception("Failed reverting to the old FSAA settings");
	}

	rebuildContext();

	return _fsaa == level;
}
// Window destructor: removes this instance from the static registry,
// tears down event handling and the GL context if one was created, and
// quits the application once the last window is gone.
Window::~Window()
{
    s_instances.erase(this);

    if (m_context)
    {
        finalizeEventHandler();
        WindowEventDispatcher::deregisterWindow(this);
        destroyContext();
    }

    // Last window closed -> terminate the application.
    if (s_instances.empty())
        Application::quit(0);
}
// Shuts the display down: destroys every remaining surface and context
// using while-not-empty loops (each destroy call erases the object from
// its set), then releases the renderer.
void Display::terminate()
{
    while (!mSurfaceSet.empty())
    {
        destroySurface(*mSurfaceSet.begin());
    }

    while (!mContextSet.empty())
    {
        destroyContext(*mContextSet.begin());
    }

    glDestroyRenderer(mRenderer);
    mRenderer = NULL;
}
/* Tears a PuglView down completely. NULL-safe. Order matters: close the
 * file-browser helper, destroy the rendering context, destroy the X11
 * window, close the display connection, then free the heap objects. */
void
puglDestroy(PuglView* view)
{
	if (!view) {
		return;
	}
	x_fib_close(view->impl->display);
	destroyContext(view);
	XDestroyWindow(view->impl->display, view->impl->win);
	XCloseDisplay(view->impl->display);
	free(view->impl);
	free(view);
}
// Establishes (or re-uses) an NFS connection for the given URL.
// Splits the URL into an export path plus a path relative to that export,
// fetches or creates an nfs context for the export, and mounts it when
// the context is new. A fresh context is obtained when the share/host
// changed or the cached context exceeded CONTEXT_TIMEOUT.
// Returns the result of splitting the URL; false on context/mount failure.
bool CNFSConnection::Connect(VFSURL* url, std::string& relativePath)
{
  PLATFORM::CLockObject lock(*this);
  bool ret = false;
  int nfsRet = 0;
  std::string exportPath;

  resolveHost(url->hostname);
  ret = splitUrlIntoExportAndPath(url->hostname, url->filename, exportPath, relativePath);

  // Reconnect when the export or host changed, or the context timed out.
  if( (ret && (exportPath != m_exportPath || m_hostName != url->hostname)) ||
      (PLATFORM::GetTimeMs() - m_lastAccessedTime) > CONTEXT_TIMEOUT )
  {
    int contextRet = getContextForExport(std::string(url->hostname) + exportPath);

    if(contextRet == CONTEXT_INVALID)//we need a new context because sharename or hostname has changed
    {
      return false;
    }

    if(contextRet == CONTEXT_NEW) //new context was created - we need to mount it
    {
      //we connect to the directory of the path. This will be the "root" path of this connection then.
      //So all fileoperations are relative to this mountpoint...
      nfsRet = nfs_mount(m_pNfsContext, m_resolvedHostName.c_str(), exportPath.c_str());

      if(nfsRet != 0)
      {
        XBMC->Log(ADDON::LOG_ERROR,"NFS: Failed to mount nfs share: %s %s (%s)\n", m_resolvedHostName.c_str(), exportPath.c_str(), nfs_get_error(m_pNfsContext));
        // Mount failed: drop the half-initialized context again.
        destroyContext(std::string(url->hostname) + exportPath);
        return false;
      }
      XBMC->Log(ADDON::LOG_DEBUG,"NFS: Connected to server %s and export %s\n", url->hostname, exportPath.c_str());
    }
    m_exportPath = exportPath;
    m_hostName = url->hostname;

    //read chunksize only works after mount
    m_readChunkSize = nfs_get_readmax(m_pNfsContext);
    m_writeChunkSize =nfs_get_writemax(m_pNfsContext);

    if(contextRet == CONTEXT_NEW)
    {
      XBMC->Log(ADDON::LOG_DEBUG,"NFS: chunks: r/w %i/%i\n", (int)m_readChunkSize,(int)m_writeChunkSize);
    }
  }
  return ret;
}
// Destroys all surfaces, contexts, and shared images still alive on this
// display. Uses while-not-empty loops because each destroy call erases
// the object from its collection.
void Display::terminate()
{
    while(!mSurfaceSet.empty())
    {
        destroySurface(*mSurfaceSet.begin());
    }

    while(!mContextSet.empty())
    {
        destroyContext(*mContextSet.begin());
    }

    while(!mSharedImageNameSpace.empty())
    {
        // Shared images are tracked by integer name; convert the name
        // back into an EGLImageKHR handle for destruction.
        destroySharedImage(reinterpret_cast<EGLImageKHR>((intptr_t)mSharedImageNameSpace.firstName()));
    }
}
// Terminates this display: unbinds any current objects, destroys every
// remaining context (while-not-empty, since destroyContext() removes the
// context from mContextSet), clears the config list, and shuts down the
// backend implementation before de-initializing the default platform.
void Display::terminate()
{
    makeCurrent(nullptr, nullptr, nullptr);

    while (!mContextSet.empty())
    {
        destroyContext(*mContextSet.begin());
    }

    mConfigSet.clear();

    mImplementation->terminate();

    mInitialized = false;

    // De-init default platform
    DeinitDefaultPlatformImpl();
}
int main(int argc, char** argv) { pthread_t thread1, thread2; /* thread variables */ cpu_set_t mask; program_thread_args *args = malloc(sizeof(program_thread_args)); args->argc = argc; args->argv = argv; CPU_ZERO(&mask); CPU_SET(0, &mask); int result = sched_setaffinity(0, sizeof(mask), &mask); if (result != 0) { printf("Cannot set affinity to monitor..\n"); exit(0); } // This is just a switch to run the program in hi-res timer evaluation mode // If checktimers() is called it will output the average of a timer with a given interval if (argc >= 2 && !strcmp(argv[1],"-test")) { printf("%s\n", argv[1]); } else { /* create monitor and main thread */ pthread_create(&thread1, NULL, (void *) &monitor_thread, 0); pthread_create(&thread2, NULL, (void *) &main_thread, (void *)args); /* Main block now waits for both threads to terminate, before it exits If main block exits, both threads exit, even if the threads have not finished their work */ pthread_join(thread1, NULL); pthread_join(thread2, NULL); } /* exit */ report(); destroyContext(); free(args); exit(0);
/** * Initializes the OpenGL context using EGL. */ bool EGLPlatform::initContext() { // if there is already a current context, destroy it first destroyContext(); // choose the default config for now int numConfigs = 1; EGLint attribute_list[] = { EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT, EGL_NONE }; if(!eglChooseConfig(this->display, attribute_list, &this->config, 1, &numConfigs)) { printf("Choosing EGL config failed (error code: 0x%x)\n", eglGetError()); return false; } else if(numConfigs == 0) { printf("No matching EGL configs\n"); return false; } this->context = eglCreateContext(this->display, this->config, EGL_NO_CONTEXT, NULL); if(this->context == EGL_NO_CONTEXT) { printf("Error: creating OpenGL context failed (error code: 0x%x)\n", eglGetError()); return false; } #if USE_PBUFFER if(this->surface == EGL_NO_SURFACE) { if(!initSurface()) return false; } #endif return true; }
// Terminates this display: unbinds any current objects, then destroys
// all remaining contexts and images via while-not-empty loops (each
// destroy call removes the object from its set), clears the configs,
// and shuts the backend implementation down.
void Display::terminate()
{
    makeCurrent(nullptr, nullptr, nullptr);

    while (!mContextSet.empty())
    {
        destroyContext(*mContextSet.begin());
    }

    while (!mImageSet.empty())
    {
        destroyImage(*mImageSet.begin());
    }

    mConfigSet.clear();

    mImplementation->terminate();

    mInitialized = false;

    // Never de-init default platform.. terminate is not that final.
}
void GraphicsManager::setScreenSize(int width, int height) { if ((width == _screen->w) && (height == _screen->h)) // No changes, nothing to do return; // Force calling it from the main thread if (!Common::isMainThread()) { Events::MainThreadFunctor<void> functor(boost::bind(&GraphicsManager::setScreenSize, this, width, height)); return RequestMan.callInMainThread(functor); } // Save properties uint32 flags = _screen->flags; int bpp = _screen->format->BitsPerPixel; int oldWidth = _screen->w; int oldHeight = _screen->h; destroyContext(); // Now try to change modes _screen = SDL_SetVideoMode(width, height, bpp, flags); if (!_screen) { // Could not change mode, revert back. _screen = SDL_SetVideoMode(oldWidth, oldHeight, bpp, flags); } // There's no reason how this could possibly fail, but ok... if (!_screen) throw Common::Exception("Failed changing the resolution and then failed reverting."); rebuildContext(); // Let the NotificationManager notify the Notifyables that the resolution changed if ((oldWidth != _screen->w) || (oldHeight != _screen->h)) NotificationMan.resized(oldWidth, oldHeight, _screen->w, _screen->h); }
void Window::windowed() { if (FullScreenMode != m_mode) return; int w = m_windowedModeSize.x; int h = m_windowedModeSize.y; ContextFormat format = m_context->format(); finalizeEventHandler(); WindowEventDispatcher::deregisterWindow(this); destroyContext(); if (createContext(format, w, h, nullptr)) { WindowEventDispatcher::registerWindow(this); initializeEventHandler(); m_mode = WindowMode; } }
// Destructor: tear EGL state down in reverse order of creation —
// context first, then surface, then the display/terminate step.
CEGLManager::~CEGLManager()
{
    destroyContext();
    destroySurface();
    terminate();
}