void OculusViewConfig::configure(osgViewer::View& view) const
{
    m_device->setNearClip(m_nearClip);
    m_device->setFarClip(m_farClip);
    m_device->setSensorPredictionEnabled(m_useSensorPrediction);
    m_device->setSensorPredictionDelta(m_predictionDelta);

    if (m_useCustomScaleFactor)
    {
        m_device->setCustomScaleFactor(m_customScaleFactor);
    }

    // Create a screen that matches the Oculus Rift resolution
    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        osg::notify(osg::NOTICE) << "Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return;
    }

    // Get the screen identifiers set in environment variable DISPLAY
    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // If displayNum has not been set, reset it to 0.
    if (si.displayNum < 0) si.displayNum = 0;

    // If screenNum has not been set, reset it to 0.
    if (si.screenNum < 0) si.screenNum = 0;

    // Test by Shao: override with the configured display and screen numbers
    si.displayNum = _displayNum;
    si.screenNum = _screenNum;

    unsigned int width, height;
    wsi->getScreenResolution(si, width, height);

    osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
    traits->hostName = si.hostName;
    traits->screenNum = si.screenNum;
    traits->displayNum = si.displayNum;
    traits->windowDecoration = false;
    traits->x = 0;
    traits->y = 0;
    traits->width = m_device->hScreenResolution();
    traits->height = m_device->vScreenResolution();
    traits->doubleBuffer = true;
    traits->sharedContext = 0;
    traits->vsync = true; // VSync should always be enabled for Oculus Rift applications

    // Create a graphics context based on our desired traits
    osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
    if (!gc)
    {
        osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
        return;
    }

    _main_camera = view.getCamera();
    _main_camera->setName("Main");
    // Disable scene rendering for the main camera
    _main_camera->setCullMask(~m_sceneNodeMask);
    _main_camera->setGraphicsContext(gc);
    // Use the full viewport
    _main_camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
    // Disable automatic computation of near and far planes on the main camera; this will propagate to the slave cameras
    _main_camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

    const int textureWidth  = m_device->scaleFactor() * m_device->hScreenResolution() / 2;
    const int textureHeight = m_device->scaleFactor() * m_device->vScreenResolution();

    // Master projection matrix
    _main_camera->setProjectionMatrix(m_device->projectionCenterMatrix());

    // Create textures for the RTT cameras
    osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D;
    textureLeft->setTextureSize(textureWidth, textureHeight);
    textureLeft->setInternalFormat(GL_RGBA);
    osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D;
    textureRight->setTextureSize(textureWidth, textureHeight);
    textureRight->setInternalFormat(GL_RGBA);

    // Create RTT (render-to-texture) cameras and attach the textures
    osg::ref_ptr<osg::Camera> cameraRTTLeft = createRTTCamera(textureLeft, gc);
    osg::ref_ptr<osg::Camera> cameraRTTRight = createRTTCamera(textureRight, gc);
    cameraRTTLeft->setName("LeftRTT");
    cameraRTTRight->setName("RightRTT");
    cameraRTTLeft->setCullMask(m_sceneNodeMask);
    cameraRTTRight->setCullMask(m_sceneNodeMask);

    // Create the HUD camera for the left eye
    osg::ref_ptr<osg::Camera> cameraHUDLeft = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
    cameraHUDLeft->setName("LeftHUD");
    cameraHUDLeft->setViewport(new osg::Viewport(0, 0,
        m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));

    // Create the HUD camera for the right eye
    osg::ref_ptr<osg::Camera> cameraHUDRight = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
    cameraHUDRight->setName("RightHUD");
    cameraHUDRight->setViewport(new osg::Viewport(m_device->hScreenResolution() / 2.0f, 0,
        m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));

    // Create a quad for each HUD camera
    osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
    cameraHUDLeft->addChild(leftQuad);
    osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
    cameraHUDRight->addChild(rightQuad);

    // Set up the shaders from the Oculus SDK documentation
    osg::ref_ptr<osg::Program> program = new osg::Program;
    osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
    vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
    osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

    // Fragment shader with or without correction for chromatic aberration
    if (m_useChromaticAberrationCorrection)
    {
        fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
    }
    else
    {
        fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
    }

    program->addShader(vertexShader);
    program->addShader(fragmentShader);

    // Attach the shaders to each HUD quad
    osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
    osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
    applyShaderParameters(leftEyeStateSet, program.get(), textureLeft.get(), OculusDevice::LEFT_EYE);
    applyShaderParameters(rightEyeStateSet, program.get(), textureRight.get(), OculusDevice::RIGHT_EYE);

    // Add the RTT cameras as slaves, specifying offsets for the projection
    view.addSlave(cameraRTTLeft,
                  m_device->projectionOffsetMatrix(OculusDevice::LEFT_EYE),
                  m_device->viewMatrix(OculusDevice::LEFT_EYE),
                  true);
    view.addSlave(cameraRTTRight,
                  m_device->projectionOffsetMatrix(OculusDevice::RIGHT_EYE),
                  m_device->viewMatrix(OculusDevice::RIGHT_EYE),
                  true);

    // Add the HUD cameras as slaves
    view.addSlave(cameraHUDLeft, false);
    view.addSlave(cameraHUDRight, false);
    view.setName("Oculus");

    // Connect the main camera to a node callback that gets the HMD orientation
    if (m_useOrientations)
    {
        _main_camera->setDataVariance(osg::Object::DYNAMIC);
        _callback = new OculusViewConfigOrientationCallback(cameraRTTLeft, cameraRTTRight, m_device);
        _main_camera->setUpdateCallback(_callback);
    }
}
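// The helpers createRTTCamera()/createHUDCamera() used above are not shown in this file.
// The following is a minimal sketch of what such helpers typically look like in OSG; it is
// an illustration under assumptions (names prefixed "sketch"), not the project's actual code.
#include <osg/Camera>
#include <osg/Texture2D>

namespace
{
    // Pre-render FBO camera that renders the scene into the given texture.
    osg::ref_ptr<osg::Camera> sketchRTTCamera(osg::Texture2D* texture, osg::GraphicsContext* gc)
    {
        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
        camera->setGraphicsContext(gc);
        camera->setClearColor(osg::Vec4(0.2f, 0.2f, 0.4f, 1.0f));
        camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
        camera->setRenderOrder(osg::Camera::PRE_RENDER);
        camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
        camera->attach(osg::Camera::COLOR_BUFFER, texture);
        return camera;
    }

    // Post-render orthographic camera used to draw the warped quads on screen.
    osg::ref_ptr<osg::Camera> sketchHUDCamera(double left, double right, double bottom, double top,
                                              osg::GraphicsContext* gc)
    {
        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
        camera->setGraphicsContext(gc);
        camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
        camera->setProjectionMatrixAsOrtho2D(left, right, bottom, top);
        camera->setViewMatrix(osg::Matrix::identity());
        camera->setRenderOrder(osg::Camera::POST_RENDER);
        camera->setClearMask(GL_DEPTH_BUFFER_BIT);
        camera->setAllowEventFocus(false);
        return camera;
    }
}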
/* Public functions */
void OculusViewConfig::configure(osgViewer::View& view) const
{
    // Create a graphics context based on our desired traits
    osg::ref_ptr<osg::GraphicsContext::Traits> traits = m_device->graphicsContextTraits();
    osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
    if (!gc)
    {
        osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
        return;
    }

    // Attach to the window; needed for direct mode
    m_device->attachToWindow(gc);

    // Attach a callback to detect swaps
    osg::ref_ptr<OculusSwapCallback> swapCallback = new OculusSwapCallback(m_device);
    gc->setSwapCallback(swapCallback);

    osg::ref_ptr<osg::Camera> camera = view.getCamera();
    camera->setName("Main");
    // Disable scene rendering for the main camera
    camera->setCullMask(~m_sceneNodeMask);
    camera->setGraphicsContext(gc);
    // Use the full viewport
    camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
    // Disable automatic computation of near and far planes on the main camera; this will propagate to the slave cameras
    camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

    const int textureWidth  = m_device->renderTargetWidth() / 2;
    const int textureHeight = m_device->renderTargetHeight();

    // Master projection matrix
    camera->setProjectionMatrix(m_device->projectionMatrixCenter());

    // Create textures for the RTT cameras
    osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D;
    textureLeft->setTextureSize(textureWidth, textureHeight);
    textureLeft->setInternalFormat(GL_RGBA);
    osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D;
    textureRight->setTextureSize(textureWidth, textureHeight);
    textureRight->setInternalFormat(GL_RGBA);

    // Create RTT cameras and attach the textures
    osg::ref_ptr<osg::Camera> cameraRTTLeft = m_device->createRTTCamera(textureLeft, OculusDevice::LEFT, osg::Camera::RELATIVE_RF, gc);
    osg::ref_ptr<osg::Camera> cameraRTTRight = m_device->createRTTCamera(textureRight, OculusDevice::RIGHT, osg::Camera::RELATIVE_RF, gc);
    cameraRTTLeft->setName("LeftRTT");
    cameraRTTRight->setName("RightRTT");
    cameraRTTLeft->setCullMask(m_sceneNodeMask);
    cameraRTTRight->setCullMask(m_sceneNodeMask);

    // Create the warp ortho camera
    osg::ref_ptr<osg::Camera> cameraWarp = m_device->createWarpOrthoCamera(0.0, 1.0, 0.0, 1.0, gc);
    cameraWarp->setName("WarpOrtho");
    cameraWarp->setViewport(new osg::Viewport(0, 0, m_device->screenResolutionWidth(), m_device->screenResolutionHeight()));

    // Create the shader program
    osg::ref_ptr<osg::Program> program = m_device->createShaderProgram();

    // Create a distortion mesh for each eye
    osg::ref_ptr<osg::Geode> leftDistortionMesh = m_device->distortionMesh(OculusDevice::LEFT, program, 0, 0, textureWidth, textureHeight);
    cameraWarp->addChild(leftDistortionMesh);
    osg::ref_ptr<osg::Geode> rightDistortionMesh = m_device->distortionMesh(OculusDevice::RIGHT, program, 0, 0, textureWidth, textureHeight);
    cameraWarp->addChild(rightDistortionMesh);

    // Add a pre-draw callback to the warp camera to handle time warp
    cameraWarp->setPreDrawCallback(new WarpCameraPreDrawCallback(m_device));

    // Attach the shaders to each distortion mesh
    osg::ref_ptr<osg::StateSet> leftEyeStateSet = leftDistortionMesh->getOrCreateStateSet();
    osg::ref_ptr<osg::StateSet> rightEyeStateSet = rightDistortionMesh->getOrCreateStateSet();
    m_device->applyShaderParameters(leftEyeStateSet, program.get(), textureLeft.get(), OculusDevice::LEFT);
    m_device->applyShaderParameters(rightEyeStateSet, program.get(), textureRight.get(), OculusDevice::RIGHT);

    // Add the RTT cameras as slaves, specifying offsets for the projection
    view.addSlave(cameraRTTLeft, m_device->projectionOffsetMatrixLeft(), m_device->viewMatrixLeft(), true);
    view.addSlave(cameraRTTRight, m_device->projectionOffsetMatrixRight(), m_device->viewMatrixRight(), true);

    // Use sky light instead of headlight to avoid lighting changes with head movements
    view.setLightingMode(osg::View::SKY_LIGHT);

    // Add the warp camera as a slave
    view.addSlave(cameraWarp, false);
    view.setName("Oculus");

    // Connect the main camera to a node callback that gets the HMD orientation
    camera->setDataVariance(osg::Object::DYNAMIC);
    camera->setCullCallback(new OculusViewConfigOrientationCallback(cameraRTTLeft, cameraRTTRight, m_device, swapCallback, m_warning));

    // Add the Oculus keyboard handlers
    view.addEventHandler(new OculusEventHandler(m_device));
    view.addEventHandler(new OculusWarningEventHandler(m_device, m_warning));
}
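// OculusSwapCallback is referenced above but not defined in this file. The sketch below
// shows the general shape of an osg::GraphicsContext::SwapCallback subclass; the device
// notification is an assumed placeholder (commented out), not the project's actual API.
#include <osg/GraphicsContext>

class SketchSwapCallback : public osg::GraphicsContext::SwapCallback
{
public:
    explicit SketchSwapCallback(OculusDevice* device) : m_device(device), m_frameIndex(0) {}

    virtual void swapBuffersImplementation(osg::GraphicsContext* gc)
    {
        // Perform the actual buffer swap, then note that a frame was presented.
        gc->swapBuffersImplementation();
        ++m_frameIndex;
        // m_device->endFrameTiming(m_frameIndex);   // assumed hook; adapt to the real OculusDevice API
    }

    int frameIndex() const { return m_frameIndex; }

private:
    OculusDevice* m_device;
    int m_frameIndex;
};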
void OculusViewConfig::configure(osgViewer::View& view) const
{
    m_dev->setNearClip(m_nearClip);
    m_dev->setFarClip(m_farClip);
    m_dev->setSensorPredictionEnabled(m_useSensorPrediction);
    m_dev->setSensorPredictionDelta(m_predictionDelta);

    if (m_useCustomScaleFactor)
    {
        m_dev->setCustomScaleFactor(m_customScaleFactor);
    }

    // Create a screen that matches the Oculus Rift resolution
    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        osg::notify(osg::NOTICE) << "Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return;
    }

    unsigned int width, height;
    wsi->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), width, height);

    osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
    traits->windowDecoration = false;
    traits->x = 0;
    traits->y = 0;
    traits->width = m_dev->hScreenResolution();
    traits->height = m_dev->vScreenResolution();
    traits->doubleBuffer = true;
    traits->sharedContext = 0;
    traits->sampleBuffers = true;
    traits->samples = 4;
    traits->vsync = true;

    osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);

    osg::ref_ptr<osg::Camera> camera = view.getCamera();
    camera->setGraphicsContext(gc);
    // Use the full viewport
    camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
    // Disable automatic computation of near and far planes on the main camera; this will propagate to the slave cameras
    camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

    const int textureWidth  = m_dev->scaleFactor() * m_dev->hScreenResolution() / 2;
    const int textureHeight = m_dev->scaleFactor() * m_dev->vScreenResolution();

    // Master projection matrix
    camera->setProjectionMatrix(m_dev->projectionCenterMatrix());

    osg::ref_ptr<osg::Texture2D> l_tex = new osg::Texture2D;
    l_tex->setTextureSize(textureWidth, textureHeight);
    l_tex->setInternalFormat(GL_RGBA);
    osg::ref_ptr<osg::Texture2D> r_tex = new osg::Texture2D;
    r_tex->setTextureSize(textureWidth, textureHeight);
    r_tex->setInternalFormat(GL_RGBA);

    osg::ref_ptr<osg::Camera> l_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, l_tex, gc);
    osg::ref_ptr<osg::Camera> r_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, r_tex, gc);

    // Create HUD cameras for each eye
    osg::ref_ptr<osg::Camera> l_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
    l_hud->setViewport(new osg::Viewport(0, 0,
        m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));
    osg::ref_ptr<osg::Camera> r_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
    r_hud->setViewport(new osg::Viewport(m_dev->hScreenResolution() / 2.0f, 0,
        m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));

    // Create a quad for each HUD camera
    osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
    l_hud->addChild(leftQuad);
    osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
    r_hud->addChild(rightQuad);

    // Set up the shaders from the Oculus SDK documentation
    osg::ref_ptr<osg::Program> program = new osg::Program;
    osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
    vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
    osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

    // Fragment shader with or without correction for chromatic aberration
    if (m_useChromaticAberrationCorrection)
    {
        fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
    }
    else
    {
        fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
    }

    program->addShader(vertexShader);
    program->addShader(fragmentShader);

    // Configure the state sets for both eyes
    osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
    leftEyeStateSet->setTextureAttributeAndModes(0, l_tex, osg::StateAttribute::ON);
    leftEyeStateSet->setAttributeAndModes(program, osg::StateAttribute::ON);
    leftEyeStateSet->addUniform(new osg::Uniform("WarpTexture", 0));
    leftEyeStateSet->addUniform(new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::LEFT_EYE)));
    leftEyeStateSet->addUniform(new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
    leftEyeStateSet->addUniform(new osg::Uniform("Scale", m_dev->scale()));
    leftEyeStateSet->addUniform(new osg::Uniform("ScaleIn", m_dev->scaleIn()));
    leftEyeStateSet->addUniform(new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
    leftEyeStateSet->addUniform(new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));

    osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
    rightEyeStateSet->setTextureAttributeAndModes(0, r_tex, osg::StateAttribute::ON);
    rightEyeStateSet->setAttributeAndModes(program, osg::StateAttribute::ON);
    rightEyeStateSet->addUniform(new osg::Uniform("WarpTexture", 0));
    rightEyeStateSet->addUniform(new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::RIGHT_EYE)));
    rightEyeStateSet->addUniform(new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
    rightEyeStateSet->addUniform(new osg::Uniform("Scale", m_dev->scale()));
    rightEyeStateSet->addUniform(new osg::Uniform("ScaleIn", m_dev->scaleIn()));
    rightEyeStateSet->addUniform(new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
    rightEyeStateSet->addUniform(new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));

    // Add the cameras as slaves, specifying offsets for the projection
    view.addSlave(l_rtt, m_dev->projectionOffsetMatrix(OculusDevice::LEFT_EYE), m_dev->viewMatrix(OculusDevice::LEFT_EYE), true);
    view.addSlave(r_rtt, m_dev->projectionOffsetMatrix(OculusDevice::RIGHT_EYE), m_dev->viewMatrix(OculusDevice::RIGHT_EYE), true);
    view.addSlave(l_hud, false);
    view.addSlave(r_hud, false);

    // Connect the main camera to a node callback that gets the HMD orientation
    if (m_useOrientations)
    {
        camera->setDataVariance(osg::Object::DYNAMIC);
        camera->setUpdateCallback(new OculusViewConfigOrientationCallback(l_rtt, r_rtt, m_dev));
    }
}
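// A minimal sketch of how a ViewConfig such as the ones above is applied to a viewer.
// OculusViewConfig is assumed to be default-constructible here; the real class may require
// an OculusDevice or other constructor arguments.
#include <osgViewer/Viewer>
#include <osgDB/ReadFile>

int oculusExampleMain()   // illustrative entry point, not part of the original file
{
    osgViewer::Viewer viewer;
    viewer.setSceneData(osgDB::readNodeFile("cow.osgt"));   // any scene will do
    // osgViewer::View::apply() invokes configure(*this) on the supplied ViewConfig.
    viewer.apply(new OculusViewConfig());                   // constructor signature assumed
    return viewer.run();
}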
void WoWVxDisplay::configure(osgViewer::View& view) const
{
    OSG_INFO << "WoWVxDisplay::configure(...)" << std::endl;

    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        OSG_NOTICE << "Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return;
    }

    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // displayNum has not been set, so reset it to 0.
    if (si.displayNum < 0) si.displayNum = 0;

    si.screenNum = _screenNum;

    unsigned int width, height;
    wsi->getScreenResolution(si, width, height);

    osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
    traits->hostName = si.hostName;
    traits->displayNum = si.displayNum;
    traits->screenNum = si.screenNum;
    traits->x = 0;
    traits->y = 0;
    traits->width = width;
    traits->height = height;
    traits->windowDecoration = false;
    traits->doubleBuffer = true;
    traits->sharedContext = 0;

    osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());
    if (!gc)
    {
        OSG_NOTICE << "GraphicsWindow has not been created successfully." << std::endl;
        return;
    }

    int tex_width = width;
    int tex_height = height;

    int camera_width = tex_width;
    int camera_height = tex_height;

    osg::Texture2D* texture = new osg::Texture2D;
    texture->setTextureSize(tex_width, tex_height);
    texture->setInternalFormat(GL_RGB);
    texture->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR);
    texture->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR);

    osg::Texture2D* textureD = new osg::Texture2D;
    textureD->setTextureSize(tex_width, tex_height);
    textureD->setInternalFormat(GL_DEPTH_COMPONENT);
    textureD->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR);
    textureD->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR);

#if 0
    osg::Camera::RenderTargetImplementation renderTargetImplementation = osg::Camera::SEPERATE_WINDOW;
    GLenum buffer = GL_FRONT;
#else
    osg::Camera::RenderTargetImplementation renderTargetImplementation = osg::Camera::FRAME_BUFFER_OBJECT;
    GLenum buffer = GL_FRONT;
#endif

    // front face
    {
        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
        camera->setName("Front face camera");
        camera->setGraphicsContext(gc.get());
        camera->setViewport(new osg::Viewport(0, 0, camera_width, camera_height));
        camera->setDrawBuffer(buffer);
        camera->setReadBuffer(buffer);
        camera->setAllowEventFocus(false);

        // tell the camera to use an OpenGL frame buffer object where supported.
        camera->setRenderTargetImplementation(renderTargetImplementation);

        // attach the textures and use them as the color and depth buffers.
        camera->attach(osg::Camera::COLOR_BUFFER, texture);
        camera->attach(osg::Camera::DEPTH_BUFFER, textureD);

        view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
    }

    // WoW display set up.
    {
        osg::Texture1D* textureHeader = new osg::Texture1D();

        // Set up the header
        {
            unsigned char header[] = {0xF1, _wow_content, _wow_factor, _wow_offset, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};

            // Calculate the CRC32
            {
                unsigned long _register = 0;
                for (int i = 0; i < 10; ++i)
                {
                    unsigned char mask = 0x80;
                    unsigned char byte = header[i];
                    for (int j = 0; j < 8; ++j)
                    {
                        bool topBit = (_register & 0x80000000) != 0;
                        _register <<= 1;
                        _register ^= ((byte & mask) != 0 ? 0x1 : 0x0);
                        if (topBit)
                        {
                            _register ^= 0x04c11db7;
                        }
                        mask >>= 1;
                    }
                }
                unsigned char* p = (unsigned char*)&_register;
                for (size_t i = 0; i < 4; ++i)
                {
                    header[i + 6] = p[3 - i];
                }
            }

            osg::ref_ptr<osg::Image> imageheader = new osg::Image();
            imageheader->allocateImage(256, 1, 1, GL_LUMINANCE, GL_UNSIGNED_BYTE);
            {
                unsigned char* cheader = imageheader->data();
                for (int x = 0; x < 256; ++x) { cheader[x] = 0; }
                for (int x = 0; x <= 9; ++x)
                {
                    for (int y = 7; y >= 0; --y)
                    {
                        int i = 2 * (7 - y) + 16 * x;
                        cheader[i] = (((1 << (y)) & (header[x])) << (7 - (y)));
                    }
                }
            }
            textureHeader->setImage(imageheader.get());
        }

        // Create the screen-aligned quad
        osg::Geode* geode = new osg::Geode();
        {
            osg::Geometry* geom = new osg::Geometry;

            osg::Vec3Array* vertices = new osg::Vec3Array;
            vertices->push_back(osg::Vec3(0, height, 0));
            vertices->push_back(osg::Vec3(0, 0, 0));
            vertices->push_back(osg::Vec3(width, 0, 0));
            vertices->push_back(osg::Vec3(width, height, 0));
            geom->setVertexArray(vertices);

            osg::Vec2Array* tex = new osg::Vec2Array;
            tex->push_back(osg::Vec2(0, 1));
            tex->push_back(osg::Vec2(0, 0));
            tex->push_back(osg::Vec2(1, 0));
            tex->push_back(osg::Vec2(1, 1));
            geom->setTexCoordArray(0, tex);

            geom->addPrimitiveSet(new osg::DrawArrays(GL_QUADS, 0, 4));
            geode->addDrawable(geom);

            // Now we need to add the textures to the quad and set up the shader.
            osg::StateSet* stateset = geode->getOrCreateStateSet();
            stateset->setTextureAttributeAndModes(0, textureHeader, osg::StateAttribute::ON);
            stateset->setTextureAttributeAndModes(1, texture, osg::StateAttribute::ON);
            stateset->setTextureAttributeAndModes(2, textureD, osg::StateAttribute::ON);
            stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);

            osg::ref_ptr<osg::Program> programShader = new osg::Program();
            stateset->setAttribute(programShader.get(), osg::StateAttribute::ON);
            stateset->addUniform(new osg::Uniform("wow_width", (int)width));
            stateset->addUniform(new osg::Uniform("wow_height", (int)height));
            stateset->addUniform(new osg::Uniform("wow_disparity_M", _wow_disparity_M));
            stateset->addUniform(new osg::Uniform("wow_disparity_Zd", _wow_disparity_Zd));
            stateset->addUniform(new osg::Uniform("wow_disparity_vz", _wow_disparity_vz));
            stateset->addUniform(new osg::Uniform("wow_disparity_C", _wow_disparity_C));
            stateset->addUniform(new osg::Uniform("wow_header", 0));
            stateset->addUniform(new osg::Uniform("wow_tcolor", 1));
            stateset->addUniform(new osg::Uniform("wow_tdepth", 2));

            osg::Shader* frag = new osg::Shader(osg::Shader::FRAGMENT);
            frag->setShaderSource(
                " uniform sampler1D wow_header; " \
                " uniform sampler2D wow_tcolor; " \
                " uniform sampler2D wow_tdepth; " \
                " uniform int wow_width; " \
                " uniform int wow_height; " \
                " uniform float wow_disparity_M; " \
                " uniform float wow_disparity_Zd; " \
                " uniform float wow_disparity_vz; " \
                " uniform float wow_disparity_C; " \
                " float disparity(float Z) " \
                " { " \
                "     return (wow_disparity_M*(1.0-(wow_disparity_vz/(Z-wow_disparity_Zd+wow_disparity_vz))) " \
                "         + wow_disparity_C) / 255.0; " \
                " } " \
                " void main() " \
                " { " \
                "     vec2 pos = (gl_FragCoord.xy / vec2(wow_width/2,wow_height) ); " \
                "     if (gl_FragCoord.x > float(wow_width/2)) " \
                "     { " \
                "         gl_FragColor = vec4(disparity(( texture2D(wow_tdepth, pos - vec2(1,0))).z)); " \
                "     } " \
                "     else{ " \
                "         gl_FragColor = texture2D(wow_tcolor, pos); " \
                "     } " \
                "     if ( (gl_FragCoord.y >= float(wow_height-1)) && (gl_FragCoord.x < 256.0) ) " \
                "     { " \
                "         float pos = gl_FragCoord.x/256.0; " \
                "         float blue = texture1D(wow_header, pos).b; " \
                "         if ( blue < 0.5) " \
                "             gl_FragColor.b = 0.0; " \
                "         else " \
                "             gl_FragColor.b = 1.0; " \
                "     } " \
                " } ");
            programShader->addShader(frag);
        }

        // Create the camera
        {
            osg::ref_ptr<osg::Camera> camera = new osg::Camera;
            camera->setGraphicsContext(gc.get());
            camera->setClearMask(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
            camera->setClearColor(osg::Vec4(0.0, 0.0, 0.0, 1.0));
            camera->setViewport(new osg::Viewport(0, 0, width, height));
            GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;
            camera->setDrawBuffer(buffer);
            camera->setReadBuffer(buffer);
            camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
            camera->setAllowEventFocus(false);
            camera->setInheritanceMask(camera->getInheritanceMask() & ~osg::CullSettings::CLEAR_COLOR & ~osg::CullSettings::COMPUTE_NEAR_FAR_MODE);
            //camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

            camera->setProjectionMatrixAsOrtho2D(0, width, 0, height);
            camera->setViewMatrix(osg::Matrix::identity());

            // add the subgraph to render
            camera->addChild(geode);

            camera->setName("WoWCamera");

            view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd(), false);
        }
    }
}
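// A usage sketch for the WoWVx display configuration above. The constructor arguments are
// assumptions (a WoWvx display "type" and a screen number are commonly used); check the
// actual WoWVxDisplay header for the real signature before relying on this.
#include <osgViewer/Viewer>
#include <osgViewer/config/WoWVxDisplay>   // header path as in OSG 3.2+; adjust if different
#include <osgDB/ReadFile>

int wowExampleMain()   // illustrative entry point, not part of the original file
{
    osgViewer::Viewer viewer;
    viewer.setSceneData(osgDB::readNodeFile("cow.osgt"));
    viewer.apply(new osgViewer::WoWVxDisplay(20 /* assumed display type */, 0 /* screen */));
    return viewer.run();
}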
void PanoramicSphericalDisplay::configure(osgViewer::View& view) const
{
    OSG_INFO << "PanoramicSphericalDisplay::configure(rad=" << _radius << ", cllr=" << _collar << ", sn=" << _screenNum << ", im=" << _intensityMap << ")" << std::endl;

    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        OSG_NOTICE << "Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return;
    }

    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // displayNum has not been set, so reset it to 0.
    if (si.displayNum < 0) si.displayNum = 0;

    si.screenNum = _screenNum;

    unsigned int width, height;
    wsi->getScreenResolution(si, width, height);

    osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
    traits->hostName = si.hostName;
    traits->displayNum = si.displayNum;
    traits->screenNum = si.screenNum;
    traits->x = 0;
    traits->y = 0;
    traits->width = width;
    traits->height = height;
    traits->windowDecoration = false;
    traits->doubleBuffer = true;
    traits->sharedContext = 0;

    bool applyIntensityMapAsColours = true;

    osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());
    if (!gc)
    {
        OSG_NOTICE << "GraphicsWindow has not been created successfully." << std::endl;
        return;
    }

    int tex_width = width;
    int tex_height = height;

    int camera_width = tex_width;
    int camera_height = tex_height;

    osg::TextureRectangle* texture = new osg::TextureRectangle;
    texture->setTextureSize(tex_width, tex_height);
    texture->setInternalFormat(GL_RGB);
    texture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
    texture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
    texture->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
    texture->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);

#if 0
    osg::Camera::RenderTargetImplementation renderTargetImplementation = osg::Camera::SEPERATE_WINDOW;
    GLenum buffer = GL_FRONT;
#else
    osg::Camera::RenderTargetImplementation renderTargetImplementation = osg::Camera::FRAME_BUFFER_OBJECT;
    GLenum buffer = GL_FRONT;
#endif

    // front face
    {
        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
        camera->setName("Front face camera");
        camera->setGraphicsContext(gc.get());
        camera->setViewport(new osg::Viewport(0, 0, camera_width, camera_height));
        camera->setDrawBuffer(buffer);
        camera->setReadBuffer(buffer);
        camera->setAllowEventFocus(false);

        // tell the camera to use an OpenGL frame buffer object where supported.
        camera->setRenderTargetImplementation(renderTargetImplementation);

        // attach the texture and use it as the color buffer.
        camera->attach(osg::Camera::COLOR_BUFFER, texture);

        view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
    }

    // distortion correction set up.
    {
        osg::Geode* geode = new osg::Geode();
        geode->addDrawable(createParoramicSphericalDisplayDistortionMesh(
            osg::Vec3(0.0f, 0.0f, 0.0f),
            osg::Vec3(width, 0.0f, 0.0f),
            osg::Vec3(0.0f, height, 0.0f),
            _radius, _collar,
            applyIntensityMapAsColours ? _intensityMap.get() : 0,
            _projectorMatrix));

        // Now we need to add the texture to the mesh; we do so by creating a
        // StateSet to contain the Texture StateAttribute.
        osg::StateSet* stateset = geode->getOrCreateStateSet();
        stateset->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
        stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);

        osg::TexMat* texmat = new osg::TexMat;
        texmat->setScaleByTextureRectangleSize(true);
        stateset->setTextureAttributeAndModes(0, texmat, osg::StateAttribute::ON);

        if (!applyIntensityMapAsColours && _intensityMap.valid())
        {
            stateset->setTextureAttributeAndModes(1, new osg::Texture2D(_intensityMap.get()), osg::StateAttribute::ON);
        }

        osg::ref_ptr<osg::Camera> camera = new osg::Camera;
        camera->setGraphicsContext(gc.get());
        camera->setClearMask(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
        camera->setClearColor(osg::Vec4(0.0, 0.0, 0.0, 1.0));
        camera->setViewport(new osg::Viewport(0, 0, width, height));
        GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;
        camera->setDrawBuffer(buffer);
        camera->setReadBuffer(buffer);
        camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
        camera->setAllowEventFocus(false);
        camera->setInheritanceMask(camera->getInheritanceMask() & ~osg::CullSettings::CLEAR_COLOR & ~osg::CullSettings::COMPUTE_NEAR_FAR_MODE);
        //camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

        camera->setProjectionMatrixAsOrtho2D(0, width, 0, height);
        camera->setViewMatrix(osg::Matrix::identity());

        // add the subgraph to render
        camera->addChild(geode);

        camera->setName("DistortionCorrectionCamera");

        view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd(), false);
    }
}
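// A usage sketch for the panoramic spherical display configuration. The constructor
// arguments (radius, collar, screen number) mirror the members used above, but the exact
// signature is an assumption; verify it against the PanoramicSphericalDisplay header.
#include <osgViewer/Viewer>
#include <osgViewer/config/PanoramicSphericalDisplay>   // header path as in OSG 3.2+; adjust if different
#include <osgDB/ReadFile>

int panoramaExampleMain()   // illustrative entry point, not part of the original file
{
    osgViewer::Viewer viewer;
    viewer.setSceneData(osgDB::readNodeFile("cow.osgt"));
    // radius = 1.0 and collar = 0.45 are conventional defaults for dome projection setups.
    viewer.apply(new osgViewer::PanoramicSphericalDisplay(1.0, 0.45, /*screenNum=*/0));
    return viewer.run();
}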
void AcrossAllScreens::configure(osgViewer::View& view) const
{
    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        OSG_NOTICE << "AcrossAllScreens::configure() : Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return;
    }

    osg::DisplaySettings* ds = getActiveDisplaySetting(view);

    double fovy, aspectRatio, zNear, zFar;
    view.getCamera()->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // displayNum has not been set, so reset it to 0.
    if (si.displayNum < 0) si.displayNum = 0;

    unsigned int numScreens = wsi->getNumScreens(si);
    if (numScreens == 1)
    {
        if (si.screenNum < 0) si.screenNum = 0;

        unsigned int width, height;
        wsi->getScreenResolution(si, width, height);

        osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits(ds);
        traits->hostName = si.hostName;
        traits->displayNum = si.displayNum;
        traits->screenNum = si.screenNum;
        traits->x = 0;
        traits->y = 0;
        traits->width = width;
        traits->height = height;
        traits->windowDecoration = false;
        traits->doubleBuffer = true;
        traits->sharedContext = 0;

        osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());

        view.getCamera()->setGraphicsContext(gc.get());

        osgViewer::GraphicsWindow* gw = dynamic_cast<osgViewer::GraphicsWindow*>(gc.get());
        if (gw)
        {
            OSG_INFO << " GraphicsWindow has been created successfully." << std::endl;
            gw->getEventQueue()->getCurrentEventState()->setWindowRectangle(0, 0, width, height);
        }
        else
        {
            OSG_NOTICE << " GraphicsWindow has not been created successfully." << std::endl;
        }

        double newAspectRatio = double(traits->width) / double(traits->height);
        double aspectRatioChange = newAspectRatio / aspectRatio;
        if (aspectRatioChange != 1.0)
        {
            view.getCamera()->getProjectionMatrix() *= osg::Matrix::scale(1.0 / aspectRatioChange, 1.0, 1.0);
        }

        view.getCamera()->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));

        GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;

        view.getCamera()->setDrawBuffer(buffer);
        view.getCamera()->setReadBuffer(buffer);
    }
    else
    {
        double translate_x = 0.0;

        for (unsigned int i = 0; i < numScreens; ++i)
        {
            si.screenNum = i;

            unsigned int width, height;
            wsi->getScreenResolution(si, width, height);
            translate_x += double(width) / (double(height) * aspectRatio);
        }

        bool stereoSplitScreens = numScreens == 2 &&
                                  ds->getStereoMode() == osg::DisplaySettings::HORIZONTAL_SPLIT &&
                                  ds->getStereo();

        for (unsigned int i = 0; i < numScreens; ++i)
        {
            si.screenNum = i;

            unsigned int width, height;
            wsi->getScreenResolution(si, width, height);

            osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits(ds);
            traits->hostName = si.hostName;
            traits->displayNum = si.displayNum;
            traits->screenNum = si.screenNum;
            traits->screenNum = i;
            traits->x = 0;
            traits->y = 0;
            traits->width = width;
            traits->height = height;
            traits->windowDecoration = false;
            traits->doubleBuffer = true;
            traits->sharedContext = 0;

            osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());

            osg::ref_ptr<osg::Camera> camera = new osg::Camera;
            camera->setGraphicsContext(gc.get());

            osgViewer::GraphicsWindow* gw = dynamic_cast<osgViewer::GraphicsWindow*>(gc.get());
            if (gw)
            {
                OSG_INFO << " GraphicsWindow has been created successfully." << gw << std::endl;
                gw->getEventQueue()->getCurrentEventState()->setWindowRectangle(traits->x, traits->y, traits->width, traits->height);
            }
            else
            {
                OSG_NOTICE << " GraphicsWindow has not been created successfully." << std::endl;
            }

            camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));

            GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;
            camera->setDrawBuffer(buffer);
            camera->setReadBuffer(buffer);

            if (stereoSplitScreens)
            {
                unsigned int leftCameraNum = (ds->getSplitStereoHorizontalEyeMapping() == osg::DisplaySettings::LEFT_EYE_LEFT_VIEWPORT) ? 0 : 1;

                osg::ref_ptr<osg::DisplaySettings> ds_local = new osg::DisplaySettings(*ds);
                ds_local->setStereoMode(leftCameraNum == i ? osg::DisplaySettings::LEFT_EYE : osg::DisplaySettings::RIGHT_EYE);
                camera->setDisplaySettings(ds_local.get());

                view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
            }
            else
            {
                double newAspectRatio = double(traits->width) / double(traits->height);
                double aspectRatioChange = newAspectRatio / aspectRatio;

                view.addSlave(camera.get(),
                              osg::Matrixd::translate(translate_x - aspectRatioChange, 0.0, 0.0) *
                              osg::Matrix::scale(1.0 / aspectRatioChange, 1.0, 1.0),
                              osg::Matrixd());
                translate_x -= aspectRatioChange * 2.0;
            }
        }
    }

    view.assignSceneDataToCameras();
}
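// AcrossAllScreens is the default multi-screen configuration used when a viewer realizes
// without an explicit setup. A minimal sketch of applying it explicitly, which is
// equivalent to letting the viewer call setUpViewAcrossAllScreens():
#include <osgViewer/Viewer>
#include <osgViewer/config/AcrossAllScreens>   // header path as in OSG 3.2+; adjust if different
#include <osgDB/ReadFile>

int acrossAllScreensExampleMain()   // illustrative entry point, not part of the original file
{
    osgViewer::Viewer viewer;
    viewer.setSceneData(osgDB::readNodeFile("cow.osgt"));
    viewer.apply(new osgViewer::AcrossAllScreens());   // creates one slave camera per screen
    return viewer.run();
}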