Example #1
    virtual bool handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& aa, osg::Object*, osg::NodeVisitor*)
    {
        if ( ea.getEventType() == ea.PUSH )
        {
            // mouse click in normalized device coordinates [-1..1]
            float nx = ea.getXnormalized();
            float ny = ea.getYnormalized();

            // clicked point in clip space:
            osg::Vec3d pn( nx, ny, -1 ); // on near plane
            osg::Vec3d pf( nx, ny,  1 ); // on far plane

            OE_NOTICE << "clip: nx=" << nx << ", ny=" << ny << std::endl;
            
            // take the view matrix as-is:
            osg::Matrix view = _view->getCamera()->getViewMatrix();

            // adjust projection matrix to include entire earth:
            double fovy, ar, zn, zf;
            _view->getCamera()->getProjectionMatrix().getPerspective(fovy, ar, zn, zf);
            osg::Matrix proj;
            proj.makePerspective(fovy, ar, 1.0, 1e10);

            // Invert the MVP to transform points from clip to model space:
            osg::Matrix MVP = view * proj;
            osg::Matrix invMVP;
            invMVP.invert(MVP);

            pn = pn * invMVP;
            pf = pf * invMVP;

            OE_NOTICE << "model: near = " << pn.x() << ", " << pn.y() << ", " << pn.z() << std::endl;
            OE_NOTICE << "model: far  = " << pf.x() << ", " << pf.y() << ", " << pf.z() << std::endl;

            // Intersect in model space.
            osgUtil::LineSegmentIntersector* lsi = new osgUtil::LineSegmentIntersector(
                osgUtil::Intersector::MODEL, pn, pf );

            lsi->setIntersectionLimit( lsi->LIMIT_NEAREST );

            osgUtil::IntersectionVisitor iv( lsi ); 
            
            _node->accept( iv );

            if ( lsi->containsIntersections() )
            {
                osg::Vec3d p = lsi->getIntersections().begin()->getWorldIntersectPoint();
                OE_NOTICE << "i = " << p.x() << ", " << p.y() << ", " << p.z() << std::endl;
            }
        }
        return false;
    }
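The handler above overrides osgGA::GUIEventHandler::handle(); the enclosing class, with its _view and _node members, is not shown. Below is a minimal sketch of how such a handler might be wired into a viewer; the PickHandler wrapper and the "earth.earth" file name are illustrative assumptions, not part of the original example.

#include <osgViewer/Viewer>
#include <osgDB/ReadFile>
#include <osgGA/GUIEventHandler>

struct PickHandler : public osgGA::GUIEventHandler
{
    PickHandler(osgViewer::View* view, osg::Node* node) : _view(view), _node(node) { }

    // virtual bool handle(...) exactly as shown above

    osgViewer::View*        _view;  // used to read the view and projection matrices
    osg::ref_ptr<osg::Node> _node;  // subgraph to intersect (e.g. the terrain)
};

int main()
{
    osgViewer::Viewer viewer;
    osg::ref_ptr<osg::Node> earth = osgDB::readNodeFile("earth.earth"); // illustrative scene
    viewer.setSceneData(earth.get());
    viewer.addEventHandler(new PickHandler(&viewer, earth.get()));
    return viewer.run();
}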
Example #2
void OculusViewConfig::configure(osgViewer::View& view) const
{
	m_device->setNearClip(m_nearClip);
	m_device->setFarClip(m_farClip);
	m_device->setSensorPredictionEnabled(m_useSensorPrediction);
	m_device->setSensorPredictionDelta(m_predictionDelta);

	if (m_useCustomScaleFactor) {
		m_device->setCustomScaleFactor(m_customScaleFactor);
	}

	// Create a window matching the Oculus Rift resolution
	osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();

	if (!wsi) {
		osg::notify(osg::NOTICE)<<"Error, no WindowSystemInterface available, cannot create windows."<<std::endl;
		return;
	}

	// Get the screen identifiers set in environment variable DISPLAY
	osg::GraphicsContext::ScreenIdentifier si;
	si.readDISPLAY();
	
	// If displayNum has not been set, reset it to 0.
	if (si.displayNum < 0) si.displayNum = 0;

	// If screenNum has not been set, reset it to 0.
	if (si.screenNum < 0) si.screenNum = 0;

	// Override the display and screen numbers with the configured values (test by Shao)
	si.displayNum = _displayNum;
	si.screenNum = _screenNum;
	unsigned int width, height;
	wsi->getScreenResolution(si, width, height);

	osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
	traits->hostName = si.hostName;
	traits->screenNum = si.screenNum;
	traits->displayNum = si.displayNum;
	traits->windowDecoration = false;
	traits->x = 0;
	traits->y = 0;
	traits->width = m_device->hScreenResolution();
	traits->height = m_device->vScreenResolution();
	traits->doubleBuffer = true;
	traits->sharedContext = 0;
	traits->vsync = true; // VSync should always be enabled for Oculus Rift applications

	// Create a graphics context based on our desired traits
	osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
	if (!gc) {
		osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
		return;
	}

	_main_camera = view.getCamera();
	_main_camera->setName("Main");
	// Disable scene rendering for main camera
	_main_camera->setCullMask(~m_sceneNodeMask);
	_main_camera->setGraphicsContext(gc);
	// Use the full viewport
	_main_camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
	// Disable automatic computation of the near and far planes on the main camera; this will propagate to the slave cameras
	_main_camera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	const int textureWidth  = m_device->scaleFactor() * m_device->hScreenResolution()/2;
	const int textureHeight = m_device->scaleFactor() * m_device->vScreenResolution();
	// master projection matrix
	_main_camera->setProjectionMatrix(m_device->projectionCenterMatrix());
	// Create textures for RTT cameras
	osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D;
	textureLeft->setTextureSize( textureWidth, textureHeight );
	textureLeft->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D;
	textureRight->setTextureSize( textureWidth, textureHeight );
	textureRight->setInternalFormat( GL_RGBA );
	// Create RTT (Rendering to Texture) cameras and attach textures 
	osg::ref_ptr<osg::Camera> cameraRTTLeft = createRTTCamera(textureLeft, gc);
	osg::ref_ptr<osg::Camera> cameraRTTRight = createRTTCamera(textureRight, gc);
	cameraRTTLeft->setName("LeftRTT");
	cameraRTTRight->setName("RightRTT");
	cameraRTTLeft->setCullMask(m_sceneNodeMask);
	cameraRTTRight->setCullMask(m_sceneNodeMask);
	// Create HUD camera for the left eye
	osg::ref_ptr<osg::Camera> cameraHUDLeft = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	cameraHUDLeft->setName("LeftHUD");
	cameraHUDLeft->setViewport(new osg::Viewport(0, 0, 
		m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));
	// Create HUD camera for the right eye
	osg::ref_ptr<osg::Camera> cameraHUDRight = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	cameraHUDRight->setName("RightHUD");
	cameraHUDRight->setViewport(new osg::Viewport(m_device->hScreenResolution() / 2.0f, 0,
										 m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));
	// Create quads for each camera
	osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
	cameraHUDLeft->addChild(leftQuad);
	osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
	cameraHUDRight->addChild(rightQuad);

	// Set up shaders from the Oculus SDK documentation
	osg::ref_ptr<osg::Program> program = new osg::Program;
	osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
	vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
	osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

	// Fragment shader with or without correction for chromatic aberration
	if (m_useChromaticAberrationCorrection) {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
	} else {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
	}

	program->addShader(vertexShader);
	program->addShader(fragmentShader);
	
	// Attach shaders to each HUD
	osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
	osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
	applyShaderParameters(leftEyeStateSet, program.get(), textureLeft.get(), OculusDevice::LEFT_EYE);
	applyShaderParameters(rightEyeStateSet, program.get(), textureRight.get(), OculusDevice::RIGHT_EYE);
	
	// Add RTT cameras as slaves, specifying offsets for the projection
	view.addSlave(cameraRTTLeft, 
		m_device->projectionOffsetMatrix(OculusDevice::LEFT_EYE), 
		m_device->viewMatrix(OculusDevice::LEFT_EYE), 
		true);
	view.addSlave(cameraRTTRight, 
		m_device->projectionOffsetMatrix(OculusDevice::RIGHT_EYE), 
		m_device->viewMatrix(OculusDevice::RIGHT_EYE), 
		true);

	// Add HUD cameras as slaves
	view.addSlave(cameraHUDLeft, false);
	view.addSlave(cameraHUDRight, false);

	view.setName("Oculus");
	// Connect the main camera to an update callback that gets the HMD orientation
	if (m_useOrientations) {
		_main_camera->setDataVariance(osg::Object::DYNAMIC);
		_callback = new OculusViewConfigOrientationCallback(cameraRTTLeft, cameraRTTRight, m_device);
		_main_camera->setUpdateCallback(_callback);
	}
}
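A minimal sketch of a program that uses this configuration, assuming OculusViewConfig derives from osgViewer::ViewConfig (as in the osgoculusviewer project) so it can be handed to osgViewer::View::apply(); the model file name is illustrative.

#include <osgViewer/Viewer>
#include <osgDB/ReadFile>

int main()
{
    osgViewer::Viewer viewer;
    // apply() invokes configure() above, creating the window, RTT eye cameras and HUD quads
    viewer.apply(new OculusViewConfig());
    viewer.setSceneData(osgDB::readNodeFile("cow.osgt")); // illustrative model
    return viewer.run();
}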
Example #3
/* Public functions */
void OculusViewConfig::configure(osgViewer::View& view) const
{
	// Create a graphics context based on our desired traits
	osg::ref_ptr<osg::GraphicsContext::Traits> traits = m_device->graphicsContextTraits();
	osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
	if (!gc) {
		osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
		return;
	}

	// Attach to window, needed for direct mode
	m_device->attachToWindow(gc);
	
	// Attach a callback to detect swap
	osg::ref_ptr<OculusSwapCallback> swapCallback = new OculusSwapCallback(m_device);
	gc->setSwapCallback(swapCallback);

	osg::ref_ptr<osg::Camera> camera = view.getCamera();
	camera->setName("Main");
	// Disable scene rendering for main camera
	camera->setCullMask(~m_sceneNodeMask);
	camera->setGraphicsContext(gc);
	// Use the full viewport
	camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
	// Disable automatic computation of the near and far planes on the main camera; this will propagate to the slave cameras
	camera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	const int textureWidth  = m_device->renderTargetWidth()/2;
	const int textureHeight = m_device->renderTargetHeight();
	// master projection matrix
	camera->setProjectionMatrix(m_device->projectionMatrixCenter());
	// Create textures for RTT cameras
	osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D;
	textureLeft->setTextureSize( textureWidth, textureHeight );
	textureLeft->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D;
	textureRight->setTextureSize( textureWidth, textureHeight );
	textureRight->setInternalFormat( GL_RGBA );
	// Create RTT cameras and attach textures
	osg::ref_ptr<osg::Camera> cameraRTTLeft = m_device->createRTTCamera(textureLeft, OculusDevice::LEFT, osg::Camera::RELATIVE_RF, gc);
	osg::ref_ptr<osg::Camera> cameraRTTRight = m_device->createRTTCamera(textureRight, OculusDevice::RIGHT, osg::Camera::RELATIVE_RF, gc);
	cameraRTTLeft->setName("LeftRTT");
	cameraRTTRight->setName("RightRTT");
	cameraRTTLeft->setCullMask(m_sceneNodeMask);
	cameraRTTRight->setCullMask(m_sceneNodeMask);
	
	// Create warp ortho camera
	osg::ref_ptr<osg::Camera> cameraWarp = m_device->createWarpOrthoCamera(0.0, 1.0, 0.0, 1.0, gc);
	cameraWarp->setName("WarpOrtho");
	cameraWarp->setViewport(new osg::Viewport(0, 0, m_device->screenResolutionWidth(), m_device->screenResolutionHeight()));

	// Create shader program
	osg::ref_ptr<osg::Program> program = m_device->createShaderProgram();

	// Create a distortion mesh for each eye
	osg::ref_ptr<osg::Geode> leftDistortionMesh = m_device->distortionMesh(OculusDevice::LEFT, program, 0, 0, textureWidth, textureHeight);
	cameraWarp->addChild(leftDistortionMesh);

	osg::ref_ptr<osg::Geode> rightDistortionMesh = m_device->distortionMesh(OculusDevice::RIGHT, program, 0, 0, textureWidth, textureHeight);
	cameraWarp->addChild(rightDistortionMesh);

	// Add a pre-draw callback to the warp camera to handle time warp
	cameraWarp->setPreDrawCallback(new WarpCameraPreDrawCallback(m_device));

	// Attach shaders to each distortion mesh
	osg::ref_ptr<osg::StateSet> leftEyeStateSet = leftDistortionMesh->getOrCreateStateSet();
	osg::ref_ptr<osg::StateSet> rightEyeStateSet = rightDistortionMesh->getOrCreateStateSet();

	m_device->applyShaderParameters(leftEyeStateSet, program.get(), textureLeft.get(), OculusDevice::LEFT);
	m_device->applyShaderParameters(rightEyeStateSet, program.get(), textureRight.get(), OculusDevice::RIGHT);

	// Add RTT cameras as slaves, specifying offsets for the projection
	view.addSlave(cameraRTTLeft, 
		m_device->projectionOffsetMatrixLeft(),
		m_device->viewMatrixLeft(), 
		true);
	view.addSlave(cameraRTTRight, 
		m_device->projectionOffsetMatrixRight(),
		m_device->viewMatrixRight(),
		true);

	// Use sky light instead of a headlight to avoid lighting changes when the head moves
	view.setLightingMode(osg::View::SKY_LIGHT);

	// Add warp camera as slave
	view.addSlave(cameraWarp, false);
	view.setName("Oculus");

	// Connect the main camera to a cull callback that gets the HMD orientation
	camera->setDataVariance(osg::Object::DYNAMIC);
	camera->setCullCallback(new OculusViewConfigOrientationCallback(cameraRTTLeft, cameraRTTRight, m_device, swapCallback, m_warning));
	
	// Add Oculus keyboard handler
	view.addEventHandler(new OculusEventHandler(m_device));
	view.addEventHandler(new OculusWarningEventHandler(m_device, m_warning));
}
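The cull masks above assume the scene geometry is tagged with m_sceneNodeMask: the RTT eye cameras cull with that mask and the main camera with its complement, so the scene is drawn only into the eye textures while the distortion meshes (children of the warp camera) stay visible. A sketch of the matching scene-side setup, with an assumed mask value of 0x1:

osgViewer::Viewer viewer;                          // assumed to be configured with this view config
const osg::Node::NodeMask sceneNodeMask = 0x1;     // must match m_sceneNodeMask; 0x1 is an assumption
osg::ref_ptr<osg::Node> model = osgDB::readNodeFile("cow.osgt");  // illustrative model
model->setNodeMask(sceneNodeMask);                 // drawn by the RTT eye cameras, skipped by the main camera
viewer.setSceneData(model.get());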
Example #4
void OculusViewConfig::configure(osgViewer::View& view) const
{
	m_dev->setNearClip(m_nearClip);
	m_dev->setFarClip(m_farClip);
	m_dev->setSensorPredictionEnabled(m_useSensorPrediction);
	m_dev->setSensorPredictionDelta(m_predictionDelta);

	if (m_useCustomScaleFactor) {
		m_dev->setCustomScaleFactor(m_customScaleFactor);
	}

	// Create a window matching the Oculus Rift resolution
	osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();

	if (!wsi) {
		osg::notify(osg::NOTICE)<<"Error, no WindowSystemInterface available, cannot create windows."<<std::endl;
		return;
	}

	unsigned int width, height;
	wsi->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), width, height);
	osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
	traits->windowDecoration = false;
	traits->x = 0;
	traits->y = 0;
	traits->width = m_dev->hScreenResolution();
	traits->height = m_dev->vScreenResolution();
	traits->doubleBuffer = true;
	traits->sharedContext = 0;
	traits->sampleBuffers = true;
	traits->samples = 4;
	traits->vsync = true;
	osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
	osg::ref_ptr<osg::Camera> camera = view.getCamera();
	camera->setGraphicsContext(gc);
	// Use full viewport
	camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
	// Disable automatic computation of the near and far planes on the main camera; this will propagate to the slave cameras
	camera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	const int textureWidth  = m_dev->scaleFactor() * m_dev->hScreenResolution()/2;
	const int textureHeight = m_dev->scaleFactor() * m_dev->vScreenResolution();
	// master projection matrix
	camera->setProjectionMatrix(m_dev->projectionCenterMatrix());
	osg::ref_ptr<osg::Texture2D> l_tex = new osg::Texture2D;
	l_tex->setTextureSize( textureWidth, textureHeight );
	l_tex->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Texture2D> r_tex = new osg::Texture2D;
	r_tex->setTextureSize( textureWidth, textureHeight );
	r_tex->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Camera> l_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, l_tex, gc);
	osg::ref_ptr<osg::Camera> r_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, r_tex, gc);
	// Create HUD cameras for each eye
	osg::ref_ptr<osg::Camera> l_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	l_hud->setViewport(new osg::Viewport(0, 0, m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));
	osg::ref_ptr<osg::Camera> r_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	r_hud->setViewport(new osg::Viewport(m_dev->hScreenResolution() / 2.0f, 0,
										 m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));
	// Create quads for each camera
	osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
	l_hud->addChild(leftQuad);
	osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
	r_hud->addChild(rightQuad);
	// Set up shaders from the Oculus SDK documentation
	osg::ref_ptr<osg::Program> program = new osg::Program;
	osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
	vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
	osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

	// Fragment shader with or without correction for chromatic aberration
	if (m_useChromaticAberrationCorrection) {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
	} else {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
	}

	program->addShader(vertexShader);
	program->addShader(fragmentShader);
	// Configure state sets for both eyes
	osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
	leftEyeStateSet->setTextureAttributeAndModes(0, l_tex, osg::StateAttribute::ON);
	leftEyeStateSet->setAttributeAndModes( program, osg::StateAttribute::ON );
	leftEyeStateSet->addUniform( new osg::Uniform("WarpTexture", 0) );
	leftEyeStateSet->addUniform( new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::LEFT_EYE)));
	leftEyeStateSet->addUniform( new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
	leftEyeStateSet->addUniform( new osg::Uniform("Scale", m_dev->scale()));
	leftEyeStateSet->addUniform( new osg::Uniform("ScaleIn", m_dev->scaleIn()));
	leftEyeStateSet->addUniform( new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
	leftEyeStateSet->addUniform( new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));
	osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
	rightEyeStateSet->setTextureAttributeAndModes(0, r_tex, osg::StateAttribute::ON);
	rightEyeStateSet->setAttributeAndModes( program, osg::StateAttribute::ON );
	rightEyeStateSet->addUniform( new osg::Uniform("WarpTexture", 0) );
	rightEyeStateSet->addUniform( new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::RIGHT_EYE)));
	rightEyeStateSet->addUniform( new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
	rightEyeStateSet->addUniform( new osg::Uniform("Scale", m_dev->scale()));
	rightEyeStateSet->addUniform( new osg::Uniform("ScaleIn", m_dev->scaleIn()));
	rightEyeStateSet->addUniform( new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
	rightEyeStateSet->addUniform( new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));
	// Add cameras as slaves, specifying offsets for the projection
	view.addSlave(l_rtt, m_dev->projectionOffsetMatrix(OculusDevice::LEFT_EYE), m_dev->viewMatrix(OculusDevice::LEFT_EYE), true);
	view.addSlave(r_rtt, m_dev->projectionOffsetMatrix(OculusDevice::RIGHT_EYE), m_dev->viewMatrix(OculusDevice::RIGHT_EYE), true);
	view.addSlave(l_hud, false);
	view.addSlave(r_hud, false);

	// Connect the main camera to an update callback that gets the HMD orientation
	if (m_useOrientations) {
		camera->setDataVariance(osg::Object::DYNAMIC);
		camera->setUpdateCallback(new OculusViewConfigOrientationCallback(l_rtt, r_rtt, m_dev));
	}
}
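The createRTTCamera() helper used above is not part of the snippet. The sketch below shows what such a helper typically looks like in OSG (an FBO pre-render camera that renders into the supplied texture); the exact implementation in the original project may differ.

#include <osg/Camera>
#include <osg/Texture2D>

osg::Camera* createRTTCamera(osg::Camera::BufferComponent buffer,
                             osg::Texture2D* texture,
                             osg::GraphicsContext* gc)
{
    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
    camera->setClearColor(osg::Vec4(0.2f, 0.2f, 0.4f, 1.0f));
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
    camera->setRenderOrder(osg::Camera::PRE_RENDER);      // draw before the HUD cameras
    camera->setAllowEventFocus(false);
    camera->setReferenceFrame(osg::Camera::RELATIVE_RF);  // inherit the master's view/projection
    camera->setGraphicsContext(gc);
    camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
    camera->attach(buffer, texture);                      // render into the texture instead of the window
    return camera.release();
}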
Example #5
void AcrossAllScreens::configure(osgViewer::View& view) const
{
    osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
    if (!wsi)
    {
        OSG_NOTICE<<"AcrossAllScreens::configure() : Error, no WindowSystemInterface available, cannot create windows."<<std::endl;
        return;
    }

    osg::DisplaySettings* ds = getActiveDisplaySetting(view);

    double fovy, aspectRatio, zNear, zFar;
    view.getCamera()->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // displayNum has not been set so reset it to 0.
    if (si.displayNum<0) si.displayNum = 0;

    unsigned int numScreens = wsi->getNumScreens(si);
    if (numScreens==1)
    {
        if (si.screenNum<0) si.screenNum = 0;

        unsigned int width, height;
        wsi->getScreenResolution(si, width, height);

        osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits(ds);
        traits->hostName = si.hostName;
        traits->displayNum = si.displayNum;
        traits->screenNum = si.screenNum;
        traits->x = 0;
        traits->y = 0;
        traits->width = width;
        traits->height = height;
        traits->windowDecoration = false;
        traits->doubleBuffer = true;
        traits->sharedContext = 0;

        osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());

        view.getCamera()->setGraphicsContext(gc.get());

        osgViewer::GraphicsWindow* gw = dynamic_cast<osgViewer::GraphicsWindow*>(gc.get());
        if (gw)
        {
            OSG_INFO<<"  GraphicsWindow has been created successfully."<<std::endl;
            gw->getEventQueue()->getCurrentEventState()->setWindowRectangle(0, 0, width, height );
        }
        else
        {
            OSG_NOTICE<<"  GraphicsWindow has not been created successfully."<<std::endl;
        }

        double newAspectRatio = double(traits->width) / double(traits->height);
        double aspectRatioChange = newAspectRatio / aspectRatio;
        if (aspectRatioChange != 1.0)
        {
            view.getCamera()->getProjectionMatrix() *= osg::Matrix::scale(1.0/aspectRatioChange,1.0,1.0);
        }

        view.getCamera()->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));

        GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;

        view.getCamera()->setDrawBuffer(buffer);
        view.getCamera()->setReadBuffer(buffer);

    }
    else
    {

        double translate_x = 0.0;

        for(unsigned int i=0; i<numScreens; ++i)
        {
            si.screenNum = i;

            unsigned int width, height;
            wsi->getScreenResolution(si, width, height);
            translate_x += double(width) / (double(height) * aspectRatio);
        }

        bool stereoSplitScreens = numScreens==2 &&
                                 ds->getStereoMode()==osg::DisplaySettings::HORIZONTAL_SPLIT &&
                                 ds->getStereo();

        for(unsigned int i=0; i<numScreens; ++i)
        {
            si.screenNum = i;

            unsigned int width, height;
            wsi->getScreenResolution(si, width, height);

            osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits(ds);
            traits->hostName = si.hostName;
            traits->displayNum = si.displayNum;
            traits->screenNum = si.screenNum;
            traits->x = 0;
            traits->y = 0;
            traits->width = width;
            traits->height = height;
            traits->windowDecoration = false;
            traits->doubleBuffer = true;
            traits->sharedContext = 0;

            osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());

            osg::ref_ptr<osg::Camera> camera = new osg::Camera;
            camera->setGraphicsContext(gc.get());

            osgViewer::GraphicsWindow* gw = dynamic_cast<osgViewer::GraphicsWindow*>(gc.get());
            if (gw)
            {
                OSG_INFO<<"  GraphicsWindow has been created successfully."<<gw<<std::endl;

                gw->getEventQueue()->getCurrentEventState()->setWindowRectangle(traits->x, traits->y, traits->width, traits->height );
            }
            else
            {
                OSG_NOTICE<<"  GraphicsWindow has not been created successfully."<<std::endl;
            }

            camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));

            GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;
            camera->setDrawBuffer(buffer);
            camera->setReadBuffer(buffer);

            if (stereoSplitScreens)
            {
                unsigned int leftCameraNum = (ds->getSplitStereoHorizontalEyeMapping()==osg::DisplaySettings::LEFT_EYE_LEFT_VIEWPORT) ? 0 : 1;

                osg::ref_ptr<osg::DisplaySettings> ds_local = new osg::DisplaySettings(*ds);
                ds_local->setStereoMode(leftCameraNum==i ? osg::DisplaySettings::LEFT_EYE : osg::DisplaySettings::RIGHT_EYE);
                camera->setDisplaySettings(ds_local.get());

                view.addSlave(camera.get(), osg::Matrixd(), osg::Matrixd() );
            }
            else
            {
                double newAspectRatio = double(traits->width) / double(traits->height);
                double aspectRatioChange = newAspectRatio / aspectRatio;

                view.addSlave(camera.get(), osg::Matrixd::translate( translate_x - aspectRatioChange, 0.0, 0.0) * osg::Matrix::scale(1.0/aspectRatioChange,1.0,1.0), osg::Matrixd() );
                translate_x -= aspectRatioChange * 2.0;
            }
        }
    }

    view.assignSceneDataToCameras();
}