Example No. 1
OSGTextNode::OSGTextNode(QObject *parent) :
    osgQtQuick::OSGNode(parent),
    h(new Hidden)
{
    osg::ref_ptr<osgText::Font> textFont = createFont(QFont("Times"));

    h->text = createText(osg::Vec3(-100, 20, 0),
                         "The osgQtQuick :-)\n"
                         "И даже по русски!",
                         20.0f,
                         textFont.get());
    osg::ref_ptr<osg::Geode> textGeode = new osg::Geode();
    h->text->setColor(osg::Vec4(0.0f, 1.0f, 0.0f, 1.0f));
    textGeode->addDrawable(h->text.get());
#if 0
    h->text->setAutoRotateToScreen(true);
    setNode(textGeode.get());
#else
    osg::Camera *camera = createHUDCamera(-100, 100, -100, 100);
    camera->addChild(textGeode.get());
    camera->getOrCreateStateSet()->setMode(
        GL_LIGHTING, osg::StateAttribute::OFF);
    setNode(camera);
#endif
}
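The constructor relies on createFont and createText helpers that the snippet assumes but does not show (createHUDCamera is sketched later, after the first OculusViewConfig::configure listing). A minimal sketch of the two text helpers, with names and signatures inferred only from the call sites above; the font-file mapping is an assumption:

osg::ref_ptr<osgText::Font> createFont(const QFont &font)
{
    // Assumption: the Qt font family maps to a .ttf file that osgText can load
    return osgText::readFontFile(font.family().toStdString() + ".ttf");
}

osgText::Text *createText(const osg::Vec3 &pos, const std::string &content,
                          float size, osgText::Font *font)
{
    osg::ref_ptr<osgText::Text> text = new osgText::Text;
    text->setFont(font);
    text->setCharacterSize(size);
    text->setPosition(pos);
    // UTF-8 encoding so the Cyrillic line above renders correctly
    text->setText(content, osgText::String::ENCODING_UTF8);
    return text.release();
}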
Example No. 2
void OculusViewConfig::configure(osgViewer::View& view) const
{
	m_device->setNearClip(m_nearClip);
	m_device->setFarClip(m_farClip);
	m_device->setSensorPredictionEnabled(m_useSensorPrediction);
	m_device->setSensorPredictionDelta(m_predictionDelta);

	if (m_useCustomScaleFactor) {
		m_device->setCustomScaleFactor(m_customScaleFactor);
	}

	// Create a window matching the Oculus Rift resolution
	osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();

	if (!wsi) {
		osg::notify(osg::NOTICE)<<"Error, no WindowSystemInterface available, cannot create windows."<<std::endl;
		return;
	}

	// Get the screen identifiers set in environment variable DISPLAY
	osg::GraphicsContext::ScreenIdentifier si;
	si.readDISPLAY();
	
	// If displayNum has not been set, reset it to 0.
	if (si.displayNum < 0) si.displayNum = 0;

	// If screenNum has not been set, reset it to 0.
	if (si.screenNum < 0) si.screenNum = 0;

	// Override the display and screen identifiers with the configured values
	si.displayNum = _displayNum;
	si.screenNum = _screenNum;
	unsigned int width, height;
	wsi->getScreenResolution(si, width, height);

	osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
	traits->hostName = si.hostName;
	traits->screenNum = si.screenNum;
	traits->displayNum = si.displayNum;
	traits->windowDecoration = false;
	traits->x = 0;
	traits->y = 0;
	traits->width = m_device->hScreenResolution();
	traits->height = m_device->vScreenResolution();
	traits->doubleBuffer = true;
	traits->sharedContext = 0;
	traits->vsync = true; // VSync should always be enabled for Oculus Rift applications

	// Create a graphic context based on our desired traits
	osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
	if (!gc) {
		osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
		return;
	}

	_main_camera = view.getCamera();
	_main_camera->setName("Main");
	// Disable scene rendering for main camera
	_main_camera->setCullMask(~m_sceneNodeMask);
	_main_camera->setGraphicsContext(gc);
	// Use the full viewport
	_main_camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
	// Disable automatic computation of the near and far planes on the main camera; this propagates to the slave cameras
	_main_camera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	const int textureWidth  = m_device->scaleFactor() * m_device->hScreenResolution()/2;
	const int textureHeight = m_device->scaleFactor() * m_device->vScreenResolution();
	// master projection matrix
	_main_camera->setProjectionMatrix(m_device->projectionCenterMatrix());
	// Create textures for RTT cameras
	osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D;
	textureLeft->setTextureSize( textureWidth, textureHeight );
	textureLeft->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D;
	textureRight->setTextureSize( textureWidth, textureHeight );
	textureRight->setInternalFormat( GL_RGBA );
	// Create RTT (Rendering to Texture) cameras and attach textures 
	osg::ref_ptr<osg::Camera> cameraRTTLeft = createRTTCamera(textureLeft, gc);
	osg::ref_ptr<osg::Camera> cameraRTTRight = createRTTCamera(textureRight, gc);
	cameraRTTLeft->setName("LeftRTT");
	cameraRTTRight->setName("RightRTT");
	cameraRTTLeft->setCullMask(m_sceneNodeMask);
	cameraRTTRight->setCullMask(m_sceneNodeMask);
	// Create HUD camera for the left eye
	osg::ref_ptr<osg::Camera> cameraHUDLeft = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	cameraHUDLeft->setName("LeftHUD");
	cameraHUDLeft->setViewport(new osg::Viewport(0, 0, 
		m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));
	// Create HUD camera for the right eye
	osg::ref_ptr<osg::Camera> cameraHUDRight = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	cameraHUDRight->setName("RightHUD");
	cameraHUDRight->setViewport(new osg::Viewport(m_device->hScreenResolution() / 2.0f, 0,
										 m_device->hScreenResolution() / 2.0f, m_device->vScreenResolution()));
	// Create quads for each camera
	osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
	cameraHUDLeft->addChild(leftQuad);
	osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
	cameraHUDRight->addChild(rightQuad);

	// Set up shaders from the Oculus SDK documentation
	osg::ref_ptr<osg::Program> program = new osg::Program;
	osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
	vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
	osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

	// Fragment shader with or without correction for chromatic aberration
	if (m_useChromaticAberrationCorrection) {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
	} else {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
	}

	program->addShader(vertexShader);
	program->addShader(fragmentShader);
	
	// Attach shaders to each HUD
	osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
	osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
	applyShaderParameters(leftEyeStateSet, program.get(), textureLeft.get(), OculusDevice::LEFT_EYE);
	applyShaderParameters(rightEyeStateSet, program.get(), textureRight.get(), OculusDevice::RIGHT_EYE);
	
	// Add RTT cameras as slaves, specifying offsets for the projection
	view.addSlave(cameraRTTLeft, 
		m_device->projectionOffsetMatrix(OculusDevice::LEFT_EYE), 
		m_device->viewMatrix(OculusDevice::LEFT_EYE), 
		true);
	view.addSlave(cameraRTTRight, 
		m_device->projectionOffsetMatrix(OculusDevice::RIGHT_EYE), 
		m_device->viewMatrix(OculusDevice::RIGHT_EYE), 
		true);

	// Add HUD cameras as slaves
	view.addSlave(cameraHUDLeft, false);
	view.addSlave(cameraHUDRight, false);

	view.setName("Oculus");
	// Connect the main camera to an update callback that gets the HMD orientation
	if (m_useOrientations) {
		_main_camera->setDataVariance(osg::Object::DYNAMIC);
		_callback = new OculusViewConfigOrientationCallback(cameraRTTLeft, cameraRTTRight, m_device);
		_main_camera->setUpdateCallback(_callback);
	}
}
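createRTTCamera, createHUDCamera and createHUDQuad come from the surrounding project and are not part of this listing. A minimal sketch consistent with the call sites above, with all bodies assumed; the later OculusViewConfig listing uses an overload of createRTTCamera that takes the buffer component explicitly, while this one presumably defaults it to COLOR_BUFFER:

osg::Camera* createRTTCamera(osg::Texture2D* tex, osg::GraphicsContext* gc)
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	camera->setClearColor(osg::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
	camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	// Render into an FBO before the main frame is drawn
	camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
	camera->setRenderOrder(osg::Camera::PRE_RENDER);
	camera->setGraphicsContext(gc);
	camera->setViewport(0, 0, tex->getTextureWidth(), tex->getTextureHeight());
	camera->attach(osg::Camera::COLOR_BUFFER, tex);
	return camera.release();
}

osg::Camera* createHUDCamera(double left, double right, double bottom, double top,
                             osg::GraphicsContext* gc = 0)
{
	osg::ref_ptr<osg::Camera> camera = new osg::Camera;
	// Orthographic 2D overlay drawn after the main scene
	camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
	camera->setClearMask(GL_DEPTH_BUFFER_BIT);
	camera->setRenderOrder(osg::Camera::POST_RENDER);
	camera->setAllowEventFocus(false);
	camera->setProjectionMatrix(osg::Matrix::ortho2D(left, right, bottom, top));
	camera->getOrCreateStateSet()->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
	if (gc) camera->setGraphicsContext(gc);
	return camera.release();
}

osg::Geode* createHUDQuad(float width, float height)
{
	// Unit textured quad filling the HUD camera's ortho2D(0,1,0,1) projection
	osg::ref_ptr<osg::Geode> quad = new osg::Geode;
	quad->addDrawable(osg::createTexturedQuadGeometry(
		osg::Vec3(), osg::Vec3(width, 0.0f, 0.0f), osg::Vec3(0.0f, height, 0.0f)));
	return quad.release();
}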
Example No. 3
int main(void){
	osgViewer::Viewer viewer;
	osg::DisplaySettings::instance()->setNumMultiSamples( 4 );
	viewer.setUpViewInWindow( 100, 50, 800, 600 );
	viewer.getCamera()->setClearColor( osg::Vec4( 0.5,0.5,0.5,1) );
	viewer.addEventHandler(new osgViewer::StatsHandler);
	
	osg::Group* scene = new osg::Group;
	
	// Create a 2x3x4 box centered at (-10, 0, 0):
	osg::Box* boite = new osg::Box(osg::Vec3(-10, 0, 0), 2,3,4);
	osg::ShapeDrawable* boiteDrawable = new osg::ShapeDrawable(boite);
	osg::Geode* geodeBoite = new osg::Geode();
	geodeBoite->addDrawable(boiteDrawable);
	
	osg::Sphere* sphere = new osg::Sphere( osg::Vec3(10,0,0), 1.0);
	osg::ShapeDrawable* sphereDrawable = new osg::ShapeDrawable(sphere);
	osg::Geode* geodeSphere = new osg::Geode();
	geodeSphere->addDrawable(sphereDrawable);
	
	osg::Capsule* capsule = new osg::Capsule(osg::Vec3(0, 0, 0), 1.0, 3.0);
	osg::ShapeDrawable* capsuleDrawable = new osg::ShapeDrawable(capsule);
	osg::Geode* geodeCapsule = new osg::Geode();
	geodeCapsule->addDrawable(capsuleDrawable);
	
	osg::Cone* cone = new osg::Cone(osg::Vec3(0, 10, 0), 1, 2);
	osg::ShapeDrawable* coneDrawable = new osg::ShapeDrawable(cone);
	osg::Geode* geodeCone= new osg::Geode();
	geodeCone->addDrawable(coneDrawable);
	
	
	osg::Material* matBoite = new osg::Material;
	matBoite->setAmbient (osg::Material::FRONT_AND_BACK, osg::Vec4(0.5, 0.0, 0.0, 1.0));
	matBoite->setDiffuse (osg::Material::FRONT_AND_BACK, osg::Vec4(0.9, 0.0, 0.0, 1.0));
	matBoite->setSpecular(osg::Material::FRONT_AND_BACK, osg::Vec4(0.2, 0.2, 0.2, 1.0));
	matBoite->setShininess(osg::Material::FRONT_AND_BACK, 64);
	
	osg::Material* matCone = new osg::Material;
	matCone->setAmbient (osg::Material::FRONT_AND_BACK, osg::Vec4(0.5, 0.0, 0.5, 1.0));
	matCone->setDiffuse (osg::Material::FRONT_AND_BACK, osg::Vec4(0.9, 0.0, 0.9, 1.0));
	matCone->setSpecular(osg::Material::FRONT_AND_BACK, osg::Vec4(0.2, 0.2, 0.2, 1.0));
	matCone->setShininess(osg::Material::FRONT_AND_BACK, 64);
	
	osg::Node* aregne = osgDB::readNodeFile("cow_high.3ds"); 
	
	// Transform node carrying the loaded model
	osg::PositionAttitudeTransform* transformAregne = new osg::PositionAttitudeTransform;
	transformAregne->setPosition(osg::Vec3(5, 0, 0));
	transformAregne->setScale(osg::Vec3(0.2, 0.2, 0.2));
	transformAregne->getOrCreateStateSet()->setMode(GL_NORMALIZE,osg::StateAttribute::ON); 
	transformAregne->addChild(aregne);
	
	boiteDrawable->getOrCreateStateSet()->setAttributeAndModes(matBoite);
	coneDrawable->getOrCreateStateSet()->setAttributeAndModes(matCone);
	
	
	/*scene->addChild(geodeCapsule);
	scene->addChild(geodeCone);
	scene->addChild(geodeBoite);
	scene->addChild(geodeSphere);
	scene->addChild(transformAregne);*/
	scene->addChild(aregne);
	
	// Create a texture
	osg::ref_ptr<osg::Texture2D> tex2D = new osg::Texture2D;
	tex2D->setTextureSize(1024, 1024);
	tex2D->setInternalFormat(GL_RGBA);
	// Create a camera that will render into the texture
	osg::ref_ptr<osg::Camera> rttCamera =
		createRTTCamera(osg::Camera::COLOR_BUFFER, tex2D.get());
	// Specify which part of the graph the camera should render, here the whole scene:
	rttCamera->addChild(scene);
	// Create a camera displaying a HUD that covers the whole screen
	osg::ref_ptr<osg::Camera> hudCamera = createHUDCamera();
	osg::Geode* screenQuad = createScreenQuad();
	hudCamera->addChild(screenQuad);
	osg::StateSet* stateset = screenQuad->getOrCreateStateSet();
	stateset->setTextureAttributeAndModes(0, tex2D.get());
	// YOU WILL PUT THE CODE FOR QUESTION 7 HERE
	// Create a new root for the graph, to which we attach the render-to-texture
	// camera, the HUD camera, and the root of the scene graph
	osg::ref_ptr<osg::Group> root = new osg::Group;
	root->addChild(rttCamera.get());
	root->addChild(hudCamera.get());
	root->addChild(scene);
	// Tell the viewer which scene to display (setSceneData, below)
	
	osg::ref_ptr<osgGA::NodeTrackerManipulator> trackCone = new osgGA::NodeTrackerManipulator;
	trackCone->setTrackNode(geodeCone);
	trackCone->setTrackerMode(osgGA::NodeTrackerManipulator::NODE_CENTER);
	
	osg::ref_ptr<osgGA::NodeTrackerManipulator> trackBoite = new osgGA::NodeTrackerManipulator;
	trackBoite->setTrackNode(geodeBoite);
	trackBoite->setTrackerMode(osgGA::NodeTrackerManipulator::NODE_CENTER);
	
	osg::ref_ptr<osgGA::NodeTrackerManipulator> trackSphere = new osgGA::NodeTrackerManipulator;
	trackSphere->setTrackNode(geodeSphere);
	trackSphere->setTrackerMode(osgGA::NodeTrackerManipulator::NODE_CENTER);
	
	transformAregne->setUpdateCallback(new Deplacement);
	
	viewer.setSceneData(root.get()); // use the new root so the RTT/HUD result is shown
	
	osg::ref_ptr<GestionEvenements> gestionnaire = new GestionEvenements();
	viewer.addEventHandler(gestionnaire.get());

	return viewer.run();
}
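Deplacement and GestionEvenements are user classes defined elsewhere in the exercise; the snippet only uses them. Purely as an illustration of the two patterns (an update callback on a transform and a viewer event handler), hypothetical minimal versions could look like this, assuming the usual osg/osgGA/cmath headers:

// Hypothetical update callback: moves the transform on a circle each frame
class Deplacement : public osg::NodeCallback
{
public:
	virtual void operator()(osg::Node* node, osg::NodeVisitor* nv)
	{
		osg::PositionAttitudeTransform* pat =
			static_cast<osg::PositionAttitudeTransform*>(node);
		double t = nv->getFrameStamp()->getSimulationTime();
		pat->setPosition(osg::Vec3(5.0 * cos(t), 5.0 * sin(t), 0.0)); // assumed path
		traverse(node, nv); // continue traversing the subgraph
	}
};

// Hypothetical event handler skeleton
class GestionEvenements : public osgGA::GUIEventHandler
{
public:
	virtual bool handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& aa)
	{
		if (ea.getEventType() == osgGA::GUIEventAdapter::KEYDOWN) {
			// react to ea.getKey() here
		}
		return false; // let other handlers also process the event
	}
};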
Example No. 4
void OculusViewConfig::configure(osgViewer::View& view) const
{
	m_dev->setNearClip(m_nearClip);
	m_dev->setFarClip(m_farClip);
	m_dev->setSensorPredictionEnabled(m_useSensorPrediction);
	m_dev->setSensorPredictionDelta(m_predictionDelta);

	if (m_useCustomScaleFactor) {
		m_dev->setCustomScaleFactor(m_customScaleFactor);
	}

	// Create a window matching the Oculus Rift resolution
	osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();

	if (!wsi) {
		osg::notify(osg::NOTICE)<<"Error, no WindowSystemInterface available, cannot create windows."<<std::endl;
		return;
	}

	unsigned int width, height;
	wsi->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), width, height);
	osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
	traits->windowDecoration = false;
	traits->x = 0;
	traits->y = 0;
	traits->width = m_dev->hScreenResolution();
	traits->height = m_dev->vScreenResolution();
	traits->doubleBuffer = true;
	traits->sharedContext = 0;
	traits->sampleBuffers = true;
	traits->samples = 4;
	traits->vsync = true;
	osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
	osg::ref_ptr<osg::Camera> camera = view.getCamera();
	camera->setGraphicsContext(gc);
	// Use full viewport
	camera->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
	// Disable automatic computation of the near and far planes on the main camera; this propagates to the slave cameras
	camera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	const int textureWidth  = m_dev->scaleFactor() * m_dev->hScreenResolution()/2;
	const int textureHeight = m_dev->scaleFactor() * m_dev->vScreenResolution();
	// master projection matrix
	camera->setProjectionMatrix(m_dev->projectionCenterMatrix());
	osg::ref_ptr<osg::Texture2D> l_tex = new osg::Texture2D;
	l_tex->setTextureSize( textureWidth, textureHeight );
	l_tex->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Texture2D> r_tex = new osg::Texture2D;
	r_tex->setTextureSize( textureWidth, textureHeight );
	r_tex->setInternalFormat( GL_RGBA );
	osg::ref_ptr<osg::Camera> l_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, l_tex, gc);
	osg::ref_ptr<osg::Camera> r_rtt = createRTTCamera(osg::Camera::COLOR_BUFFER, r_tex, gc);
	// Create HUD cameras for each eye
	osg::ref_ptr<osg::Camera> l_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	l_hud->setViewport(new osg::Viewport(0, 0, m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));
	osg::ref_ptr<osg::Camera> r_hud = createHUDCamera(0.0, 1.0, 0.0, 1.0, gc);
	r_hud->setViewport(new osg::Viewport(m_dev->hScreenResolution() / 2.0f, 0,
										 m_dev->hScreenResolution() / 2.0f, m_dev->vScreenResolution()));
	// Create quads on each camera
	osg::ref_ptr<osg::Geode> leftQuad = createHUDQuad(1.0f, 1.0f);
	l_hud->addChild(leftQuad);
	osg::ref_ptr<osg::Geode> rightQuad = createHUDQuad(1.0f, 1.0f);
	r_hud->addChild(rightQuad);
	// Set up shaders from the Oculus SDK documentation
	osg::ref_ptr<osg::Program> program = new osg::Program;
	osg::ref_ptr<osg::Shader> vertexShader = new osg::Shader(osg::Shader::VERTEX);
	vertexShader->loadShaderSourceFromFile(osgDB::findDataFile("warp.vert"));
	osg::ref_ptr<osg::Shader> fragmentShader = new osg::Shader(osg::Shader::FRAGMENT);

	// Fragment shader with or without correction for chromatic aberration
	if (m_useChromaticAberrationCorrection) {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithChromeAb.frag"));
	} else {
		fragmentShader->loadShaderSourceFromFile(osgDB::findDataFile("warpWithoutChromeAb.frag"));
	}

	program->addShader(vertexShader);
	program->addShader(fragmentShader);
	// Configure state sets for both eyes
	osg::StateSet* leftEyeStateSet = leftQuad->getOrCreateStateSet();
	leftEyeStateSet->setTextureAttributeAndModes(0, l_tex, osg::StateAttribute::ON);
	leftEyeStateSet->setAttributeAndModes( program, osg::StateAttribute::ON );
	leftEyeStateSet->addUniform( new osg::Uniform("WarpTexture", 0) );
	leftEyeStateSet->addUniform( new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::LEFT_EYE)));
	leftEyeStateSet->addUniform( new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
	leftEyeStateSet->addUniform( new osg::Uniform("Scale", m_dev->scale()));
	leftEyeStateSet->addUniform( new osg::Uniform("ScaleIn", m_dev->scaleIn()));
	leftEyeStateSet->addUniform( new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
	leftEyeStateSet->addUniform( new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));
	osg::StateSet* rightEyeStateSet = rightQuad->getOrCreateStateSet();
	rightEyeStateSet->setTextureAttributeAndModes(0, r_tex, osg::StateAttribute::ON);
	rightEyeStateSet->setAttributeAndModes( program, osg::StateAttribute::ON );
	rightEyeStateSet->addUniform( new osg::Uniform("WarpTexture", 0) );
	rightEyeStateSet->addUniform( new osg::Uniform("LensCenter", m_dev->lensCenter(OculusDevice::RIGHT_EYE)));
	rightEyeStateSet->addUniform( new osg::Uniform("ScreenCenter", m_dev->screenCenter()));
	rightEyeStateSet->addUniform( new osg::Uniform("Scale", m_dev->scale()));
	rightEyeStateSet->addUniform( new osg::Uniform("ScaleIn", m_dev->scaleIn()));
	rightEyeStateSet->addUniform( new osg::Uniform("HmdWarpParam", m_dev->warpParameters()));
	rightEyeStateSet->addUniform( new osg::Uniform("ChromAbParam", m_dev->chromAbParameters()));
	// Add cameras as slaves, specifying offsets for the projection
	view.addSlave(l_rtt, m_dev->projectionOffsetMatrix(OculusDevice::LEFT_EYE), m_dev->viewMatrix(OculusDevice::LEFT_EYE), true);
	view.addSlave(r_rtt, m_dev->projectionOffsetMatrix(OculusDevice::RIGHT_EYE), m_dev->viewMatrix(OculusDevice::RIGHT_EYE), true);
	view.addSlave(l_hud, false);
	view.addSlave(r_hud, false);

	// Connect the main camera to an update callback that gets the HMD orientation
	if (m_useOrientations) {
		camera->setDataVariance(osg::Object::DYNAMIC);
		camera->setUpdateCallback(new OculusViewConfigOrientationCallback(l_rtt, r_rtt, m_dev));
	}
}
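The uniform block above is exactly what the earlier OculusViewConfig listing hides behind applyShaderParameters. A matching helper, written as a member of OculusViewConfig so it can reach the device pointer (the eye-enum type name OculusDevice::Eye is an assumption; only LEFT_EYE/RIGHT_EYE appear in the source):

void OculusViewConfig::applyShaderParameters(osg::StateSet* stateSet, osg::Program* program,
	osg::Texture2D* texture, OculusDevice::Eye eye) const // signature inferred from call sites
{
	stateSet->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
	stateSet->setAttributeAndModes(program, osg::StateAttribute::ON);
	stateSet->addUniform(new osg::Uniform("WarpTexture", 0));
	stateSet->addUniform(new osg::Uniform("LensCenter", m_device->lensCenter(eye)));
	stateSet->addUniform(new osg::Uniform("ScreenCenter", m_device->screenCenter()));
	stateSet->addUniform(new osg::Uniform("Scale", m_device->scale()));
	stateSet->addUniform(new osg::Uniform("ScaleIn", m_device->scaleIn()));
	stateSet->addUniform(new osg::Uniform("HmdWarpParam", m_device->warpParameters()));
	stateSet->addUniform(new osg::Uniform("ChromAbParam", m_device->chromAbParameters()));
}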
Example No. 5
int main( int argc, char **argv )
{
    osg::ArgumentParser arguments(&argc, argv);
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName() + " is the example which demonstrates how to enable/disable blending on specified draw buffers in multi-rendering-target cases.");

    std::vector<osg::Texture*> textures;
    bool useGlobalBlending = false;
    if ( arguments.read("--no-draw-buffers") ) useGlobalBlending = true;


    osg::ref_ptr<osg::Node> cessna = osgDB::readRefNodeFile("cessna.osgt");
    if (!cessna)
    {
        OSG_NOTICE<<"Cannot find model 'cessna.osgt' to render"<<std::endl;
        return 1;
    }

    // Create a camera to output multi-rendering-targets (MRT)
    osg::ref_ptr<osg::Camera> mrtCam = createMRTCamera( textures );
    mrtCam->addChild( cessna );

    // Create shader program to be used
    const char* mrtFragmentCode = {
        "void main() {\n"
        "   gl_FragData[0] = gl_Color * vec4(1.0, 1.0, 1.0, 0.7);\n"
        "   gl_FragData[1] = vec4(0.0, 1.0, 1.0, 0.0);\n"
        "   gl_FragData[2] = vec4(1.0, 0.0, 1.0, 0.3);\n"
        "   gl_FragData[3] = vec4(1.0, 1.0, 0.0, 1.0);\n"
        "}\n"
    };
    osg::ref_ptr<osg::Program> program = new osg::Program;
    program->addShader( new osg::Shader(osg::Shader::FRAGMENT, mrtFragmentCode) );

    osg::ref_ptr<osg::StateSet> ss = mrtCam->getOrCreateStateSet();
    ss->setAttributeAndModes( program );

    // Apply blending to the original scene in MRT
    if ( !useGlobalBlending )
    {
        // Only enable blending on the first draw buffer, so the other three outputs
        // stay fully opaque, which is important for MRT cases
        ss->setAttribute( new osg::Enablei(GL_BLEND, 0) );
        ss->setAttribute( new osg::Disablei(GL_BLEND, 1) );
        ss->setAttribute( new osg::Disablei(GL_BLEND, 2) );
        ss->setAttribute( new osg::Disablei(GL_BLEND, 3) );

        // Accept different blend/colormask attributes on multiple render targets
        osg::ref_ptr<osg::BlendFunci> blend0 = new osg::BlendFunci(0, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        osg::ref_ptr<osg::ColorMaski> colormask3 = new osg::ColorMaski(3, false, true, false, true);
        ss->setAttribute( blend0 );
        ss->setAttributeAndModes( colormask3 );
    }
    else
    {
        // When separate blending is disabled, every render target is affected by
        // its alpha channel and each output is blended with the background.
        //
        // This causes a big problem in situations like deferred shading, where we
        // may have to save different scene data to an MRT 'GBuffer' in which the
        // alpha channels store certain attributes rather than opacity. These
        // attributes can be reused in later post-processing steps.
        //
        // For such targets, alpha blending must be disabled; otherwise it will
        // mess up the output. That is why this example exists!
        osg::ref_ptr<osg::BlendFunc> blend = new osg::BlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        ss->setAttributeAndModes( blend.get() );
    }

    // Create some quads to be shown on screen to contain the MRT result
    osg::ref_ptr<osg::Geode> quad = new osg::Geode;
    for ( unsigned int i=0; i<textures.size(); ++i )
    {
        osg::Geometry* geom = osg::createTexturedQuadGeometry(
            osg::Vec3((float)i/(float)textures.size(), 0.0f, 0.0f),
            osg::Vec3(1.0f/(float)textures.size()-0.01f,0.0f,0.0f), osg::Vec3(0.0f,1.0f,0.0f) );
        geom->getOrCreateStateSet()->setTextureAttributeAndModes( 0, textures[i] );
        quad->addDrawable( geom );
    }

    osg::Camera* hudCam = createHUDCamera( 0.0, 1.0, 0.0, 1.0 );
    hudCam->addChild( quad.get() );

    // Construct scene graph and viewer
    osg::ref_ptr<osg::Group> root = new osg::Group;
    root->addChild( mrtCam );
    root->addChild( hudCam );

    osgViewer::Viewer viewer;
    viewer.setSceneData( root.get() );
    return viewer.run();
}
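createMRTCamera is not shown in the listing. A sketch that matches the four gl_FragData outputs written by the fragment shader above; the render-target size and clear settings are assumptions:

osg::Camera* createMRTCamera( std::vector<osg::Texture*>& attachedTextures )
{
    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
    camera->setClearColor( osg::Vec4() );
    camera->setClearMask( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    camera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER_OBJECT );
    camera->setRenderOrder( osg::Camera::PRE_RENDER );
    camera->setViewport( 0, 0, 1024, 1024 );  // assumed render-target size

    // One texture per gl_FragData[i] slot used by the shader
    for ( int i = 0; i < 4; ++i )
    {
        osg::Texture2D* tex = new osg::Texture2D;
        tex->setTextureSize( 1024, 1024 );
        tex->setInternalFormat( GL_RGBA );
        attachedTextures.push_back( tex );
        camera->attach( osg::Camera::BufferComponent(osg::Camera::COLOR_BUFFER0 + i), tex );
    }
    return camera.release();
}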
Example No. 6
int main( int argc, char** argv )
{
    osg::ArgumentParser arguments( &argc, argv );
    osg::ref_ptr<osg::MatrixTransform> scene = new osg::MatrixTransform;
    scene->addChild( osgDB::readNodeFile("sponza.ive") );
    
    // Create MRT camera and setup draw callback
    int w = 1600, h = 900;
    std::vector<osg::Texture2D*> textures;
    
    osg::Camera* mrtCam = createMRTCamera( textures, w, h );
    mrtCam->addChild( scene.get() );
    
    osg::ref_ptr<HBAODrawCallback> vxgi = new HBAODrawCallback;
    vxgi->setInputNormalTexture( textures[0] );
    vxgi->setInputDepthTexture( textures[1] );
    mrtCam->setPostDrawCallback( vxgi.get() );
    
    osg::ref_ptr<osg::Texture2D> outputTex = new osg::Texture2D;
    outputTex->setTextureSize( w, h );
    outputTex->setSourceType( GL_UNSIGNED_BYTE );
    outputTex->setSourceFormat( GL_RGBA );
    outputTex->setInternalFormat( GL_RGBA );
    outputTex->setWrap( osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE );
    outputTex->setWrap( osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE );
    outputTex->setFilter( osg::Texture2D::MIN_FILTER, osg::Texture2D::NEAREST );
    outputTex->setFilter( osg::Texture2D::MAG_FILTER, osg::Texture2D::NEAREST );
    vxgi->setOutputTexture( outputTex.get() );
    
    // Create shader program to be used
    const char* mrtVertexCode = {
        "varying vec4 vecInEye;\n"
        "void main() {\n"
        "   vecInEye = gl_ModelViewMatrix * gl_Vertex;\n"
        "   gl_Position = ftransform();\n"
        "}\n"
    };
    
    const char* mrtFragmentCode = {
        "uniform mat4 osg_ViewMatrixInverse;\n"
        "varying vec4 vecInEye;\n"
        "void main() {\n"
        "   vec3 normal = normalize(cross(dFdx(vecInEye.xyz), dFdy(vecInEye.xyz)));\n"
        "   normal = mat3(osg_ViewMatrixInverse) * vec3(normal.x, -normal.y, -normal.z);\n"
        "   gl_FragColor = vec4(normal * 0.5 + vec3(0.5), 1.0);\n"
        "}\n"
    };
    osg::ref_ptr<osg::Program> program = new osg::Program;
    program->addShader( new osg::Shader(osg::Shader::VERTEX, mrtVertexCode) );
    program->addShader( new osg::Shader(osg::Shader::FRAGMENT, mrtFragmentCode) );

    osg::StateSet* ss = mrtCam->getOrCreateStateSet();
    ss->setAttributeAndModes( program.get() );
    
    // Create screen quad to contain the MRT result
    osg::ref_ptr<osg::Geode> quad = new osg::Geode;
    quad->addDrawable( osg::createTexturedQuadGeometry(
        osg::Vec3(), osg::Vec3(0.5f, 0.0f, 0.0f), osg::Vec3(0.0f, 1.0f, 0.0f),
        0.0f, 0.0f, 0.5f, 1.0f) );
    
    osg::Camera* hudCam = createHUDCamera( 0.0, 1.0, 0.0, 1.0 );
    hudCam->getOrCreateStateSet()->setTextureAttributeAndModes( 0, outputTex.get() );
    hudCam->addChild( quad.get() );
    
    // Construct scene graph and viewer
    osg::ref_ptr<osg::Group> root = new osg::Group;
    root->addChild( mrtCam );
    root->addChild( hudCam );
    root->addChild( scene.get() );
    
    osgViewer::Viewer viewer;
    viewer.setSceneData( root.get() );
    //viewer.getCamera()->setProjectionMatrixAsPerspective( 30.0f, 16.0f/9.0f, 0.1f, 1000.0f );
    //viewer.getCamera()->setComputeNearFarMode( osg::Camera::DO_NOT_COMPUTE_NEAR_FAR );
    viewer.addEventHandler( new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()) );
    viewer.addEventHandler( new osgViewer::StatsHandler );
    viewer.addEventHandler( new osgViewer::WindowSizeHandler );
    viewer.setCameraManipulator( new osgGA::TrackballManipulator );
    viewer.setUpViewOnSingleScreen( 0 );
    
    viewer.run();
    return 0;
}
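This second program passes textures[0] as a normal texture and textures[1] as a depth texture to the post-draw callback, so its createMRTCamera presumably attaches one color target and one depth target at the given size. A sketch under those assumptions (HBAODrawCallback itself is a custom osg::Camera::DrawCallback subclass defined elsewhere):

osg::Camera* createMRTCamera( std::vector<osg::Texture2D*>& textures, int w, int h )
{
    // Color target receiving the view-space normals written by the shader
    osg::Texture2D* normalTex = new osg::Texture2D;
    normalTex->setTextureSize( w, h );
    normalTex->setInternalFormat( GL_RGBA );

    // Depth target sampled later by the ambient-occlusion pass
    osg::Texture2D* depthTex = new osg::Texture2D;
    depthTex->setTextureSize( w, h );
    depthTex->setSourceFormat( GL_DEPTH_COMPONENT );
    depthTex->setSourceType( GL_FLOAT );
    depthTex->setInternalFormat( GL_DEPTH_COMPONENT24 );

    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
    camera->setClearMask( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    camera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER_OBJECT );
    camera->setRenderOrder( osg::Camera::PRE_RENDER );
    camera->setViewport( 0, 0, w, h );
    camera->attach( osg::Camera::COLOR_BUFFER0, normalTex );
    camera->attach( osg::Camera::DEPTH_BUFFER, depthTex );

    textures.push_back( normalTex );  // textures[0]: normals
    textures.push_back( depthTex );   // textures[1]: depth
    return camera.release();
}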