Example #1
QString QPipelineClearTargetNode::description() const
{
	QString targets;
	if( clearDepth() )
		targets += "Depth";
	for( int i = 0; i < 4; ++i )
	{
		if( clearColBuf(i) )
		{
			if( targets.length() ) targets += " | ";
			targets += QString("ColBuf%1").arg(i);
		}
	}
	if( targets.isEmpty() )
		targets += tr("None");
	QQuatF color = clearColor();
	targets += QString(" | R: %1 G: %2 B: %3 A: %4").arg(color.x).arg(color.y).arg(color.z).arg(color.w);
	return targets;
}
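For illustration only (not from the original source): with clearDepth() and clearColBuf(0) returning true and a clear color of (0, 0, 0, 1), description() produces a string of the form

	Depth | ColBuf0 | R: 0 G: 0 B: 0 A: 1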
Example #2
	void capture(Graphics& gl, const Lens& cam, const Pose& pose, Drawable& draw) {
		validate();
		
		glPushAttrib(GL_ALL_ATTRIB_BITS);
		
		Vec3d pos = pose.pos();
		Vec3d ux, uy, uz; 
		pose.unitVectors(ux, uy, uz);
		mProjection = Matrix4d::perspective(90, 1, cam.near(), cam.far());
		
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mFboId);
		for (int face=0; face<6; face++) {
			glDrawBuffer(GL_COLOR_ATTACHMENT0_EXT+face);
			
			gl.viewport(0, 0, resolution(), resolution());
			gl.clearColor(clearColor());
			gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
			
			switch (face) {
				case 0:
					// GL_TEXTURE_CUBE_MAP_POSITIVE_X   
					mModelView = Matrix4d::lookAt(uz, uy, -ux, pos);
					break;
				case 1:
					// GL_TEXTURE_CUBE_MAP_NEGATIVE_X   
					mModelView = Matrix4d::lookAt(-uz, uy, ux, pos);
					break;
				case 2:
					// GL_TEXTURE_CUBE_MAP_POSITIVE_Y   
					mModelView = Matrix4d::lookAt(ux, -uz, uy, pos);
					break;
				case 3:
					// GL_TEXTURE_CUBE_MAP_NEGATIVE_Y   
					mModelView = Matrix4d::lookAt(ux, uz, -uy, pos);
					break;
				case 4:
					// GL_TEXTURE_CUBE_MAP_POSITIVE_Z   
					mModelView = Matrix4d::lookAt(ux, uy, uz, pos);
					break;
				default:
					// GL_TEXTURE_CUBE_MAP_NEGATIVE_Z   
					mModelView = Matrix4d::lookAt(-ux, uy, -uz, pos);
					break;
			}
			
			// apply camera transform:
			gl.pushMatrix(gl.PROJECTION);
			gl.loadMatrix(mProjection);
			gl.pushMatrix(gl.MODELVIEW);
			gl.loadMatrix(mModelView);
			
			draw.onDraw(gl);
			
			gl.popMatrix(gl.PROJECTION);
			gl.popMatrix(gl.MODELVIEW);
		}
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
		
		glPopAttrib();
	}
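Note that every face is rendered with the same square 90-degree frustum, Matrix4d::perspective(90, 1, cam.near(), cam.far()); only the look-at basis changes per face, which is what allows the six renders to tile into a seamless cube map.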
Example #3
void DialogSellItem::on_barcodeLineEdit_returnPressed()
{
    WarehouseItemData data = m_common->sqlHelper()->warehouseItemDataFromBarcode(ui->barcodeLineEdit->text());

    if (data.id > 0) {
        QSqlQuery query;
        query.prepare("UPDATE warehouse_items SET amount = amount - :amount WHERE id = :id");
        query.bindValue(":id", data.id);
        query.bindValue(":amount", data.amount);
        if (query.exec()) {
            if (data.amount == 1)
                ui->labelMessage->setText(tr("%1 added").arg(data.name));
            else
                ui->labelMessage->setText(tr("%1 pcs of %2 added").arg(data.amount).arg(data.name));
            ui->labelMessage->setStyleSheet("background-color: #15cd10;");
            m_common->sqlHelper()->saveAmountChange(data.id, -data.amount, data.bruttoSellPrice);
        } else {
            qWarning() << query.lastError().text();
        }

    } else {
        ui->labelMessage->setStyleSheet("background-color: red;");
        ui->labelMessage->setText(tr("The %1 barcode does not belongs to any items!").arg(ui->barcodeLineEdit->text()));
    }
    QTimer::singleShot(2000, this, SLOT(clearColor()));
    ui->barcodeLineEdit->clear();
    ui->barcodeLineEdit->setFocus();
}
Example #4
void timer(int v)
{
	planeTimer(v);
	clearColor();
	sun += dSun;
	glutPostRedisplay(); // trigger display function by sending redraw into message queue
	glutTimerFunc(1000/FPS,timer,v); // restart timer again
}
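A timer callback like this only fires if it is armed once at startup. A minimal sketch of that registration (assuming the same FPS constant and the timer() function above, plus the usual glutInit/glutCreateWindow setup before it):

	glutTimerFunc(1000/FPS, timer, 0); // arm the first tick; timer() re-arms itself on every call
	glutMainLoop();                    // hand control to GLUT's event loop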
Example #5
void Wall::drawCube(COLOR cubeCell){
    COLOR cubeColor = wallColors[cubeCell.X][cubeCell.Y][cubeCell.Z];
    pushMatrix();
    clearColor(cubeColor.R, cubeColor.G, cubeColor.B);
    translate(cubeCell.X * deltaVector.X + startCorner.X,
              cubeCell.Y * deltaVector.Y + startCorner.Y,
              cubeCell.Z * deltaVector.Z + startCorner.Z);
    scale(deltaVector.X, deltaVector.Y, deltaVector.Z);
    cube(1);
    popMatrix();
}
Example #6
LedControl::LedControl() {
  if ( MatrixCount < 64) {
    this->matrixIndex = MatrixCount++;                    // assign a key index to this instance
    this->Devices_addr = 64 - MatrixCount;
  }
  else {
    this->matrixIndex = 255 ;  // too many keys
  }

  this->Fast_mode = false;
  this->Font_mode = true;
  clearColor();
}
Example #7
LedControl::LedControl(int _addr) {
  _addr--;
  if ( MatrixCount < 64)
  {
    this->matrixIndex = MatrixCount++;                    // assign a key index to this instance
    if (_addr < 64) {
      matrixs[this->matrixIndex].Addr.nbr = _addr;
    }
  }
  else
    this->matrixIndex = 255 ;  // too many keys
	
  this->Fast_mode=false;
  clearColor();
}
Example #8
void GLWidget::initializeGL()
{
    Color clearColor(0,0,0,0);

    OpenGL::clearColor(clearColor);

    glEnable(GL_TEXTURE_2D);
    glEnable(GL_COLOR_MATERIAL);

    // Depth buffer setup
    glClearDepth(1.0f);
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);

    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);

    glEnable(GL_ALPHA_TEST);

    glEnable(GL_CULL_FACE);

    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);

    // Initialize lighting
    glEnable(GL_LIGHTING);
    glShadeModel(GL_SMOOTH);

    glEnable(GL_LIGHT0);

    OpenGL::lightColor(GL_LIGHT0,GL_AMBIENT,Color(0.2,0.2,0.2));
    OpenGL::lightColor(GL_LIGHT0,GL_DIFFUSE,Color(1,1,1));
    OpenGL::lightColor(GL_LIGHT0,GL_SPECULAR,Color(1,1,1));

    setRecording(false);

    glPointSize(10);
    glLineWidth(3);

    _planarChain = new PlanarChain();
    _humanHand = new HumanHand();
    _walkingBug = new WalkingBug();

    _character = _planarChain;
}
Example #9
Matrix::Matrix(uint8_t (*_addr)[8]) {
//	uint8_t (*p)[10]=_addr;
  uint8_t _x = 0, _y = 0;
  for(uint8_t a=0;a<8;a++)	// check the first level
  {
     if(_addr[0][a] == 0){

      break;	// NULL, stop checking this level
    }
    else{
      _x = a+1;
      for(uint8_t b=0;b<8;b++) { // check the second level
        if(_addr[b][a] == 0){
          break;	// NULL, stop checking this level
        }
        else {
          _y = b+1;
        }
      }
    } 
  }
  
  this->_numX = _x;
  this->_numY = _y;

  this->_matrixNum = this->_numX * this->_numY;
  led = new LedControl[this->_matrixNum];

  this->cursor_y = 0;
  this->cursor_x = 0;

  uint8_t _p[64];  
  for (int a = 0; a < this->_numY; a++) {
    for (int b = 0; b < this->_numX ; b++) {
      uint8_t _s = b + a * this->_numX ;
      _p[_s]=_addr[a][b];
    }
  }
  setDeviceAddr(_p);

  clearFastMode();
  clearColor();
  setFontMode(true);
}
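The copy loop above flattens the 2-D address table row by row (_s = b + a * this->_numX); for example, with a 2-wide by 3-high layout (_numX = 2, _numY = 3), _addr[1][0] lands in _p[2] before being handed to setDeviceAddr().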
Example #10
void RenderingPane::Render()
{
    if (_graphics)
    {
        GDK::Vector4 clearColor(0.5f, 0.5f, 0.5f, 1.0f);
        _graphics->Clear(clearColor);
        _graphics->ClearDepth(1.0f);
        if (_camera)
        {
            CRect clientRect;
            _renderWindow.GetClientRect(clientRect);
            _camera->SetAspect((float)clientRect.Width()/(float)clientRect.Height());

            _graphics->SetViewProjection(_camera->View(), _camera->Projection());
        }

        OnRender();
        _graphics->Present();
    }
}
Example #11
int main( int argc, char **argv )
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);

    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" is the example which demonstrates use of 3D textures.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options]");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("-i <filename>","Input scene (or presentation) filename.");
    arguments.getApplicationUsage()->addCommandLineOption("-o <filename>","Base ouput filename of the images, recommended to use something like Images/image.png");
    arguments.getApplicationUsage()->addCommandLineOption("--cs <filename>","Load pre-generated configuration file for run.");
    arguments.getApplicationUsage()->addCommandLineOption("--ouput-cs <filename>","Output configuration file with settings provided on commandline.");
    arguments.getApplicationUsage()->addCommandLineOption("-p <filename>","Use specificied camera path file to control camera position.");
    arguments.getApplicationUsage()->addCommandLineOption("--offscreen","Use an pbuffer to render the images offscreen.");
    arguments.getApplicationUsage()->addCommandLineOption("--screen","Use an window to render the images.");
    arguments.getApplicationUsage()->addCommandLineOption("--width <width>","Window/output image width.");
    arguments.getApplicationUsage()->addCommandLineOption("--height <height>","Window/output image height.");
    arguments.getApplicationUsage()->addCommandLineOption("--screen-distance <distance>","Set the distance of the viewer from the physical screen.");
    arguments.getApplicationUsage()->addCommandLineOption("--screen-width <width>","Set the width of the physical screen.");
    arguments.getApplicationUsage()->addCommandLineOption("--screen-height <height>","Set the height of the physical screen.");
    arguments.getApplicationUsage()->addCommandLineOption("--ms <s>","Number of multi-samples to use when rendering, an enable a single sample buffer.");
    arguments.getApplicationUsage()->addCommandLineOption("--samples <s>","Number of multi-samples to use when rendering.");
    arguments.getApplicationUsage()->addCommandLineOption("--sampleBuffers <sb>","Number of sample buffers to use when rendering.");
    arguments.getApplicationUsage()->addCommandLineOption("-f <fps>","Number of frames per second in simulation time.");
    arguments.getApplicationUsage()->addCommandLineOption("-n <frames>","Number of frames to render/images to create.");
    arguments.getApplicationUsage()->addCommandLineOption("-d <time>","Duration of rendering run (duration = frames/fps).");
    arguments.getApplicationUsage()->addCommandLineOption("--center <x> <y> <z>","View center.");
    arguments.getApplicationUsage()->addCommandLineOption("--eye <x> <y> <z>","Camera eye point.");
    arguments.getApplicationUsage()->addCommandLineOption("--up <x> <y> <z>","Camera up vector.");
    arguments.getApplicationUsage()->addCommandLineOption("--rotation-center <x> <y> <z>","Position to rotatate around.");
    arguments.getApplicationUsage()->addCommandLineOption("--rotation-axis <x> <y> <z>","Axis to rotate around.");
    arguments.getApplicationUsage()->addCommandLineOption("--rotation-speed <v>","Degrees per second.");
    arguments.getApplicationUsage()->addCommandLineOption("--stereo <mode>","OFF | HORIZONTAL_SPLIT | VERTICAL_SPLIT");

    unsigned int helpType = 0;
    if ((helpType = arguments.readHelpType()))
    {
        arguments.getApplicationUsage()->write(std::cout, helpType);
        return 1;
    }

    osgViewer::Viewer viewer;

    typedef std::list< osg::ref_ptr<gsc::CaptureSettings> > CaptureSettingsList;
    CaptureSettingsList frameCaptureList;

    osg::ref_ptr<gsc::CaptureSettings> fc = new gsc::CaptureSettings;

    double duration = 0.0;
    double fps = 0.0f;
    unsigned int nframes = 0;

    bool readCaptureSettings = false;
    std::string filename;
    if (arguments.read("--cs",filename))
    {
        osg::ref_ptr<osg::Object> object = osgDB::readObjectFile(filename);
        gsc::CaptureSettings* input_cs = dynamic_cast<gsc::CaptureSettings*>(object.get());
        if (input_cs) { fc = input_cs; readCaptureSettings = true; }
        else
        {
            OSG_NOTICE<<"Unable to read CaptureSettings from file: "<<filename<<std::endl;
            if (object.valid()) OSG_NOTICE<<"Object read, "<<object.get()<<", className()="<<object->className()<<std::endl;
            return 1;
        }
    }

    float screenWidth = fc->getScreenWidth()!=0.0 ? fc->getScreenWidth() : osg::DisplaySettings::instance()->getScreenWidth();
    if (arguments.read("--screen-width",screenWidth)) {}

    float screenHeight = fc->getScreenHeight()!=0.0 ? fc->getScreenHeight() : osg::DisplaySettings::instance()->getScreenHeight();
    if (arguments.read("--screen-height",screenHeight)) {}

    float screenDistance = fc->getScreenDistance()!=0.0 ? fc->getScreenDistance() : osg::DisplaySettings::instance()->getScreenDistance();
    if (arguments.read("--screen-distance",screenDistance)) {}

    fc->setScreenWidth(screenWidth);
    osg::DisplaySettings::instance()->setScreenWidth(screenWidth);
    
    fc->setScreenHeight(screenHeight);
    osg::DisplaySettings::instance()->setScreenHeight(screenHeight);

    fc->setScreenDistance(screenDistance);
    osg::DisplaySettings::instance()->setScreenDistance(screenDistance);

    bool useScreenSizeForProjectionMatrix = true;

    if (arguments.read("-i",filename)) fc->setInputFileName(filename);
    if (arguments.read("-o",filename)) fc->setOutputFileName(filename);
    if (arguments.read("-p",filename))
    {
        osg::ref_ptr<gsc::CameraPathProperty> cpp = new gsc::CameraPathProperty;
        cpp->setAnimationPathFileName(filename);

        double startTime = 0, endTime = 1.0f;
        if (cpp->getTimeRange(startTime, endTime))
        {
            OSG_NOTICE<<"Camera path time range "<<startTime<<", "<<endTime<<std::endl;
            if (startTime!=0.0)
            {
                cpp->resetTimeRange(0.0, endTime-startTime);
                if (cpp->getTimeRange(startTime, endTime))
                {
                    OSG_NOTICE<<"   new time range "<<startTime<<", "<<endTime<<std::endl;
                }
                else
                {
                    OSG_NOTICE<<"   failed to set new time range "<<startTime<<", "<<endTime<<std::endl;
                }
            }
            duration = endTime;            
        }
        else
        {
            OSG_NOTICE<<"Camera path time range "<<startTime<<", "<<endTime<<std::endl;
        }

        fc->addUpdateProperty(cpp.get());
    }
    else
    {
        osg::ref_ptr<gsc::CameraProperty> cp = fc->getPropertyOfType<gsc::CameraProperty>();

        bool newCameraProperty = false;
        bool valueSet = false;
        
        if (!cp)
        {
            newCameraProperty = true;
            cp = new gsc::CameraProperty;

            osg::ref_ptr<osg::Node> node = fc->getInputFileName().empty() ? 0 : osgDB::readNodeFile(fc->getInputFileName());
            if (node.valid())
            {
                cp->setToModel(node.get());
                valueSet = true;
            }
            
        }
        
        osg::Vec3d vec;
        while (arguments.read("--center",vec.x(), vec.y(), vec.z())) { cp->setCenter(vec); valueSet = true; }
        while (arguments.read("--eye",vec.x(), vec.y(), vec.z())) { cp->setEyePoint(vec); valueSet = true; }
        while (arguments.read("--up",vec.x(), vec.y(), vec.z())) { cp->setUpVector(vec); valueSet = true; }
        while (arguments.read("--rotation-center",vec.x(), vec.y(), vec.z())) { cp->setRotationCenter(vec); valueSet = true; }
        while (arguments.read("--rotation-axis",vec.x(), vec.y(), vec.z())) { cp->setRotationAxis(vec); valueSet = true; }

        double speed;
        while (arguments.read("--rotation-speed",speed)) { cp->setRotationSpeed(speed); valueSet = true; }

        if (newCameraProperty && valueSet)
        {
            fc->addUpdateProperty(cp.get());
        }
    }

    std::string stereoMode;
    if (arguments.read("--stereo", stereoMode))
    {        
        if      (stereoMode=="HORIZONTAL_SPLIT") fc->setStereoMode(gsc::CaptureSettings::HORIZONTAL_SPLIT);
        else if (stereoMode=="VERTICAL_SPLIT") fc->setStereoMode(gsc::CaptureSettings::VERTICAL_SPLIT);
        else if (stereoMode=="OFF") fc->setStereoMode(gsc::CaptureSettings::OFF);
    }

    if (arguments.read("--offscreen")) fc->setOffscreen(true);
    if (arguments.read("--screen")) fc->setOffscreen(false);

    if (arguments.read("--flip")) fc->setOutputImageFlip(true);
    if (arguments.read("--no-flip")) fc->setOutputImageFlip(false);

    unsigned int width = 1024;
    if (arguments.read("--width",width)) fc->setWidth(width);

    unsigned int height = 512;
    if (arguments.read("--height",height)) fc->setHeight(height);

    if (arguments.read("--rgb")) fc->setPixelFormat(gsc::CaptureSettings::RGB);
    if (arguments.read("--rgba")) fc->setPixelFormat(gsc::CaptureSettings::RGBA);
    
    osg::Vec4 clearColor(0.0f,0.0f,0.0f,0.0f);
    while (arguments.read("--clear-color",clearColor[0],clearColor[1],clearColor[2],clearColor[3])) {}


    unsigned int samples = 0;
    if (arguments.read("--samples",samples)) fc->setSamples(samples);

    unsigned int sampleBuffers = 0;
    if (arguments.read("--sampleBuffers",sampleBuffers)) fc->setSampleBuffers(sampleBuffers);

    unsigned int ms = 0;
    if (arguments.read("--ms",ms))
    {
        fc->setSamples(ms);
        fc->setSampleBuffers(1);
    }

    if (arguments.read("-f",fps)) fc->setFrameRate(fps);

    if (arguments.read("-n",nframes)) fc->setNumberOfFrames(nframes);

    if (arguments.read("-d",duration)) {}


    std::string key;
    double time;
    while(arguments.read("--key-down",time, key) && key.size()>=1)
    {
        OSG_NOTICE<<"keydown "<<key<<", "<<time<<std::endl;
        osg::ref_ptr<osgGA::GUIEventAdapter> event = new osgGA::GUIEventAdapter;
        event->setTime(time);
        event->setEventType(osgGA::GUIEventAdapter::KEYDOWN);
        event->setKey(key[0]);
        fc->addUpdateProperty(new gsc::EventProperty(event.get()));
    }

    while(arguments.read("--key-up",time, key) && key.size()>=1)
    {
        OSG_NOTICE<<"keyup "<<key<<", "<<time<<std::endl;
        osg::ref_ptr<osgGA::GUIEventAdapter> event = new osgGA::GUIEventAdapter;
        event->setTime(time);
        event->setEventType(osgGA::GUIEventAdapter::KEYUP);
        event->setKey(key[0]);
        fc->addUpdateProperty(new gsc::EventProperty(event.get()));
    }

    double mouse_x, mouse_y;
    while(arguments.read("--mouse-move",time, mouse_x, mouse_y))
    {
        OSG_NOTICE<<"mouse move "<<time<<", "<<mouse_x<<", "<<mouse_y<<std::endl;
        osg::ref_ptr<osgGA::GUIEventAdapter> event = new osgGA::GUIEventAdapter;
        event->setTime(time);
        event->setEventType(osgGA::GUIEventAdapter::MOVE);
        event->setX(mouse_x);
        event->setY(mouse_y);
        fc->addUpdateProperty(new gsc::EventProperty(event.get()));
    }

    while(arguments.read("--mouse-drag",time, mouse_x, mouse_y))
    {
        OSG_NOTICE<<"mouse drag "<<time<<", "<<mouse_x<<", "<<mouse_y<<std::endl;
        osg::ref_ptr<osgGA::GUIEventAdapter> event = new osgGA::GUIEventAdapter;
        event->setTime(time);
        event->setEventType(osgGA::GUIEventAdapter::DRAG);
        event->setX(mouse_x);
        event->setY(mouse_y);
        fc->addUpdateProperty(new gsc::EventProperty(event.get()));
    }


    if (!readCaptureSettings)
    {
        if (duration!=0.0)
        {
            if (fps!=0.0) nframes = static_cast<unsigned int>(ceil(duration*fps));
            else if (nframes!=0) fps = static_cast<double>(nframes)/duration;
            else
            {
                fps = 60.0;
                nframes = static_cast<unsigned int>(ceil(duration*fps));
            }
        }
        else // duration == 0.0
        {
            if (fps==0.0) fps=60.0;
            if (nframes==0) nframes=1;

            duration = static_cast<double>(nframes)/fps;
        }

        fc->setNumberOfFrames(nframes);
        fc->setFrameRate(fps);
        OSG_NOTICE<<"Duration="<<duration<<", FPS="<<fps<<", Number of Frames="<<nframes<<std::endl;
    }
    



    if (arguments.read("--output-cs",filename))
    {
        osgDB::writeObjectFile(*fc, filename);
        return 1;
    }

    

    if (fc.valid())
    {
        frameCaptureList.push_back(fc);
    }

    if (frameCaptureList.empty())
    {
        OSG_NOTICE<<"No settings provided"<<std::endl;
        return 1;
    }


    // setup viewer
    {
        osg::ref_ptr<osg::DisplaySettings> ds = new osg::DisplaySettings;

        bool stereo = fc->getStereoMode()!=gsc::CaptureSettings::OFF;
        osg::DisplaySettings::StereoMode stereoMode = fc->getStereoMode()==gsc::CaptureSettings::VERTICAL_SPLIT ? osg::DisplaySettings::VERTICAL_SPLIT : osg::DisplaySettings::HORIZONTAL_SPLIT;
        double fovx_multiple = fc->getStereoMode()==gsc::CaptureSettings::HORIZONTAL_SPLIT ? 2.0 : 1;
        double fovy_multiple = fc->getStereoMode()==gsc::CaptureSettings::VERTICAL_SPLIT ? 2.0 : 1;
        ds->setStereoMode(stereoMode);
        ds->setStereo(stereo);

        if (fc->getScreenWidth()!=0.0) ds->setScreenWidth(fc->getScreenWidth());
        if (fc->getScreenHeight()!=0.0) ds->setScreenHeight(fc->getScreenHeight());
        if (fc->getScreenDistance()!=0.0) ds->setScreenDistance(fc->getScreenDistance());

        
        osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits(ds.get());

        traits->readDISPLAY();
        if (traits->displayNum<0) traits->displayNum = 0;

        traits->x = 0;
        traits->y = 0;
        traits->width = fc->getWidth();
        traits->height = fc->getHeight();
        traits->alpha = (fc->getPixelFormat() == gsc::CaptureSettings::RGBA) ? 8 : 0;
        traits->samples = fc->getSamples();
        traits->sampleBuffers = fc->getSampleBuffers();
        traits->windowDecoration = !(fc->getOffscreen());
        traits->doubleBuffer = true;
        traits->sharedContext = 0;
        traits->pbuffer = fc->getOffscreen();

        osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get());
        if (!gc)
        {
            OSG_NOTICE<<"Failed to created requested graphics context"<<std::endl;
            return 1;
        }

        viewer.getCamera()->setClearColor(clearColor);
        viewer.getCamera()->setGraphicsContext(gc.get());
        viewer.getCamera()->setDisplaySettings(ds.get());

        osgViewer::GraphicsWindow* gw = dynamic_cast<osgViewer::GraphicsWindow*>(gc.get());
        if (gw)
        {
            OSG_INFO<<"GraphicsWindow has been created successfully."<<std::endl;
            gw->getEventQueue()->getCurrentEventState()->setWindowRectangle(0, 0, fc->getWidth(),  fc->getHeight());
        }
        else
        {
            OSG_NOTICE<<"PixelBuffer has been created succseffully "<<traits->width<<", "<<traits->height<<std::endl;
        }

        if (useScreenSizeForProjectionMatrix)
        {
            OSG_NOTICE<<"Setting projection matrix"<<std::endl;
            
            double vfov = osg::RadiansToDegrees(atan2(screenHeight/2.0f,screenDistance)*2.0);
            // double hfov = osg::RadiansToDegrees(atan2(width/2.0f,distance)*2.0);

            viewer.getCamera()->setProjectionMatrixAsPerspective( vfov*fovy_multiple, (screenWidth/screenHeight)*fovx_multiple, 0.1, 1000.0);

            OSG_NOTICE<<"setProjectionMatrixAsPerspective( "<<vfov*fovy_multiple<<", "<<(screenWidth/screenHeight)*fovx_multiple<<", "<<0.1<<", "<<1000.0<<");"<<std::endl;

            
        }
        else
        {
            double fovy, aspectRatio, zNear, zFar;
            viewer.getCamera()->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

            double newAspectRatio = double(traits->width) / double(traits->height);
            double aspectRatioChange = newAspectRatio / aspectRatio;
            if (aspectRatioChange != 1.0)
            {
                viewer.getCamera()->getProjectionMatrix() *= osg::Matrix::scale(fovx_multiple/aspectRatioChange,fovy_multiple,1.0);
            }
        }

        // set up stereo masks
        viewer.getCamera()->setCullMask(0xffffffff);
        viewer.getCamera()->setCullMaskLeft(0x00000001);
        viewer.getCamera()->setCullMaskRight(0x00000002);

        viewer.getCamera()->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));

        GLenum buffer = traits->doubleBuffer ? GL_BACK : GL_FRONT;

        viewer.getCamera()->setDrawBuffer(buffer);
        viewer.getCamera()->setReadBuffer(buffer);
    }

    std::string outputPath = osgDB::getFilePath(fc->getOutputFileName());
    if (!outputPath.empty())
    {
        osgDB::FileType type = osgDB::fileType(outputPath);
        switch(type)
        {
            case(osgDB::FILE_NOT_FOUND):
                if (!osgDB::makeDirectory(outputPath))
                {
                    OSG_NOTICE<<"Error: could not create directory ["<<outputPath<<"]."<<std::endl;                    
                    return 1;
                }
                OSG_NOTICE<<"Created directory ["<<outputPath<<"]."<<std::endl;
                break;
            case(osgDB::REGULAR_FILE):
                OSG_NOTICE<<"Error: filepath for output files is regular file, not a directory as required."<<std::endl;
                return 1;
            case(osgDB::DIRECTORY):
                OSG_NOTICE<<"Valid path["<<outputPath<<"] provided for output files."<<std::endl;
                break;
        }
    }

    GLenum pixelFormat = (fc->getPixelFormat()==gsc::CaptureSettings::RGBA) ? GL_RGBA : GL_RGB;
    

    viewer.setThreadingModel(osgViewer::Viewer::SingleThreaded);
    viewer.realize();

    // set up screen shot
    osg::ref_ptr<ScreenShot> screenShot = new ScreenShot(pixelFormat, fc->getOutputImageFlip());
    {

        osgViewer::Viewer::Cameras cameras;
        viewer.getCameras(cameras);
        if (cameras.size()>1)
        {
            unsigned int cameraNum = 0;
            for(osgViewer::Viewer::Cameras::iterator itr = cameras.begin();
                itr != cameras.end();
                ++itr, ++cameraNum)
            {
                osg::Camera* camera = *itr;
                camera->setFinalDrawCallback(screenShot.get());
                screenShot->_cameraNumMap[camera] = cameraNum;
            }
        }
        else if (cameras.size()==1)
        {
            osg::Camera* camera = cameras.front();
            camera->setFinalDrawCallback(screenShot.get());
        }
        else
        {
            OSG_NOTICE<<"No usable Cameras created."<<std::endl;
            return 1;
        }
    }

    for(CaptureSettingsList::iterator itr = frameCaptureList.begin();
        itr != frameCaptureList.end();
        ++itr)
    {
        gsc::CaptureSettings* fc = itr->get();
        screenShot->_frameCapture = fc;

        osg::ref_ptr<osg::Node> model = osgDB::readNodeFile(fc->getInputFileName());
        if (!model) break;

        viewer.setSceneData(model.get());

        double simulationTime = 0.0;
                
        for(unsigned int i=0; i<fc->getNumberOfFrames(); ++i)
        {
            OSG_NOTICE<<"fc.getOutputFileName("<<i<<")="<<fc->getOutputFileName(i)<<std::endl;

            viewer.advance(simulationTime);
            
            gsc::CaptureSettings::Properties& pl = fc->getProperties();
            for(gsc::CaptureSettings::Properties::iterator plitr = pl.begin();
                plitr != pl.end();
                ++plitr)
            {
                (*plitr)->update(&viewer);
            }

            viewer.eventTraversal();
            viewer.updateTraversal();
            viewer.renderingTraversals();

            // advance simulationTime and number of frames rendered
            simulationTime += 1.0/fc->getFrameRate();
        }
    }

    osg::ref_ptr<osg::Object> object = new osgGA::StateSetManipulator;
    osg::ref_ptr<osgGA::StateSetManipulator> ss = dynamic_cast<osgGA::StateSetManipulator*>(object.get());
   
    return 0;
}
Example #12
    void Step3(int v, int w)
    {
        int i,k,c,c1,c2,c3;
        int wArray[100];

        DEBUG1(printf("debug - step 3: v=%d, w=%d\n",v,w));

        for (c=1;c<=degree+1;c++)
            if (vertexAcrossColor[v][c]==(-1) && vertexAcrossColor[w][c]==(-1))
                break;
        if (c<=degree+1)
        {
            setColor(v,w,c);
            return; // back to Step 2 loop
        }

        for (c1=1;
                c1<=degree+1 && vertexAcrossColor[v][c1]>(-1);
                c1++)
            ;
        if (c1>degree+1)
        {
            printf("Fatal 1\n");
            exit(0);
        }
        DEBUG1(printf("debug - c1=%d\n",c1));
        for (c2=1;
                c2<=degree+1 && vertexAcrossColor[w][c2]>(-1);
                c2++)
            ;
        if (c2>degree+1)
        {
            printf("Fatal 2\n");
            exit(0);
        }
        DEBUG1(printf("debug - c2=%d\n",c2));

        while (1)
        {
            // Step 4

            DEBUG1(printf("debug - step 4\n"));

            wArray[0]=w;
            k=0;
            while (1)
            {
                DEBUG1(printf("debug - step 4:  searching for k=%d wArray[%d]=%d\n",k,k,wArray[k]));
                if (k%2==0)
                    i=vertexAcrossColor[wArray[k]][c1];
                else
                    i=vertexAcrossColor[wArray[k]][c2];
                if (i==(-1))
                    break;
                k++;
                wArray[k]=i;
            }
            if (v!=wArray[k])
            {
                // Step 5

                DEBUG1(printf("debug - step 5\n"));

                for (i=0;i<k;i+=2)
                    clearColor(wArray[i],wArray[i+1],c1);
                for (i=1;i<k;i+=2)
                    clearColor(wArray[i],wArray[i+1],c2);
                setColor(v,w,c1);
                for (i=0;i<k;i+=2)
                    setColor(wArray[i],wArray[i+1],c2);
                for (i=1;i<k;i+=2)
                    setColor(wArray[i],wArray[i+1],c1);
                return; // Back to Step 2 loop
            }

            // Step 6

            DEBUG1(printf("debug - step 6\n"));

            for (c3=1;c3<=degree+1;c3++)
                if (vertexAcrossColor[v][c3]==(-1) &&
                        vertexAcrossColor[wArray[k-1]][c3]==(-1))
                    break;
            if (c3<=degree+1)
            {
                clearColor(v,wArray[k-1],c2);
                setColor(v,w,c2);
                setColor(v,wArray[k-1],c3);
                return; // Back to Step 2 loop
            }

            // Step 7

            DEBUG1(printf("debug - step 7\n"));

            for (c3=1;c3<=degree+1;c3++)
                if (vertexAcrossColor[wArray[k-1]][c3]==(-1))
                    break;
            if (c3>degree+1)
            {
                printf("Fatal 3\n");
                exit(0);
            }
            clearColor(v,wArray[k-1],c2);
            setColor(v,w,c2);
            w=wArray[k-1];
            c2=c3;
        }
    }
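These numbered steps appear to follow the constructive proof of Vizing's theorem (edge coloring with degree+1 colors): Step 3 looks for a color free at both endpoints; Steps 4-5 build a path from w that alternates between colors c1 and c2 and, when it does not end at v, swap its colors; Steps 6-7 otherwise move the uncolored edge one step along that path and retry with a new free color.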
Example #13
//! ---|> Evaluator
void OccOverheadEvaluator::measure(FrameContext & context,Node & node,const Geometry::Rect & /*r*/) {

	//float currentRenderingTime=0;
	//int numTests=0;
	//int numVBO=0;
	int numPoly=0;
	Util::Color4f clearColor(0.5f, 0.5f, 0.9f, 1.0f);
	{ // 1. normal render pass
//        rc.clearScreen(0.5,0.5,0.9);
//        context.getRenderingContext().finish();
//        node.display(rc,renderingFlags);
		context.getRenderingContext().clearScreen(clearColor);
		node.display(context,renderingFlags|USE_WORLD_MATRIX);

		context.getRenderingContext().clearScreen(clearColor);
		context.getRenderingContext().finish();
		context.getStatistics().beginFrame();
		node.display(context,renderingFlags|USE_WORLD_MATRIX);
		context.getRenderingContext().finish();
		context.getStatistics().endFrame();
		numPoly=static_cast<int>(context.getStatistics().getValueAsDouble(context.getStatistics().getTrianglesCounter()));
	}

	// 2. test all group nodes if their bb is visible and collect them
	std::deque<GroupNode *> visibleGroupNodes;

	{
		// collect geometry nodes in frustum
		const auto groupNodesInVFList = collectNodesInFrustum<GroupNode>(&node,context.getCamera()->getFrustum());

		// setup occlusion queries
		int numQueries=groupNodesInVFList.size();
		if (numQueries==0) {
			values->push_back(nullptr); // TODO!!!
			return; // TODO??
		}

		auto queries = new Rendering::OcclusionQuery[numQueries];

		//  test bounding boxes
		context.getRenderingContext().pushAndSetDepthBuffer(Rendering::DepthBufferParameters(true, true, Rendering::Comparison::LEQUAL));
		int i=0;
		// start queries
		Rendering::OcclusionQuery::enableTestMode(context.getRenderingContext());
		for(const auto & groupNode : groupNodesInVFList) {
			queries[i].begin();
//            queries[i].fastBoxTest((*it)->getWorldBB());
			Rendering::drawAbsBox(context.getRenderingContext(), groupNode->getWorldBB() );
			queries[i].end();
			++i;
		}
		Rendering::OcclusionQuery::disableTestMode(context.getRenderingContext());
		i=0;
		// get results
		for(const auto & groupNode : groupNodesInVFList) {
			Geometry::Box extendedBox=groupNode->getWorldBB();
			extendedBox.resizeAbs(context.getCamera()->getNearPlane());

			if (queries[i].getResult() > 0 || extendedBox.contains(context.getCamera()->getWorldOrigin()) ) {
				visibleGroupNodes.push_back(groupNode);
			}
			++i;
		}
		context.getRenderingContext().popDepthBuffer();

		delete [] queries;
	}
	std::deque<GeometryNode *> geoNodesInVisibleGroupNodes;

	// 3. collect all direct geometry-node children
	{
		struct GeoChildrenCollector:public NodeVisitor {
			GeoChildrenCollector(std::deque<GeometryNode*> & _geoNodes,FrameContext & _context):
				geoNodes(_geoNodes),firstNode(true),frameContext(_context) {}
			virtual ~GeoChildrenCollector() {}

			std::deque<GeometryNode*> & geoNodes;
			bool firstNode;
			FrameContext & frameContext;

			// ---|> NodeVisitor
			NodeVisitor::status enter(Node * _node) override {
				// if this is the first call, continue...
				if(firstNode) {
					firstNode=false;
					return CONTINUE_TRAVERSAL;
				}
				// else collect the geometry children and break the traversal.
				GeometryNode * gn=dynamic_cast<GeometryNode *>(_node);
				if(gn){
					if(frameContext.getCamera()->testBoxFrustumIntersection( _node->getWorldBB()) != Geometry::Frustum::intersection_t::OUTSIDE)
						geoNodes.push_back(gn);
				}
				return BREAK_TRAVERSAL;
			}
		};
		for (auto & visibleGroupNode : visibleGroupNodes) {
			GeoChildrenCollector vis(geoNodesInVisibleGroupNodes,context);
			(visibleGroupNode)->traverse(vis);
		}

	}

	// 4. clear screen and measure time drawing children
	float perfectRenderingTime=0;
	{
		context.getRenderingContext().clearScreen(clearColor);

		context.getRenderingContext().finish();
		Util::Timer timer;
		timer.reset();

		for (auto & geoNodesInVisibleGroupNode : geoNodesInVisibleGroupNodes) {
			context.displayNode((geoNodesInVisibleGroupNode),renderingFlags|USE_WORLD_MATRIX);
		}

		context.getRenderingContext().finish();
		timer.stop();
		perfectRenderingTime=timer.getMilliseconds();
	}

//    // Test-costs:
//    float result=numTests!=0?(currentRenderingTime-perfectRenderingTime)/numTests:0;

	// currentRenderingTime-perfRendering:
//    float result=currentRenderingTime-perfectRenderingTime;

	float result=numPoly>0?perfectRenderingTime*1000/numPoly:0;

//    std::cout <<currentRenderingTime<<"-"<<perfectRenderingTime<<"="<<result<<"\t"<<numTests<<"\n";
//    std::cout <<numVBO<<":"<<geoNodesInVisibleGroupNodes.size()<<"\n";

//   float result=perfectRenderingTime;
//    values.push_back(currentRenderingTime);
//    values.push_back(perfectRenderingTime);
	values->push_back(new Util::_NumberAttribute<float>(result));

//    if(mode==SINGLE_VALUE){
//        if(result>renderingTime || renderingTime==0)
//            renderingTime=result;
//    }else{
//        values.push_back(result);
//    }

//    renderingTime=0;
}
Example #14
void IrrDriver::renderGLSL(float dt)
{
    BoundingBoxes.clear();
    World *world = World::getWorld(); // Never NULL.

    Track *track = world->getTrack();

    for (unsigned i = 0; i < PowerupManager::POWERUP_MAX; i++)
    {
        scene::IMesh *mesh = powerup_manager->m_all_meshes[i];
        if (!mesh)
            continue;
        for (unsigned j = 0; j < mesh->getMeshBufferCount(); j++)
        {
            scene::IMeshBuffer *mb = mesh->getMeshBuffer(j);
            if (!mb)
                continue;
            for (unsigned k = 0; k < 4; k++)
            {
                video::ITexture *tex = mb->getMaterial().getTexture(k);
                if (!tex)
                    continue;
                compressTexture(tex, true);
            }
        }
    }


    // Overrides
    video::SOverrideMaterial &overridemat = m_video_driver->getOverrideMaterial();
    overridemat.EnablePasses = scene::ESNRP_SOLID | scene::ESNRP_TRANSPARENT;
    overridemat.EnableFlags = 0;

    if (m_wireframe)
    {
        overridemat.Material.Wireframe = 1;
        overridemat.EnableFlags |= video::EMF_WIREFRAME;
    }
    if (m_mipviz)
    {
        overridemat.Material.MaterialType = m_shaders->getShader(ES_MIPVIZ);
        overridemat.EnableFlags |= video::EMF_MATERIAL_TYPE;
        overridemat.EnablePasses = scene::ESNRP_SOLID;
    }

    // Get a list of all glowing things. The driver's list contains the static ones,
    // here we add items, as they may disappear each frame.
    std::vector<GlowData> glows = m_glowing;

    ItemManager * const items = ItemManager::get();
    const u32 itemcount = items->getNumberOfItems();
    u32 i;

    for (i = 0; i < itemcount; i++)
    {
        Item * const item = items->getItem(i);
        if (!item) continue;
        const Item::ItemType type = item->getType();

        if (type != Item::ITEM_NITRO_BIG && type != Item::ITEM_NITRO_SMALL &&
            type != Item::ITEM_BONUS_BOX && type != Item::ITEM_BANANA && type != Item::ITEM_BUBBLEGUM)
            continue;

        LODNode * const lod = (LODNode *) item->getSceneNode();
        if (!lod->isVisible()) continue;

        const int level = lod->getLevel();
        if (level < 0) continue;

        scene::ISceneNode * const node = lod->getAllNodes()[level];
        node->updateAbsolutePosition();

        GlowData dat;
        dat.node = node;

        dat.r = 1.0f;
        dat.g = 1.0f;
        dat.b = 1.0f;

        const video::SColorf &c = ItemManager::getGlowColor(type);
        dat.r = c.getRed();
        dat.g = c.getGreen();
        dat.b = c.getBlue();

        glows.push_back(dat);
    }

    // Start the RTT for post-processing.
    // We do this before beginScene() because we want to capture the glClear()
    // because of tracks that do not have skyboxes (generally add-on tracks)
    m_post_processing->begin();

    RaceGUIBase *rg = world->getRaceGUI();
    if (rg) rg->update(dt);

    if (!CVS->isDefferedEnabled())
    {
        SColor clearColor(0, 150, 150, 150);
        if (World::getWorld() != NULL)
            clearColor = World::getWorld()->getClearColor();

        glClear(GL_COLOR_BUFFER_BIT);
        glDepthMask(GL_TRUE);
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        glClearColor(clearColor.getRed() / 255.f, clearColor.getGreen() / 255.f,
            clearColor.getBlue() / 255.f, clearColor.getAlpha() / 255.f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    }

    for(unsigned int cam = 0; cam < Camera::getNumCameras(); cam++)
    {
        Camera * const camera = Camera::getCamera(cam);
        scene::ICameraSceneNode * const camnode = camera->getCameraSceneNode();

        std::ostringstream oss;
        oss << "drawAll() for kart " << cam;
        PROFILER_PUSH_CPU_MARKER(oss.str().c_str(), (cam+1)*60,
                                 0x00, 0x00);
        camera->activate(!CVS->isDefferedEnabled());
        rg->preRenderCallback(camera);   // adjusts start referee
        m_scene_manager->setActiveCamera(camnode);

        const core::recti &viewport = camera->getViewport();

        if (World::getWorld() && World::getWorld()->getTrack()->hasShadows() && !SphericalHarmonicsTextures.empty())
            irr_driver->getSceneManager()->setAmbientLight(SColor(0, 0, 0, 0));

        // TODO: put this outside of the rendering loop
        if (!m_skybox_ready)
        {
            prepareSkybox();
            m_skybox_ready = true;
        }
        if (!CVS->isDefferedEnabled())
            glEnable(GL_FRAMEBUFFER_SRGB);

        PROFILER_PUSH_CPU_MARKER("Update Light Info", 0xFF, 0x0, 0x0);
        unsigned plc = UpdateLightsInfo(camnode, dt);
        PROFILER_POP_CPU_MARKER();
        PROFILER_PUSH_CPU_MARKER("UBO upload", 0x0, 0xFF, 0x0);
        computeMatrixesAndCameras(camnode, viewport.LowerRightCorner.X - viewport.UpperLeftCorner.X, viewport.LowerRightCorner.Y - viewport.UpperLeftCorner.Y);
        uploadLightingData();
        PROFILER_POP_CPU_MARKER();
        renderScene(camnode, plc, glows, dt, track->hasShadows(), false);

        // Render bounding boxes
        if (irr_driver->getBoundingBoxesViz())
        {
            glUseProgram(UtilShader::ColoredLine::getInstance()->Program);
            glBindVertexArray(UtilShader::ColoredLine::getInstance()->vao);
            glBindBuffer(GL_ARRAY_BUFFER, UtilShader::ColoredLine::getInstance()->vbo);
            UtilShader::ColoredLine::getInstance()->setUniforms(SColor(255, 255, 0, 0));
            const float *tmp = BoundingBoxes.data();
            for (unsigned int i = 0; i < BoundingBoxes.size(); i += 1024 * 6)
            {
                unsigned count = MIN2((int)BoundingBoxes.size() - i, 1024 * 6);
                glBufferSubData(GL_ARRAY_BUFFER, 0, count * sizeof(float), &tmp[i]);

                glDrawArrays(GL_LINES, 0, count / 3);
            }
        }

        // Debug physic
        // Note that drawAll must be called before rendering
        // the bullet debug view, since otherwise the camera
        // is not set up properly. This is only used for
        // the bullet debug view.
        if (UserConfigParams::m_artist_debug_mode)
            World::getWorld()->getPhysics()->draw();
        if (world != NULL && world->getPhysics() != NULL)
        {
            IrrDebugDrawer* debug_drawer = world->getPhysics()->getDebugDrawer();
            if (debug_drawer != NULL && debug_drawer->debugEnabled())
            {
                const std::map<video::SColor, std::vector<float> >& lines = debug_drawer->getLines();
                std::map<video::SColor, std::vector<float> >::const_iterator it;

                glUseProgram(UtilShader::ColoredLine::getInstance()->Program);
                glBindVertexArray(UtilShader::ColoredLine::getInstance()->vao);
                glBindBuffer(GL_ARRAY_BUFFER, UtilShader::ColoredLine::getInstance()->vbo);
                for (it = lines.begin(); it != lines.end(); it++)
                {
                    UtilShader::ColoredLine::getInstance()->setUniforms(it->first);
                    const std::vector<float> &vertex = it->second;
                    const float *tmp = vertex.data();
                    for (unsigned int i = 0; i < vertex.size(); i += 1024 * 6)
                    {
                        unsigned count = MIN2((int)vertex.size() - i, 1024 * 6);
                        glBufferSubData(GL_ARRAY_BUFFER, 0, count * sizeof(float), &tmp[i]);

                        glDrawArrays(GL_LINES, 0, count / 3);
                    }
                }
                glUseProgram(0);
                glBindVertexArray(0);
            }
        }

        // Render the post-processed scene
        if (CVS->isDefferedEnabled())
        {
            bool isRace = StateManager::get()->getGameState() == GUIEngine::GAME;
            FrameBuffer *fbo = m_post_processing->render(camnode, isRace);

            if (irr_driver->getNormals())
                irr_driver->getFBO(FBO_NORMAL_AND_DEPTHS).BlitToDefault(viewport.UpperLeftCorner.X, viewport.UpperLeftCorner.Y, viewport.LowerRightCorner.X, viewport.LowerRightCorner.Y);
            else if (irr_driver->getSSAOViz())
            {
                glBindFramebuffer(GL_FRAMEBUFFER, 0);
                glViewport(viewport.UpperLeftCorner.X, viewport.UpperLeftCorner.Y, viewport.LowerRightCorner.X, viewport.LowerRightCorner.Y);
                m_post_processing->renderPassThrough(m_rtts->getFBO(FBO_HALF1_R).getRTT()[0], viewport.LowerRightCorner.X - viewport.UpperLeftCorner.X, viewport.LowerRightCorner.Y - viewport.UpperLeftCorner.Y);
            }
            else if (irr_driver->getRSM())
            {
                glBindFramebuffer(GL_FRAMEBUFFER, 0);
                glViewport(viewport.UpperLeftCorner.X, viewport.UpperLeftCorner.Y, viewport.LowerRightCorner.X, viewport.LowerRightCorner.Y);
                m_post_processing->renderPassThrough(m_rtts->getRSM().getRTT()[0], viewport.LowerRightCorner.X - viewport.UpperLeftCorner.X, viewport.LowerRightCorner.Y - viewport.UpperLeftCorner.Y);
            }
            else if (irr_driver->getShadowViz())
            {
                renderShadowsDebug();
            }
            else
            {
                glEnable(GL_FRAMEBUFFER_SRGB);
                glBindFramebuffer(GL_FRAMEBUFFER, 0);
                if (CVS->isDefferedEnabled())
                    camera->activate();
                m_post_processing->renderPassThrough(fbo->getRTT()[0], viewport.LowerRightCorner.X - viewport.UpperLeftCorner.X, viewport.LowerRightCorner.Y - viewport.UpperLeftCorner.Y);
                glDisable(GL_FRAMEBUFFER_SRGB);
            }
        }
        // Save projection-view matrix for the next frame
        camera->setPreviousPVMatrix(m_ProjViewMatrix);

        PROFILER_POP_CPU_MARKER();
    }   // for i<world->getNumKarts()

    // Use full screen size
    float tmp[2];
    tmp[0] = float(m_actual_screen_size.Width);
    tmp[1] = float(m_actual_screen_size.Height);
    glBindBuffer(GL_UNIFORM_BUFFER, SharedObject::ViewProjectionMatrixesUBO);
    glBufferSubData(GL_UNIFORM_BUFFER, (16 * 9) * sizeof(float), 2 * sizeof(float), tmp);

    glBindVertexArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glUseProgram(0);

    // Set the viewport back to the full screen for race gui
    m_video_driver->setViewPort(core::recti(0, 0,
        irr_driver->getActualScreenSize().Width,
        irr_driver->getActualScreenSize().Height));

    for(unsigned int i=0; i<Camera::getNumCameras(); i++)
    {
        Camera *camera = Camera::getCamera(i);
        std::ostringstream oss;
        oss << "renderPlayerView() for kart " << i;

        PROFILER_PUSH_CPU_MARKER(oss.str().c_str(), 0x00, 0x00, (i+1)*60);
        rg->renderPlayerView(camera, dt);

        PROFILER_POP_CPU_MARKER();
    }  // for i<getNumKarts

    {
        ScopedGPUTimer Timer(getGPUTimer(Q_GUI));
        PROFILER_PUSH_CPU_MARKER("GUIEngine", 0x75, 0x75, 0x75);
        // Either render the gui, or the global elements of the race gui.
        GUIEngine::render(dt);
        PROFILER_POP_CPU_MARKER();
    }

    // Render the profiler
    if(UserConfigParams::m_profiler_enabled)
    {
        PROFILER_DRAW();
    }


#ifdef DEBUG
    drawDebugMeshes();
#endif


    PROFILER_PUSH_CPU_MARKER("EndSccene", 0x45, 0x75, 0x45);
    m_video_driver->endScene();
    PROFILER_POP_CPU_MARKER();

    getPostProcessing()->update(dt);
}
Example #15
void IrrDriver::renderScene(scene::ICameraSceneNode * const camnode, unsigned pointlightcount, std::vector<GlowData>& glows, float dt, bool hasShadow, bool forceRTT)
{
    glBindBufferBase(GL_UNIFORM_BUFFER, 0, SharedObject::ViewProjectionMatrixesUBO);
    glBindBufferBase(GL_UNIFORM_BUFFER, 1, SharedObject::LightingDataUBO);
    m_scene_manager->setActiveCamera(camnode);

    PROFILER_PUSH_CPU_MARKER("- Draw Call Generation", 0xFF, 0xFF, 0xFF);
    PrepareDrawCalls(camnode);
    PROFILER_POP_CPU_MARKER();
    // Shadows
    {
        // To avoid wrong culling, use the largest view possible
        m_scene_manager->setActiveCamera(m_suncam);
        if (CVS->isDefferedEnabled() &&
            CVS->isShadowEnabled() && hasShadow)
        {
            PROFILER_PUSH_CPU_MARKER("- Shadow", 0x30, 0x6F, 0x90);
            renderShadows();
            PROFILER_POP_CPU_MARKER();
            if (CVS->isGlobalIlluminationEnabled())
            {
                PROFILER_PUSH_CPU_MARKER("- RSM", 0xFF, 0x0, 0xFF);
                renderRSM();
                PROFILER_POP_CPU_MARKER();
            }
        }
        m_scene_manager->setActiveCamera(camnode);

    }

    PROFILER_PUSH_CPU_MARKER("- Solid Pass 1", 0xFF, 0x00, 0x00);
    glDepthMask(GL_TRUE);
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_BLEND);
    glEnable(GL_CULL_FACE);
    if (CVS->isDefferedEnabled() || forceRTT)
    {
        m_rtts->getFBO(FBO_NORMAL_AND_DEPTHS).Bind();
        glClearColor(0., 0., 0., 0.);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
        renderSolidFirstPass();
    }
    else
    {
        // We need a cleared depth buffer for some effect (eg particles depth blending)
        if (GraphicsRestrictions::isDisabled(GraphicsRestrictions::GR_FRAMEBUFFER_SRGB_WORKING))
            glDisable(GL_FRAMEBUFFER_SRGB);
        m_rtts->getFBO(FBO_NORMAL_AND_DEPTHS).Bind();
        // Bind() modifies the viewport. In order not to affect anything else,
        // the viewport is just reset here and not removed in Bind().
        const core::recti &vp = Camera::getActiveCamera()->getViewport();
        glViewport(vp.UpperLeftCorner.X,
                   irr_driver->getActualScreenSize().Height - vp.LowerRightCorner.Y,
                   vp.LowerRightCorner.X - vp.UpperLeftCorner.X,
                   vp.LowerRightCorner.Y - vp.UpperLeftCorner.Y);
        glClear(GL_DEPTH_BUFFER_BIT);
        if (GraphicsRestrictions::isDisabled(GraphicsRestrictions::GR_FRAMEBUFFER_SRGB_WORKING))
            glEnable(GL_FRAMEBUFFER_SRGB);
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }
    PROFILER_POP_CPU_MARKER();



    // Lights
    {
        PROFILER_PUSH_CPU_MARKER("- Light", 0x00, 0xFF, 0x00);
        if (CVS->isDefferedEnabled())
            renderLights(pointlightcount, hasShadow);
        PROFILER_POP_CPU_MARKER();
    }

    // Handle SSAO
    {
        PROFILER_PUSH_CPU_MARKER("- SSAO", 0xFF, 0xFF, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_SSAO));
        if (UserConfigParams::m_ssao)
            renderSSAO();
        PROFILER_POP_CPU_MARKER();
    }

    PROFILER_PUSH_CPU_MARKER("- Solid Pass 2", 0x00, 0x00, 0xFF);
    if (CVS->isDefferedEnabled() || forceRTT)
    {
        m_rtts->getFBO(FBO_COLORS).Bind();
        SColor clearColor(0, 150, 150, 150);
        if (World::getWorld() != NULL)
            clearColor = World::getWorld()->getClearColor();

        glClearColor(clearColor.getRed() / 255.f, clearColor.getGreen() / 255.f,
            clearColor.getBlue() / 255.f, clearColor.getAlpha() / 255.f);
        glClear(GL_COLOR_BUFFER_BIT);
        glDepthMask(GL_FALSE);
    }
    renderSolidSecondPass();
    PROFILER_POP_CPU_MARKER();

    if (getNormals())
    {
        m_rtts->getFBO(FBO_NORMAL_AND_DEPTHS).Bind();
        renderNormalsVisualisation();
        m_rtts->getFBO(FBO_COLORS).Bind();
    }

    // Render ambient scattering
    if (CVS->isDefferedEnabled() && World::getWorld() != NULL &&
        World::getWorld()->isFogEnabled())
    {
        PROFILER_PUSH_CPU_MARKER("- Ambient scatter", 0xFF, 0x00, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_FOG));
        renderAmbientScatter();
        PROFILER_POP_CPU_MARKER();
    }

    {
        PROFILER_PUSH_CPU_MARKER("- Skybox", 0xFF, 0x00, 0xFF);
        ScopedGPUTimer Timer(getGPUTimer(Q_SKYBOX));
        renderSkybox(camnode);
        PROFILER_POP_CPU_MARKER();
    }

    // Render discrete lights scattering
    if (CVS->isDefferedEnabled() && World::getWorld() != NULL &&
        World::getWorld()->isFogEnabled())
    {
        PROFILER_PUSH_CPU_MARKER("- PointLight Scatter", 0xFF, 0x00, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_FOG));
        renderLightsScatter(pointlightcount);
        PROFILER_POP_CPU_MARKER();
    }

    if (getRH())
    {
        glDisable(GL_BLEND);
        m_rtts->getFBO(FBO_COLORS).Bind();
        m_post_processing->renderRHDebug(m_rtts->getRH().getRTT()[0], m_rtts->getRH().getRTT()[1], m_rtts->getRH().getRTT()[2], rh_matrix, rh_extend);
    }

    if (getGI())
    {
        glDisable(GL_BLEND);
        m_rtts->getFBO(FBO_COLORS).Bind();
        m_post_processing->renderGI(rh_matrix, rh_extend, m_rtts->getRH().getRTT()[0], m_rtts->getRH().getRTT()[1], m_rtts->getRH().getRTT()[2]);
    }

    PROFILER_PUSH_CPU_MARKER("- Glow", 0xFF, 0xFF, 0x00);
    // Render anything glowing.
    if (!m_mipviz && !m_wireframe && UserConfigParams::m_glow)
    {
        ScopedGPUTimer Timer(getGPUTimer(Q_GLOW));
        irr_driver->setPhase(GLOW_PASS);
        renderGlow(glows);
    } // end glow
    PROFILER_POP_CPU_MARKER();

    PROFILER_PUSH_CPU_MARKER("- Lensflare/godray", 0x00, 0xFF, 0xFF);
    computeSunVisibility();
    PROFILER_POP_CPU_MARKER();

    // Render transparent
    {
        PROFILER_PUSH_CPU_MARKER("- Transparent Pass", 0xFF, 0x00, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_TRANSPARENT));
        renderTransparent();
        PROFILER_POP_CPU_MARKER();
    }

    m_sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);

    // Render particles
    {
        PROFILER_PUSH_CPU_MARKER("- Particles", 0xFF, 0xFF, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_PARTICLES));
        renderParticles();
        PROFILER_POP_CPU_MARKER();
    }
    if (!CVS->isDefferedEnabled() && !forceRTT)
    {
        glDisable(GL_FRAMEBUFFER_SRGB);
        glDisable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        return;
    }

    // Ensure that no object will be drawn after that by using invalid pass
    irr_driver->setPhase(PASS_COUNT);
}
Example #16
void Framebuffer::clearColor(const glm::vec4 & color)
{
    clearColor(color.r, color.g, color.b, color.a);
}
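A minimal usage sketch for this overload (the default-constructed Framebuffer and the four-argument clearColor(r, g, b, a) member it forwards to are assumed from context rather than shown here):

    Framebuffer fbo;
    fbo.clearColor(glm::vec4(0.1f, 0.2f, 0.3f, 1.0f)); // equivalent to fbo.clearColor(0.1f, 0.2f, 0.3f, 1.0f)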
Example #17
int main( int argc, char **argv )
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);
    
    // set up the usage document, in case we need to print out how to use this program.
    arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName());
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" is the application for presenting 3D interactive slide shows.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ...");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("-a","Turn auto stepping on by default");
    arguments.getApplicationUsage()->addCommandLineOption("-d <float>","Time duration in seconds between layers/slides");
    arguments.getApplicationUsage()->addCommandLineOption("-s <float> <float> <float>","width, height, distance and of the screen away from the viewer");
    arguments.getApplicationUsage()->addCommandLineOption("--viewer","Start Present3D as the viewer version.");
    arguments.getApplicationUsage()->addCommandLineOption("--authoring","Start Present3D as the authoring version, license required.");
    arguments.getApplicationUsage()->addCommandLineOption("--master","Start Present3D as the master version, license required.");
    arguments.getApplicationUsage()->addCommandLineOption("--slave","Start Present3D as the slave version, license required.");
    arguments.getApplicationUsage()->addCommandLineOption("--publishing","Start Present3D as the publishing version, license required.");
    arguments.getApplicationUsage()->addCommandLineOption("--timeDelayOnNewSlideWithMovies","Set the time delay on new slide with movies, done to allow movie threads to get in sync with rendering thread.");
    arguments.getApplicationUsage()->addCommandLineOption("--targetFrameRate","Set the target frame rate, defaults to 80Hz.");
    arguments.getApplicationUsage()->addCommandLineOption("--version","Report the Present3D version.");
    arguments.getApplicationUsage()->addCommandLineOption("--print <filename>","Print out slides to a series of image files.");
    arguments.getApplicationUsage()->addCommandLineOption("--html <filename>","Print out slides to a series of html & image files.");
    arguments.getApplicationUsage()->addCommandLineOption("--loop","Switch on looping of presentation.");
    arguments.getApplicationUsage()->addCommandLineOption("--devices","Print the Video input capability via QuickTime and exit.");

    // add alias from xml to p3d to provide backwards compatibility for old p3d files.
    osgDB::Registry::instance()->addFileExtensionAlias("xml","p3d");

    // if the user requests device video capability.
    if (arguments.read("-devices") || arguments.read("--devices"))
    {
        // Force load QuickTime plugin, probe video capability, exit
        osgDB::readImageFile("devices.live");
        return 1;
    }


    // read any env vars from the presentations before we create the viewer, to make sure the viewer
    // utilises these env vars
    if (p3d::readEnvVars(arguments))
    {
        osg::DisplaySettings::instance()->readEnvironmentalVariables();
    }

    // set up any logins required for http access
    std::string url, username, password;
    while(arguments.read("--login",url, username, password))
    {
        if (!osgDB::Registry::instance()->getAuthenticationMap())
        {
            osgDB::Registry::instance()->setAuthenticationMap(new osgDB::AuthenticationMap);
        }
        osgDB::Registry::instance()->getAuthenticationMap()->addAuthenticationDetails(
            url,
            new osgDB::AuthenticationDetails(username, password)
        );
    }



#ifdef USE_SDL
    SDLIntegration sdlIntegration;
    
    osg::notify(osg::INFO)<<"USE_SDL"<<std::endl;
#endif    
    
    bool doSetViewer = true;
    std::string configurationFile;

    // check env vars for configuration file
    const char* str = getenv("PRESENT3D_CONFIG_FILE");
    if (!str) str = getenv("OSG_CONFIG_FILE");
    if (str) configurationFile = str;

    // check command line parameters for configuration file.
    while (arguments.read("-c",configurationFile)) {}

    osg::Vec4 clearColor(0.0f,0.0f,0.0f,0.0f);
    
    while (arguments.read("--clear-color",clearColor[0],clearColor[1],clearColor[2],clearColor[3])) {}

    std::string filename;
    if (arguments.read("--spell-check",filename))
    {
        p3d::SpellChecker spellChecker;
        spellChecker.checkP3dXml(filename);
        return 1;
    }

    if (arguments.read("--strip-text",filename))
    {
        p3d::XmlPatcher patcher;
        // patcher.stripP3dXml(filename, osg::notify(osg::NOTICE));

        osg::ref_ptr<osgDB::XmlNode> newNode = patcher.simplifyP3dXml(filename);
        if (newNode.valid())
        {
            newNode->write(std::cout);
        }
        return 1;
    }

    std::string lhs_filename, rhs_filename;
    if (arguments.read("--merge",lhs_filename, rhs_filename))
    {
        p3d::XmlPatcher patcher;
        osg::ref_ptr<osgDB::XmlNode> newNode = patcher.mergeP3dXml(lhs_filename, rhs_filename);
        if (newNode.valid())
        {
            newNode->write(std::cout);
        }
        return 1;
    }


    // construct the viewer.
    osgViewer::Viewer viewer(arguments);
    
    // apply the clear colour (black unless overridden by --clear-color).
    viewer.getCamera()->setClearColor(clearColor);

    if (!configurationFile.empty())
    {
        viewer.readConfiguration(configurationFile);
        doSetViewer = false;
    }

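    // add any input devices listed in the P3D_DEVICE environment variable.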
    const char* p3dDevice = getenv("P3D_DEVICE");
    if (p3dDevice)
    {
        osgDB::StringList devices;
        osgDB::split(p3dDevice, devices);
        for(osgDB::StringList::iterator i = devices.begin(); i != devices.end(); ++i)
        {
            addDeviceTo(viewer, *i);
        }
    }


    std::string device;
    while (arguments.read("--device", device))
    {
        addDeviceTo(viewer, device);
        
    }

    if (arguments.read("--http-control"))
    {
    
        std::string server_address = "localhost";
        std::string server_port = "8080";
        std::string document_root = "htdocs";

        while (arguments.read("--http-server-address", server_address)) {}
        while (arguments.read("--http-server-port", server_port)) {}
        while (arguments.read("--http-document-root", document_root)) {}

        osg::ref_ptr<osgDB::Options> device_options = new osgDB::Options("documentRegisteredHandlers");

        osg::ref_ptr<osgGA::Device> rest_http_device = osgDB::readFile<osgGA::Device>(server_address+":"+server_port+"/"+document_root+".resthttp", device_options.get());
        if (rest_http_device.valid())
        {
            viewer.addDevice(rest_http_device.get());
        }
    }
    
    // set up stereo masks
    viewer.getCamera()->setCullMask(0xffffffff);
    viewer.getCamera()->setCullMaskLeft(0x00000001);
    viewer.getCamera()->setCullMaskRight(0x00000002);   

    // set up the camera manipulators.
    {
        osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator;

        keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() );
        keyswitchManipulator->addMatrixManipulator( '2', "Flight", new osgGA::FlightManipulator() );
        keyswitchManipulator->addMatrixManipulator( '3', "Drive", new osgGA::DriveManipulator() );
        keyswitchManipulator->addMatrixManipulator( '4', "Terrain", new osgGA::TerrainManipulator() );

        std::string pathfile;
        char keyForAnimationPath = '5';
        while (arguments.read("-p",pathfile))
        {
            osgGA::AnimationPathManipulator* apm = new osgGA::AnimationPathManipulator(pathfile);
            if (apm && apm->valid())
            {
                unsigned int num = keyswitchManipulator->getNumMatrixManipulators();
                keyswitchManipulator->addMatrixManipulator( keyForAnimationPath, "Path", apm );
                keyswitchManipulator->selectMatrixManipulator(num);
                ++keyForAnimationPath;
            }
        }

        viewer.setCameraManipulator( keyswitchManipulator.get() );
    }

    // add the state manipulator
    osg::ref_ptr<osgGA::StateSetManipulator> ssManipulator = new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet());
    ssManipulator->setKeyEventToggleTexturing('e');
    viewer.addEventHandler( ssManipulator.get() );

    // add the stats handler
    viewer.addEventHandler( new osgViewer::StatsHandler() );

    viewer.addEventHandler( new osgViewer::WindowSizeHandler() );

    // need to address.
    // viewer.getScene()->getUpdateVisitor()->setTraversalMode(osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN);


    const char* p3dCursor = getenv("P3D_CURSOR");
    std::string cursorFileName( p3dCursor ? p3dCursor : "");
    while (arguments.read("--cursor",cursorFileName)) {}


    while (arguments.read("--set-viewer")) { doSetViewer = true; }
    
    while (arguments.read("--no-set-viewer")) { doSetViewer = false; }
    

    // cluster related entries.
    int socketNumber=8100;
    while (arguments.read("-n",socketNumber)) {}

    float camera_fov=-1.0f;
    while (arguments.read("-f",camera_fov)) {}

    float camera_offset=45.0f;
    while (arguments.read("-o",camera_offset)) {}


    std::string exportName;
    while (arguments.read("--print",exportName)) {}

    while (arguments.read("--html",exportName)) {}

    // read any time delay argument.
    float timeDelayBetweenSlides = 1.0f;
    while (arguments.read("-d",timeDelayBetweenSlides)) {}

    bool autoSteppingActive = false;
    while (arguments.read("-a")) autoSteppingActive = true;

    bool loopPresentation = false;
    while (arguments.read("--loop")) loopPresentation = true;

    {
        // set up the default traversal mode settings for the update visitor,
        // defaulting to osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN.
        osg::NodeVisitor::TraversalMode updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN; // viewer.getUpdateVisitor()->getTraversalMode();

        const char* p3dUpdateStr = getenv("P3D_UPDATE");
        if (p3dUpdateStr)
        {
            std::string updateStr(p3dUpdateStr);
            if (updateStr=="active" || updateStr=="Active" || updateStr=="ACTIVE") updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN;
            else if (updateStr=="all" || updateStr=="All" || updateStr=="ALL") updateTraversalMode = osg::NodeVisitor::TRAVERSE_ALL_CHILDREN;
        }

        while(arguments.read("--update-active")) updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN;
        while(arguments.read("--update-all")) updateTraversalMode = osg::NodeVisitor::TRAVERSE_ALL_CHILDREN;

        viewer.getUpdateVisitor()->setTraversalMode(updateTraversalMode);
    }

    
    // register the slide event handler - which moves the presentation from slide to slide, layer to layer.
    osg::ref_ptr<osgPresentation::SlideEventHandler> seh = new osgPresentation::SlideEventHandler(&viewer);
    viewer.addEventHandler(seh.get());

    seh->setAutoSteppingActive(autoSteppingActive);
    seh->setTimeDelayBetweenSlides(timeDelayBetweenSlides);
    seh->setLoopPresentation(loopPresentation);

    double targetFrameRate = 80.0;
    while (arguments.read("--targetFrameRate",targetFrameRate)) {}


    // set the time delay
    float timeDelayOnNewSlideWithMovies = 0.4f;
    while (arguments.read("--timeDelayOnNewSlideWithMovies",timeDelayOnNewSlideWithMovies)) {}
    seh->setTimeDelayOnNewSlideWithMovies(timeDelayOnNewSlideWithMovies);

    // set up optimizer options
    unsigned int optimizer_options = osgUtil::Optimizer::DEFAULT_OPTIMIZATIONS;
    bool release_and_compile = false;
    while (arguments.read("--release-and-compile"))
    {
        release_and_compile = true;
    }
    seh->setReleaseAndCompileOnEachNewSlide(release_and_compile);
    if (release_and_compile)
    {
        // make sure that imagery stays around after being applied to textures.
        viewer.getDatabasePager()->setUnrefImageDataAfterApplyPolicy(true,false);
        optimizer_options &= ~osgUtil::Optimizer::OPTIMIZE_TEXTURE_SETTINGS;
    }
// 
//     osgDB::Registry::instance()->getOrCreateDatabasePager()->setUnrefImageDataAfterApplyPolicy(true,false);
//     optimizer_options &= ~osgUtil::Optimizer::OPTIMIZE_TEXTURE_SETTINGS;
//     osg::Texture::getTextureObjectManager()->setExpiryDelay(0.0f);
//     osgDB::Registry::instance()->getOrCreateDatabasePager()->setExpiryDelay(1.0f);

    // register the handler for modifying the point size
    osg::ref_ptr<PointsEventHandler> peh = new PointsEventHandler;
    viewer.addEventHandler(peh.get());

    // add the screen capture handler
    std::string screenCaptureFilename = "screen_shot.jpg";
    while(arguments.read("--screenshot", screenCaptureFilename)) {}
    osg::ref_ptr<osgViewer::ScreenCaptureHandler::WriteToFile> writeFile = new osgViewer::ScreenCaptureHandler::WriteToFile(
        osgDB::getNameLessExtension(screenCaptureFilename),
        osgDB::getFileExtension(screenCaptureFilename) );
    osg::ref_ptr<osgViewer::ScreenCaptureHandler> screenCaptureHandler = new osgViewer::ScreenCaptureHandler(writeFile.get());
    screenCaptureHandler->setKeyEventTakeScreenShot('m');//osgGA::GUIEventAdapter::KEY_Print);
    screenCaptureHandler->setKeyEventToggleContinuousCapture('M');
    viewer.addEventHandler(screenCaptureHandler.get());

    // osg::DisplaySettings::instance()->setSplitStereoAutoAjustAspectRatio(false);

    float width = osg::DisplaySettings::instance()->getScreenWidth();
    float height = osg::DisplaySettings::instance()->getScreenHeight();
    float distance = osg::DisplaySettings::instance()->getScreenDistance();
    while (arguments.read("-s", width, height, distance)) 
    {
        osg::DisplaySettings::instance()->setScreenDistance(distance);
        osg::DisplaySettings::instance()->setScreenHeight(height);
        osg::DisplaySettings::instance()->setScreenWidth(width);
    }

    std::string outputFileName;
    while(arguments.read("--output",outputFileName)) {}


    // get details on keyboard and mouse bindings used by the viewer.
    viewer.getUsage(*arguments.getApplicationUsage());

    // if the user requests help, write it out to cout.
    if (arguments.read("-h") || arguments.read("--help"))
    {
        arguments.getApplicationUsage()->write(osg::notify(osg::NOTICE));
        return 1;
    }

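    // work out which application type to run as (viewer by default, or master/slave),
    // first from the PRESENT3D_TYPE environment variable, then from the command line.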
    P3DApplicationType P3DApplicationType = VIEWER;

    str = getenv("PRESENT3D_TYPE");
    if (str)
    {
        if (strcmp(str,"viewer")==0) P3DApplicationType = VIEWER;
        else if (strcmp(str,"master")==0) P3DApplicationType = MASTER;
        else if (strcmp(str,"slave")==0) P3DApplicationType = SLAVE;
    }
        
    while (arguments.read("--viewer")) { P3DApplicationType = VIEWER; }
    while (arguments.read("--master")) { P3DApplicationType = MASTER; }
    while (arguments.read("--slave")) { P3DApplicationType = SLAVE; }
    
    while (arguments.read("--version"))
    {
        std::string appTypeName = "invalid";
        switch(P3DApplicationType)
        {
            case(VIEWER): appTypeName = "viewer"; break;
            case(MASTER): appTypeName = "master"; break;
            case(SLAVE): appTypeName = "slave"; break;
        }

        osg::notify(osg::NOTICE)<<std::endl;
        osg::notify(osg::NOTICE)<<"Present3D "<<appTypeName<<" version : "<<s_version<<std::endl;
        osg::notify(osg::NOTICE)<<std::endl;

        return 0;
    }

    // any options left unread are converted into errors to write out later.
    //arguments.reportRemainingOptionsAsUnrecognized();

    // report any errors if they have occurred when parsing the program arguments.
    if (arguments.errors())
    {
        arguments.writeErrorMessages(osg::notify(osg::INFO));
        return 1;
    }


    // read files name from arguments.
    p3d::FileNameList xmlFiles, normalFiles;
    if (!p3d::getFileNames(arguments, xmlFiles, normalFiles))
    {
        osg::notify(osg::NOTICE)<<std::endl;
        osg::notify(osg::NOTICE)<<"No file specified, please specify and file to load."<<std::endl;
        osg::notify(osg::NOTICE)<<std::endl;
        return 1;
    }



    bool viewerInitialized = false;
    if (!xmlFiles.empty())
    {
        osg::ref_ptr<osg::Node> holdingModel = p3d::readHoldingSlide(xmlFiles.front());

        if (holdingModel.valid())
        {
            viewer.setSceneData(holdingModel.get());

            seh->selectSlide(0);

            if (!viewerInitialized)
            {
                // pass the global stateset to the point event handler so that it can
                // alter the point size of all points in the scene.
                peh->setStateSet(viewer.getCamera()->getOrCreateStateSet());

                // create the windows and run the threads.
                viewer.realize();

                if (doSetViewer) setViewer(viewer, width, height, distance);

                viewerInitialized = true;
            }

            seh->home();

            // render a frame
            viewer.frame();
        }
    }

    osg::Timer timer;
    osg::Timer_t start_tick = timer.tick();


    osg::ref_ptr<osgDB::ReaderWriter::Options> cacheAllOption = new osgDB::ReaderWriter::Options;
    cacheAllOption->setObjectCacheHint(osgDB::ReaderWriter::Options::CACHE_ALL);
    osgDB::Registry::instance()->setOptions(cacheAllOption.get());

    // read the scene from the list of file specified commandline args.
    osg::ref_ptr<osg::Node> loadedModel = p3d::readShowFiles(arguments,cacheAllOption.get()); // osgDB::readNodeFiles(arguments, cacheAllOption.get());


    osgDB::Registry::instance()->setOptions( 0 );


    // if no model has been successfully loaded report failure.
    if (!loadedModel) 
    {
        osg::notify(osg::INFO) << arguments.getApplicationName() <<": No data loaded" << std::endl;
        return 1;
    }

    osg::Timer_t end_tick = timer.tick();

    osg::notify(osg::INFO) << "Time to load = "<<timer.delta_s(start_tick,end_tick)<<std::endl;


    if (loadedModel->getNumDescriptions()>0)
    {
        for(unsigned int i=0; i<loadedModel->getNumDescriptions(); ++i)
        {
            const std::string& desc = loadedModel->getDescription(i);
            if (desc=="loop") 
            {
                osg::notify(osg::NOTICE)<<"Enabling looping"<<std::endl;
                seh->setLoopPresentation(true);
            }
            else if (desc=="auto") 
            {
                osg::notify(osg::NOTICE)<<"Enabling auto run"<<std::endl;
                seh->setAutoSteppingActive(true);
            }
        }
    }


    processLoadedModel(loadedModel, optimizer_options, cursorFileName);

    // set the scene to render
    viewer.setSceneData(loadedModel.get());

    if (!viewerInitialized)
    {
        // pass the global stateset to the point event handler so that it can
        // alter the point size of all points in the scene.
        peh->setStateSet(viewer.getCamera()->getOrCreateStateSet());

        // create the windows and run the threads.
        viewer.realize();

        if (doSetViewer) setViewer(viewer, width, height, distance);

        viewerInitialized = true;
    }


    

    // pass the model to the slide event handler so it knows which to manipulate.
    seh->set(loadedModel.get());
    seh->selectSlide(0);

    seh->home();

    if (!outputFileName.empty())
    {
        osgDB::writeNodeFile(*loadedModel,outputFileName);
        return 0;
    }
    

    if (!cursorFileName.empty())
    {
        // have to add a frame in here to avoid problems with X11 threading issue on switching off the cursor
        // not yet sure why it makes a difference, but it at least fixes the crash that would otherwise occur
        // under X11.
        viewer.frame();

        // switch off the cursor
        osgViewer::Viewer::Windows windows;
        viewer.getWindows(windows);
        for(osgViewer::Viewer::Windows::iterator itr = windows.begin();
            itr != windows.end();
            ++itr)
        {
            (*itr)->useCursor(false);
        }
    }

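    // frame timing used below to throttle the main loop to the target frame rate.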
    osg::Timer_t startOfFrameTick = osg::Timer::instance()->tick();
    double targetFrameTime = 1.0/targetFrameRate;
    
    if (exportName.empty())
    {
        // objects for managing the broadcasting and receiving of camera packets.
        CameraPacket cp;
        Broadcaster  bc;
        Receiver     rc;
        bc.setPort(static_cast<short int>(socketNumber));
        rc.setPort(static_cast<short int>(socketNumber));

        bool masterKilled = false;
        DataConverter scratchPad(1024);

        while( !viewer.done() && !masterKilled)
        {
            // wait for all cull and draw threads to complete.
            viewer.advance();

            osg::Timer_t currentTick = osg::Timer::instance()->tick();
            double deltaTime = osg::Timer::instance()->delta_s(startOfFrameTick, currentTick);


            if (deltaTime<targetFrameTime)
            {
                OpenThreads::Thread::microSleep(static_cast<unsigned int>((targetFrameTime-deltaTime)*1000000.0));
            }

            startOfFrameTick =  osg::Timer::instance()->tick();

#if 0            
            if (kmcb)
            {
                double time = kmcb->getTime();
                viewer.getFrameStamp()->setReferenceTime(time);
            }
#endif

#ifdef USE_SDL
            sdlIntegration.update(viewer);
#endif

            if (P3DApplicationType==MASTER)
            {
                // take camera zero as the guide.
                osg::Matrix modelview(viewer.getCamera()->getViewMatrix());
                
                cp.setPacket(modelview,viewer.getFrameStamp());
                
                // cp.readEventQueue(viewer);

                scratchPad.reset();
                scratchPad.write(cp);

                scratchPad.reset();
                scratchPad.read(cp);

                bc.setBuffer(scratchPad.startPtr(), scratchPad.numBytes());
                
                std::cout << "bc.sync()"<<scratchPad.numBytes()<<std::endl;

                bc.sync();
            }
            else if (P3DApplicationType==SLAVE)
            {
                rc.setBuffer(scratchPad.startPtr(), scratchPad.numBytes());

                rc.sync();
                
                scratchPad.reset();
                scratchPad.read(cp);
    
                // cp.writeEventQueue(viewer);

                if (cp.getMasterKilled()) 
                {
                    std::cout << "Received master killed."<<std::endl;
                    // break out of the while (!done) loop since we now want to shut down.
                    masterKilled = true;
                }
            }

            // pass any events that have been queued up since the last frame on to
            // the registered event handlers and the scene.
            viewer.eventTraversal();

            if (seh->getRequestReload())
            {
                OSG_INFO<<"Reload requested"<<std::endl;
                seh->setRequestReload(false);
                int previous_ActiveSlide = seh->getActiveSlide();
                int previous_ActiveLayer = seh->getActiveLayer();

                // reset time so any event key generate

                loadedModel = p3d::readShowFiles(arguments,cacheAllOption.get());
                processLoadedModel(loadedModel, optimizer_options, cursorFileName);

                if (!loadedModel)
                {
                    return 0;
                }

                viewer.setSceneData(loadedModel.get());
                seh->set(loadedModel.get());
                seh->selectSlide(previous_ActiveSlide, previous_ActiveLayer);

                continue;
                
            }

            // update the scene by traversing it with the update visitor, which will
            // call all node update callbacks and animations.
            viewer.updateTraversal();

            if (P3DApplicationType==SLAVE)
            {
                osg::Matrix modelview;
                cp.getModelView(modelview,camera_offset);

                viewer.getCamera()->setViewMatrix(modelview);
            }

            // fire off the cull and draw traversals of the scene.
            if(!masterKilled)
                viewer.renderingTraversals();
        }
    }
    else
    {
        ExportHTML::write(seh.get(), viewer, exportName);
    }
    
    return 0;
}
Exemplo n.º 18
0
int main(int argc, char* argv[])
{
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) < 0)
        fprintf(stderr, "Couldn't init SDL!\n");

    atexit(SDL_Quit);

    // Get the current desktop width & height
    const SDL_VideoInfo* videoInfo = SDL_GetVideoInfo();

    // TODO: get this from config file.
    s_config = new Config(false, false, 768/2, 1024/2);
    Config& config = *s_config;
    config.title = "glyphblaster test";

    // msaa
    if (config.msaa)
    {
        SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, 1);
        SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, config.msaaSamples);
    }

    SDL_Surface* screen;
    if (config.fullscreen)
    {
        int width = videoInfo->current_w;
        int height = videoInfo->current_h;
        int bpp = videoInfo->vfmt->BitsPerPixel;
        screen = SDL_SetVideoMode(width, height, bpp,
                                  SDL_HWSURFACE | SDL_OPENGL | SDL_FULLSCREEN);
    }
    else
    {
        screen = SDL_SetVideoMode(config.width, config.height, 32, SDL_HWSURFACE | SDL_OPENGL);
    }

    SDL_WM_SetCaption(config.title.c_str(), config.title.c_str());

    if (!screen)
        fprintf(stderr, "Couldn't create SDL screen!\n");

    // clear to opaque black
    Vector4f clearColor(0, 0, 0, 1);
    glClearColor(clearColor.x, clearColor.y, clearColor.z, clearColor.w);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    SDL_GL_SwapBuffers();

    // create the context
    GB_ERROR err;
    GB_Context* gb;
    err = GB_ContextMake(512, 3, GB_TEXTURE_FORMAT_ALPHA, &gb);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_Init Error %d\n", err);
        exit(1);
    }

    // load the sample text file (utf8-test.txt) via mmap
    int fd = open("utf8-test.txt", O_RDONLY);
    if (fd < 0) {
        fprintf(stderr, "open failed\n");
        exit(1);
    }
    struct stat s;
    if (fstat(fd, &s) < 0) {
        fprintf(stderr, "fstat failed\n errno = %d\n", errno);
        exit(1);
    }
    const uint8_t* lorem = (uint8_t*)mmap(0, s.st_size + 1, PROT_READ, MAP_PRIVATE, fd, 0);
    if (lorem == (const uint8_t*)MAP_FAILED) {
        fprintf(stderr, "mmap failed\n errno = %d\n", errno);
        exit(1);
    }
    // we are lazy, don't unmap the file.

    // create a font
    GB_Font* mainFont = NULL;
    //err = GB_FontMake(gb, "Droid-Sans/DroidSans.ttf", 20, GB_RENDER_NORMAL, GB_HINT_FORCE_AUTO, &mainFont);
    //err = GB_FontMake(gb, "Arial.ttf", 48, GB_RENDER_NORMAL, GB_HINT_DEFAULT, &mainFont);
    //err = GB_FontMake(gb, "Ayuthaya.ttf", 16, GB_RENDER_NORMAL, GB_HINT_DEFAULT, &mainFont);
    err = GB_FontMake(gb, "dejavu-fonts-ttf-2.33/ttf/DejaVuSans.ttf", 10, GB_RENDER_NORMAL, GB_HINT_DEFAULT, &mainFont);
    //err = GB_FontMake(gb, "Zar/XB Zar.ttf", 48, GB_RENDER_NORMAL, GB_HINT_DEFAULT, &mainFont);
    //err = GB_FontMake(gb, "Times New Roman.ttf", 20, GB_RENDER_NORMAL, GB_HINT_FORCE_AUTO, &mainFont);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_MakeFont Error %s\n", GB_ErrorToString(err));
        exit(1);
    }


    GB_Font* arabicFont = NULL;
    /*
    // create an arabic font
    err = GB_FontMake(gb, "Zar/XB Zar.ttf", 48, &arabicFont);
    if (err != GB_ERROR_NONE) {
    fprintf(stderr, "GB_MakeFont Error %s\n", GB_ErrorToString(err));
    exit(1);
    }
    */

    // create a text
    uint32_t origin[2] = {0, 0};
    uint32_t size[2] = {videoInfo->current_w - 1, videoInfo->current_h};
    GB_Text* helloText = NULL;
    uint32_t textColor = MakeColor(255, 255, 255, 255);
    uint32_t* userData = (uint32_t*)malloc(sizeof(uint32_t));
    *userData = textColor;
    err = GB_TextMake(gb, (uint8_t*)lorem, mainFont, userData, origin, size,
                      GB_HORIZONTAL_ALIGN_LEFT, GB_VERTICAL_ALIGN_CENTER, 0, &helloText);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_MakeText Error %s\n", GB_ErrorToString(err));
        exit(1);
    }

    //GB_TextRelease(gb, helloText);

    /*
    // أبجد hello
    const char abjad[] = {0xd8, 0xa3, 0xd8, 0xa8, 0xd8, 0xac, 0xd8, 0xaf, 0x00};
    char XXX[1024];
    sprintf(XXX, "%s hello", abjad);
    err = GB_TextMake(gb, XXX, mainFont, 0xffffffff, origin, size,
    GB_HORIZONTAL_ALIGN_CENTER, GB_VERTICAL_ALIGN_CENTER, &helloText);
    */

    int done = 0;
    while (!done)
    {
        SDL_Event event;
        while (SDL_PollEvent(&event))
        {
            switch (event.type)
            {
            case SDL_QUIT:
                done = 1;
                break;

            case SDL_MOUSEMOTION:
                if (event.motion.state & SDL_BUTTON(1)) {
                    // move touch
                }
                break;

            case SDL_MOUSEBUTTONDOWN:
                if (event.button.button == SDL_BUTTON_LEFT) {
                    // start touch
                }
                break;

            case SDL_MOUSEBUTTONUP:
                if (event.button.button == SDL_BUTTON_LEFT) {
                    // end touch
                }
                break;

            case SDL_JOYAXISMOTION:
                // stick move
                break;

            case SDL_JOYBUTTONDOWN:
            case SDL_JOYBUTTONUP:
                // joy pad press
                break;
            }
        }

        if (!done)
        {
            glClearColor(clearColor.x, clearColor.y, clearColor.z, clearColor.w);
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

            //DebugDrawGlyphCache(gb, config);

            RenderText(helloText->glyph_quads, helloText->num_glyph_quads);

            SDL_GL_SwapBuffers();
        }
    }

    err = GB_TextRelease(gb, helloText);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_ReleaseText Error %s\n", GB_ErrorToString(err));
        exit(1);
    }
    err = GB_FontRelease(gb, mainFont);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_ReleaseFont Error %s\n", GB_ErrorToString(err));
        exit(1);
    }
    if (arabicFont) {
        err = GB_FontRelease(gb, arabicFont);
        if (err != GB_ERROR_NONE) {
            fprintf(stderr, "GB_ReleaseFont Error %s\n", GB_ErrorToString(err));
            exit(1);
        }
    }
    err = GB_ContextRelease(gb);
    if (err != GB_ERROR_NONE) {
        fprintf(stderr, "GB_Shutdown Error %s\n", GB_ErrorToString(err));
        exit(1);
    }

    return 0;
}
Exemplo n.º 19
0
void Window::clearColor(glm::vec3 color) {
  clearColor(glm::vec4(color, 1.0f));
}
Exemplo n.º 20
0
//! Convenience, set glClearColor from a 4-element array.
inline void clearColor(GLclampf const color[4]) {
  clearColor(color[0], color[1], color[2], color[3]);
}
Exemplo n.º 21
0
void IrrDriver::renderScene(scene::ICameraSceneNode * const camnode, unsigned pointlightcount, std::vector<GlowData>& glows, float dt, bool hasShadow, bool forceRTT)
{
    glBindBufferBase(GL_UNIFORM_BUFFER, 0, SharedObject::ViewProjectionMatrixesUBO);
    m_scene_manager->setActiveCamera(camnode);

    PROFILER_PUSH_CPU_MARKER("- Draw Call Generation", 0xFF, 0xFF, 0xFF);
    PrepareDrawCalls(camnode);
    PROFILER_POP_CPU_MARKER();
    // Shadows
    {
        // To avoid wrong culling, use the largest view possible
        m_scene_manager->setActiveCamera(m_suncam);
        if (!m_mipviz && !m_wireframe && UserConfigParams::m_dynamic_lights &&
            UserConfigParams::m_shadows && !irr_driver->needUBOWorkaround() && hasShadow)
        {
            PROFILER_PUSH_CPU_MARKER("- Shadow", 0x30, 0x6F, 0x90);
            renderShadows();
            PROFILER_POP_CPU_MARKER();
            if (UserConfigParams::m_gi)
            {
                PROFILER_PUSH_CPU_MARKER("- RSM", 0xFF, 0x0, 0xFF);
                renderRSM();
                PROFILER_POP_CPU_MARKER();
            }
        }
        m_scene_manager->setActiveCamera(camnode);

    }

    PROFILER_PUSH_CPU_MARKER("- Solid Pass 1", 0xFF, 0x00, 0x00);
    glDepthMask(GL_TRUE);
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_BLEND);
    glEnable(GL_CULL_FACE);
    if (UserConfigParams::m_dynamic_lights || forceRTT)
    {
        m_rtts->getFBO(FBO_NORMAL_AND_DEPTHS).Bind();
        glClearColor(0., 0., 0., 0.);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
        renderSolidFirstPass();
    }
    PROFILER_POP_CPU_MARKER();



    // Lights
    {
        PROFILER_PUSH_CPU_MARKER("- Light", 0x00, 0xFF, 0x00);
        if (UserConfigParams::m_dynamic_lights)
            renderLights(pointlightcount, hasShadow);
        PROFILER_POP_CPU_MARKER();
    }

    // Handle SSAO
    {
        PROFILER_PUSH_CPU_MARKER("- SSAO", 0xFF, 0xFF, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_SSAO));
        if (UserConfigParams::m_ssao)
            renderSSAO();
        PROFILER_POP_CPU_MARKER();
    }

    PROFILER_PUSH_CPU_MARKER("- Solid Pass 2", 0x00, 0x00, 0xFF);
    if (UserConfigParams::m_dynamic_lights || forceRTT)
    {
        m_rtts->getFBO(FBO_COLORS).Bind();
        SColor clearColor(0, 150, 150, 150);
        if (World::getWorld() != NULL)
            clearColor = World::getWorld()->getClearColor();

        glClearColor(clearColor.getRed() / 255.f, clearColor.getGreen() / 255.f,
            clearColor.getBlue() / 255.f, clearColor.getAlpha() / 255.f);
        glClear(GL_COLOR_BUFFER_BIT);
        glDepthMask(GL_FALSE);
    }
    renderSolidSecondPass();
    PROFILER_POP_CPU_MARKER();

    if (getNormals())
    {
        m_rtts->getFBO(FBO_NORMAL_AND_DEPTHS).Bind();
        renderNormalsVisualisation();
        m_rtts->getFBO(FBO_COLORS).Bind();
    }

    if (UserConfigParams::m_dynamic_lights && World::getWorld() != NULL &&
        World::getWorld()->isFogEnabled())
    {
        PROFILER_PUSH_CPU_MARKER("- Fog", 0xFF, 0x00, 0x00);
        m_post_processing->renderFog();
        PROFILER_POP_CPU_MARKER();
    }

    PROFILER_PUSH_CPU_MARKER("- Skybox", 0xFF, 0x00, 0xFF);
    renderSkybox(camnode);
    PROFILER_POP_CPU_MARKER();

    if (getRH())
    {
        m_rtts->getFBO(FBO_COLORS).Bind();
        m_post_processing->renderRHDebug(m_rtts->getRH().getRTT()[0], m_rtts->getRH().getRTT()[1], m_rtts->getRH().getRTT()[2], rh_matrix, rh_extend);
    }

    if (getGI())
    {
        m_rtts->getFBO(FBO_COLORS).Bind();
        m_post_processing->renderGI(rh_matrix, rh_extend, m_rtts->getRH().getRTT()[0], m_rtts->getRH().getRTT()[1], m_rtts->getRH().getRTT()[2]);
    }

    PROFILER_PUSH_CPU_MARKER("- Glow", 0xFF, 0xFF, 0x00);
    // Render anything glowing.
    if (!m_mipviz && !m_wireframe && UserConfigParams::m_glow)
    {
        irr_driver->setPhase(GLOW_PASS);
        renderGlow(glows);
    } // end glow
    PROFILER_POP_CPU_MARKER();

    PROFILER_PUSH_CPU_MARKER("- Lensflare/godray", 0x00, 0xFF, 0xFF);
    computeSunVisibility();
    PROFILER_POP_CPU_MARKER();

    // Render transparent
    {
        PROFILER_PUSH_CPU_MARKER("- Transparent Pass", 0xFF, 0x00, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_TRANSPARENT));
        renderTransparent();
        PROFILER_POP_CPU_MARKER();
    }

    m_sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);

    // Render particles
    {
        PROFILER_PUSH_CPU_MARKER("- Particles", 0xFF, 0xFF, 0x00);
        ScopedGPUTimer Timer(getGPUTimer(Q_PARTICLES));
        renderParticles();
        PROFILER_POP_CPU_MARKER();
    }
    if (!UserConfigParams::m_dynamic_lights && !forceRTT)
    {
        glDisable(GL_FRAMEBUFFER_SRGB);
        glDisable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        return;
    }

    // Ensure that no object will be drawn after this point by using an invalid pass
    irr_driver->setPhase(PASS_COUNT);
}
Exemplo n.º 22
0
int main(int argc, char *argv[])
{
	int ret = 0;
	try {
		// initialize directx 
		IDirect3D9 *d3d = Direct3DCreate9(D3D_SDK_VERSION);
		if (!d3d)
			throw std::string("This application requires DirectX 9");

		// create a window
		HWND hwnd = CreateWindowEx(0, "static", "GNU Rocket Example",
		    WS_POPUP | WS_VISIBLE, 0, 0, width, height, 0, 0,
		    GetModuleHandle(0), 0);

		// create the device
		IDirect3DDevice9 *device = NULL;
		static D3DPRESENT_PARAMETERS present_parameters = {width,
		    height, D3DFMT_X8R8G8B8, 3, D3DMULTISAMPLE_NONE, 0,
		    D3DSWAPEFFECT_DISCARD, 0, WINDOWED, 1, D3DFMT_D24S8,
		    0, WINDOWED ? 0 : D3DPRESENT_RATE_DEFAULT, 0
		};
		if (FAILED(d3d->CreateDevice(D3DADAPTER_DEFAULT,
		    D3DDEVTYPE_HAL, hwnd, D3DCREATE_HARDWARE_VERTEXPROCESSING,
		    &present_parameters, &device)))
			throw std::string("Failed to create device");

		// init BASS
		int soundDevice = 1;
		if (!BASS_Init(soundDevice, 44100, 0, hwnd, 0))
			throw std::string("Failed to init bass");

		// load tune
		HSTREAM stream = BASS_StreamCreateFile(false, "tune.ogg", 0, 0,
		    BASS_MP3_SETPOS | (!soundDevice ? BASS_STREAM_DECODE : 0));
		if (!stream)
			throw std::string("Failed to open tune");

		// let's just assume 150 BPM (this holds true for the included tune)
		float bpm = 150.0f;

		// setup timer and construct sync-device
		BassTimer timer(stream, bpm, 8);
		std::auto_ptr<sync::Device> syncDevice = std::auto_ptr<sync::Device>(
		    sync::createDevice("sync", timer));
		if (!syncDevice.get())
			throw std::string("Failed to connect to host?");

		// get tracks
		sync::Track &clearRTrack = syncDevice->getTrack("clear.r");
		sync::Track &clearGTrack = syncDevice->getTrack("clear.g");
		sync::Track &clearBTrack = syncDevice->getTrack("clear.b");

		sync::Track &camRotTrack  = syncDevice->getTrack("cam.rot");
		sync::Track &camDistTrack = syncDevice->getTrack("cam.dist");

		LPD3DXMESH cubeMesh = NULL;
		if (FAILED(D3DXCreateBox(device, 1.0f, 1.0f, 1.0f, &cubeMesh, NULL)))
			return -1;

		// let's roll!
		BASS_Start();
		timer.play();

		bool done = false;
		while (!done) {
			float row = float(timer.getRow());
			if (!syncDevice->update(row))
				done = true;

			// setup clear color
			D3DXCOLOR clearColor(clearRTrack.getValue(row), clearGTrack.getValue(row), clearBTrack.getValue(row), 0.0);

			// paint the window
			device->BeginScene();
			device->Clear(0, 0, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER | D3DCLEAR_STENCIL, clearColor, 1.0f, 0);

/*			D3DXMATRIX world;
			device->SetTransform(D3DTS_WORLD, &world); */

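			// orbit the camera around the origin using the synced rotation and distance tracks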
			float rot = camRotTrack.getValue(row);
			float dist = camDistTrack.getValue(row);
			D3DXVECTOR3 eye(sin(rot) * dist, 0, cos(rot) * dist);
			D3DXVECTOR3 at(0, 0, 0);
			D3DXVECTOR3 up(0, 1, 0);
			D3DXMATRIX view;
			D3DXMatrixLookAtLH(&view, &(eye + at), &at, &up);
			device->SetTransform(D3DTS_VIEW, &view);

			D3DXMATRIX proj;
			D3DXMatrixPerspectiveFovLH(&proj, D3DXToRadian(60), 4.0f / 3, 0.1f, 1000.f);
			device->SetTransform(D3DTS_PROJECTION, &proj);

			cubeMesh->DrawSubset(0);

			device->EndScene();
			device->Present(0, 0, 0, 0);

			BASS_Update(0); // decrease the chance of missing vsync
			MSG msg;
			while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
				TranslateMessage(&msg);
				DispatchMessage(&msg);

				if (WM_QUIT == msg.message) done = true;
				if ((WM_KEYDOWN == msg.message) && (VK_ESCAPE == LOWORD(msg.wParam))) done = true;
			}
		}

		BASS_StreamFree(stream);
		BASS_Free();

		device->Release();
		d3d->Release();
		DestroyWindow(hwnd);
	} catch (const std::exception &e) {
#ifdef _CONSOLE
		fprintf(stderr, "*** error: %s\n", e.what());
#else
		MessageBox(NULL, e.what(), NULL, MB_OK | MB_ICONERROR | MB_SETFOREGROUND);
#endif
		ret = -1;
	} catch (const std::string &str) {
#ifdef _CONSOLE
		fprintf(stderr, "*** error: %s\n", str.c_str());
#else
		MessageBox(NULL, str.c_str(), NULL, MB_OK | MB_ICONERROR | MB_SETFOREGROUND);
#endif
		ret = -1;
	}

	return ret;
}
Exemplo n.º 23
0
int CALLBACK WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow)
{
	WNDCLASSEX wcex = { sizeof(WNDCLASSEX) };
	wcex.style = CS_HREDRAW | CS_VREDRAW;
	wcex.lpfnWndProc = WndProc;
	wcex.cbClsExtra = 0;
	wcex.cbWndExtra = sizeof(LONG_PTR);
	wcex.hInstance = hInstance;
	wcex.hbrBackground = NULL;
	wcex.lpszMenuName = NULL;
	wcex.hCursor = LoadCursor(NULL, IDI_APPLICATION);
	wcex.lpszClassName = L"DirectXTutorial";

	RegisterClassEx(&wcex);

	RECT rc = { 0, 0, windowWidth, windowHeight };
	AdjustWindowRect(&rc, WS_OVERLAPPEDWINDOW, FALSE);

	HWND hwnd = CreateWindow(
		L"DirectXTutorial", 
		L"DirectX Tutorial 06", 
		WS_OVERLAPPEDWINDOW, 
		CW_USEDEFAULT, 
		CW_USEDEFAULT, 
		rc.right - rc.left, 
		rc.bottom - rc.top, 
		NULL, 
		NULL, 
		hInstance, 
		NULL);

	ShowWindow(hwnd, SW_SHOWNORMAL);
	UpdateWindow(hwnd);

	HRESULT hr = S_OK;

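	// describe a double-buffered RGBA8 swap chain that presents to the application window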
	DXGI_SWAP_CHAIN_DESC swapChainDesc = {};
	swapChainDesc.BufferDesc.Width = windowWidth;
	swapChainDesc.BufferDesc.Height = windowHeight;
	swapChainDesc.BufferDesc.RefreshRate.Numerator = 0;
	swapChainDesc.BufferDesc.RefreshRate.Denominator = 0;
	swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
	swapChainDesc.SampleDesc.Count = 1;
	swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
	swapChainDesc.BufferCount = 2;
	swapChainDesc.OutputWindow = hwnd;
	swapChainDesc.Windowed = TRUE;
	swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_SEQUENTIAL;

	hr = D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, D3D11_CREATE_DEVICE_DEBUG, NULL, 0, D3D11_SDK_VERSION, &swapChainDesc, &g_pSwapChain, &g_pD3D11Device, NULL, &g_pD3D11DeviceContext);

	ID3D11Texture2D* pBackBuffer;
	hr = g_pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pBackBuffer);

	hr = g_pD3D11Device->CreateRenderTargetView(pBackBuffer, NULL, &g_pRenderTarget);
	pBackBuffer->Release();

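	// standard alpha blending: colour = src * srcAlpha + dest * (1 - srcAlpha)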
	D3D11_BLEND_DESC BlendState = {};
	BlendState.RenderTarget[0].BlendEnable = TRUE;
	BlendState.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
	BlendState.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
	BlendState.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
	BlendState.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
	BlendState.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
	BlendState.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
	BlendState.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
	hr = g_pD3D11Device->CreateBlendState(&BlendState, &g_pBlendState);

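	// solid-fill rasterizer with culling disabled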
	D3D11_RASTERIZER_DESC rasterizerDesc = {};
	rasterizerDesc.FillMode = D3D11_FILL_SOLID;
	rasterizerDesc.CullMode = D3D11_CULL_NONE;
	hr = g_pD3D11Device->CreateRasterizerState(&rasterizerDesc, &g_pRasterizerState);

	hr = g_pD3D11Device->CreateVertexShader(g_VertexShader, sizeof(g_VertexShader), NULL, &g_pVertexShader);
	hr = g_pD3D11Device->CreatePixelShader(g_PixelShader, sizeof(g_PixelShader), NULL, &g_pPixelShader);

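	// default-usage vertex buffer holding the three vertices of the triangle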
	D3D11_BUFFER_DESC vertexBufferDesc;
	vertexBufferDesc.ByteWidth = 3 * sizeof(Vertex);
	vertexBufferDesc.Usage = D3D11_USAGE_DEFAULT;
	vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
	vertexBufferDesc.CPUAccessFlags = 0;
	vertexBufferDesc.MiscFlags = 0;
	vertexBufferDesc.StructureByteStride = 0;

	D3D11_SUBRESOURCE_DATA vertexBufferInitData;
	vertexBufferInitData.pSysMem = triangle;
	vertexBufferInitData.SysMemPitch = 0;
	vertexBufferInitData.SysMemSlicePitch = 0;

	hr = g_pD3D11Device->CreateBuffer(&vertexBufferDesc, &vertexBufferInitData, &g_pVertexBuffer);

	CB_WVP WVP;
	WVP.world = DirectX::XMMatrixIdentity();
	WVP.view = DirectX::XMMatrixIdentity();
	WVP.proj = DirectX::XMMatrixIdentity();

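	// dynamic, CPU-writable constant buffer for the world/view/projection matrices (updated each frame via Map)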
	D3D11_BUFFER_DESC constantBufferDesc;
	constantBufferDesc.ByteWidth = sizeof(CB_WVP);
	constantBufferDesc.Usage = D3D11_USAGE_DYNAMIC;
	constantBufferDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
	constantBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
	constantBufferDesc.MiscFlags = 0;
	constantBufferDesc.StructureByteStride = 0;

	// Fill in the subresource data.
	D3D11_SUBRESOURCE_DATA constantBufferInitData;
	constantBufferInitData.pSysMem = &WVP;
	constantBufferInitData.SysMemPitch = 0;
	constantBufferInitData.SysMemSlicePitch = 0;

	hr = g_pD3D11Device->CreateBuffer(&constantBufferDesc, &constantBufferInitData, &g_pWVPConstantBuffer);

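	// input layout matching the vertex shader: float3 position at offset 0, float4 colour at offset 12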
	D3D11_INPUT_ELEMENT_DESC inputElementDescs[] =
	{
		{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT,    0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "COLOR",    0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
	};

	hr = g_pD3D11Device->CreateInputLayout(inputElementDescs, 2, g_VertexShader, sizeof(g_VertexShader), &g_pInputLayout);

	UINT stride = sizeof(Vertex);
	UINT offset = 0;
	g_pD3D11DeviceContext->IASetVertexBuffers(0, 1, &g_pVertexBuffer, &stride, &offset);
	g_pD3D11DeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
	g_pD3D11DeviceContext->IASetInputLayout(g_pInputLayout);

	g_pD3D11DeviceContext->VSSetShader(g_pVertexShader, 0, 0);
	g_pD3D11DeviceContext->VSSetConstantBuffers(0, 1, &g_pWVPConstantBuffer);

	D3D11_VIEWPORT viewport = {};
	viewport.TopLeftX = 0;
	viewport.TopLeftY = 0;
	viewport.Width = (FLOAT)windowWidth;
	viewport.Height = (FLOAT)windowHeight;

	g_pD3D11DeviceContext->RSSetViewports(1, &viewport);
	g_pD3D11DeviceContext->RSSetState(g_pRasterizerState);

	g_pD3D11DeviceContext->PSSetShader(g_pPixelShader, 0, 0);

	g_pD3D11DeviceContext->OMSetRenderTargets(1, &g_pRenderTarget, NULL);
	g_pD3D11DeviceContext->OMSetBlendState(g_pBlendState, NULL, 0xffffffff);
	
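	// message pump and render loop: update the WVP constant buffer, clear, draw the triangle, present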
	MSG msg;
	float angle = 0.0f;
	do
	{
		if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
		{
			TranslateMessage(&msg);
			DispatchMessage(&msg);
		}
		else
		{
			D3D11_MAPPED_SUBRESOURCE mappedResource;
			hr = g_pD3D11DeviceContext->Map(g_pWVPConstantBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);

			CB_WVP* dataPtr = (CB_WVP*)mappedResource.pData;
			dataPtr->world = DirectX::XMMatrixTranspose(DirectX::XMMatrixRotationY(angle) * (DirectX::XMMatrixTranslation(0.f, 0.f, 1.f)));
			dataPtr->view = DirectX::XMMatrixTranspose(DirectX::XMMatrixLookAtLH(DirectX::XMVectorSet(0.f, 0.f, -1.f, 0.f), DirectX::XMVectorSet(0.f, 0.f, 1.f, 0.f), DirectX::XMVectorSet(0.f, 1.f, 0.f, 0.f)));
			//dataPtr->proj = DirectX::XMMatrixOrthographicLH(2.f, 2.f, 0.f, 10.f);
			dataPtr->proj = DirectX::XMMatrixTranspose(DirectX::XMMatrixPerspectiveFovLH(DirectX::XM_PI / 2.f, float(windowWidth) / windowHeight, 0.1f, 1000.f));

			g_pD3D11DeviceContext->Unmap(g_pWVPConstantBuffer, 0);

			DirectX::XMFLOAT4 clearColor(0.f, 0.2f, 0.4f, 0.f);
			g_pD3D11DeviceContext->ClearRenderTargetView(g_pRenderTarget, (CONST FLOAT*)&clearColor);
			g_pD3D11DeviceContext->Draw(3, 0);
			g_pSwapChain->Present(1, 0);

			angle += 0.1f;
		}
	} while (msg.message != WM_QUIT);

	g_pRasterizerState->Release();
	g_pBlendState->Release();
	g_pWVPConstantBuffer->Release();
	g_pVertexBuffer->Release();
	g_pInputLayout->Release();
	g_pVertexShader->Release();
	g_pPixelShader->Release();
	g_pRenderTarget->Release();
	g_pSwapChain->Release();
	g_pD3D11DeviceContext->Release();
	g_pD3D11Device->Release();

	return 0;
}
Exemplo n.º 24
0
void DrawScene()
{
	// Acquire the next back buffer
	uint32_t currentBuffer = g_VkSwapchain.AcquireNextImage(g_VkPresentComplete);

	// Record commands into the command buffer
	{
		auto& cmdBuffer = g_VkCmdBuffers[currentBuffer];

		cmdBuffer.reset(vk::CommandBufferResetFlagBits::eReleaseResources);
		vk::CommandBufferBeginInfo cmdBufInfo;
		cmdBuffer.begin(cmdBufInfo);

		vk::ClearColorValue clearColor(std::array<float, 4>{ 0.0f, 0.0f, 0.5f, 1.0f });

		// Clear the color buffer
		{
			MySwapchain::Image& colorImage = g_VkSwapchain.GetImages()[currentBuffer];
			vk::ImageSubresourceRange subresourceRange;
			subresourceRange.aspectMask = vk::ImageAspectFlagBits::eColor;
			subresourceRange.levelCount = 1;
			subresourceRange.layerCount = 1;
			
			// Change the image layout so that the color buffer can be cleared
			SetImageLayout(
				cmdBuffer,
				colorImage.image,
				vk::ImageLayout::eUndefined,
				vk::ImageLayout::eTransferDstOptimal,
				subresourceRange);

			// Clear command
			cmdBuffer.clearColorImage(colorImage.image, vk::ImageLayout::eTransferDstOptimal, clearColor, subresourceRange);

			// Change the layout back for present
			SetImageLayout(
				cmdBuffer,
				colorImage.image,
				vk::ImageLayout::eTransferDstOptimal,
				vk::ImageLayout::ePresentSrcKHR,
				subresourceRange);
		}

		cmdBuffer.end();
	}

	// Submit
	{
		vk::PipelineStageFlags pipelineStages = vk::PipelineStageFlagBits::eBottomOfPipe;
		vk::SubmitInfo submitInfo;
		submitInfo.pWaitDstStageMask = &pipelineStages;
		// Pass the number of semaphores to wait on and their array
		submitInfo.waitSemaphoreCount = 1;
		submitInfo.pWaitSemaphores = &g_VkPresentComplete;
		// Pass the array of command buffers to submit;
		// use an array when submitting multiple command buffers
		submitInfo.commandBufferCount = 1;
		submitInfo.pCommandBuffers = &g_VkCmdBuffers[currentBuffer];
		// Register the semaphore that signals rendering completion
		submitInfo.signalSemaphoreCount = 1;
		submitInfo.pSignalSemaphores = &g_VkRenderComplete;

		// Submit to the queue
		vk::Fence fence = g_VkSwapchain.GetSubmitFence(true);
		g_VkQueue.submit(submitInfo, fence);
		vk::Result fenceRes = g_VkDevice.waitForFences(fence, VK_TRUE, kFenceTimeout);
		assert(fenceRes == vk::Result::eSuccess);
		//g_VkQueue.waitIdle();
	}

	// Present
	g_VkSwapchain.Present(g_VkRenderComplete);
}