Code Example #1
File: ParallelPlane.cpp  Project: Jerdak/MassHeatMap
ParallelPlane::ParallelPlane(osg::Geode *geode,osg::MatrixTransform *transform,Database *db):
    geode_(geode),
    db_(db),
    transform_(transform),
    filter_radius_(-1.0f),
    filter_width_(-1.0f),
    filter_height_(-1.0f),
    filter_angle_(0.0f),
    filter_position_(osg::Vec3f(0,0,0)),
    is_filtered_(false)
{
    QSettings settings("massheatmap.ini",QSettings::IniFormat);
    QString imageFileName = settings.value("gradient_image","gradient3.bmp").toString();
    QString colorImageFileName = settings.value("color_image","gradient2.bmp").toString();

    image_ = osgDB::readImageFile(imageFileName.toStdString());
    image_color_ = osgDB::readImageFile(colorImageFileName.toStdString());

    osg::ref_ptr<osg::Drawable> drawable = myCreateTexturedQuadGeometry(osg::Vec3(0,0,0),1,1,image_,false,true,false);
    osgText::Font* font = osgText::readFontFile("./fonts/arial_bold.ttf");
    osg::Vec4 layoutColor(0.0f,0.0f,0.0f,1.0f);
    float layoutCharacterSize = 0.05f;
    {
        text_ = new osgText::Text;
        text_->setFont(font);
        text_->setColor(layoutColor);
        text_->setCharacterSize(layoutCharacterSize);
        text_->setPosition(osg::Vec3(0.02f,0.1f,0.01f));

        // right-to-left layouts would be used for Hebrew or Arabic fonts.
        text_->setLayout(osgText::Text::LEFT_TO_RIGHT);
        text_->setFontResolution(20,20);
        geode_->addDrawable(text_);
    }
    geode_->addDrawable(drawable);

}
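
Both this constructor and the osgmovie examples below build their quads with myCreateTexturedQuadGeometry(), a helper defined elsewhere in each project and not reproduced in these snippets. The following is only a rough sketch of what such a helper might look like, assuming it wraps osg::createTexturedQuadGeometry() and binds the image through either a Texture2D or a TextureRectangle; the real implementations may differ.

// Hypothetical sketch of the myCreateTexturedQuadGeometry() helper used in these
// examples; the real implementations in each project may differ.
#include <osg/Geometry>
#include <osg/Texture2D>
#include <osg/TextureRectangle>

osg::Drawable* myCreateTexturedQuadGeometry(const osg::Vec3& pos, float width, float height,
                                            osg::Image* image, bool useTextureRectangle,
                                            bool xyPlane, bool flip)
{
    // Lay the quad out either in the x-y plane or the x-z plane.
    osg::Vec3 widthVec(width, 0.0f, 0.0f);
    osg::Vec3 heightVec = xyPlane ? osg::Vec3(0.0f, height, 0.0f) : osg::Vec3(0.0f, 0.0f, height);

    // TextureRectangle uses unnormalized texture coordinates (0..s, 0..t),
    // Texture2D uses normalized ones (0..1). Flipping swaps bottom and top.
    float sMax = useTextureRectangle ? float(image->s()) : 1.0f;
    float tMax = useTextureRectangle ? float(image->t()) : 1.0f;

    osg::Geometry* geometry = osg::createTexturedQuadGeometry(
        pos, widthVec, heightVec,
        0.0f, flip ? tMax : 0.0f,      // left, bottom
        sMax, flip ? 0.0f : tMax);     // right, top

    osg::StateSet* stateset = geometry->getOrCreateStateSet();
    if (useTextureRectangle)
        stateset->setTextureAttributeAndModes(0, new osg::TextureRectangle(image), osg::StateAttribute::ON);
    else
        stateset->setTextureAttributeAndModes(0, new osg::Texture2D(image), osg::StateAttribute::ON);

    return geometry;
}
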
Code Example #2
File: osgmovie.cpp  Project: AdriCS/osg
int main(int argc, char** argv)
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);

    // set up the usage document, in case we need to print out how to use this program.
    arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName());
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" example demonstrates the use of ImageStream for rendering movies as textures.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ...");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("--texture2D","Use Texture2D rather than TextureRectangle.");
    arguments.getApplicationUsage()->addCommandLineOption("--shader","Use shaders to post process the video.");
    arguments.getApplicationUsage()->addCommandLineOption("--interactive","Use camera manipulator to allow movement around movie.");
    arguments.getApplicationUsage()->addCommandLineOption("--flip","Flip the movie so top becomes bottom.");
#if defined(WIN32) || defined(__APPLE__)
    arguments.getApplicationUsage()->addCommandLineOption("--devices","Print the Video input capability via QuickTime and exit.");
#endif

    bool useTextureRectangle = true;
    bool useShader = false;

    // construct the viewer.
    osgViewer::Viewer viewer(arguments);

    if (arguments.argc()<=1)
    {
        arguments.getApplicationUsage()->write(std::cout,osg::ApplicationUsage::COMMAND_LINE_OPTION);
        return 1;
    }

#if defined(WIN32) || defined(__APPLE__)
    // if the user requests the video device capabilities.
    if (arguments.read("-devices") || arguments.read("--devices"))
    {
        // Force load QuickTime plugin, probe video capability, exit
        osgDB::readImageFile("devices.live");
        return 1;
    }
#endif

    while (arguments.read("--texture2D")) useTextureRectangle=false;
    while (arguments.read("--shader")) useShader=true;

    bool mouseTracking = false;
    while (arguments.read("--mouse")) mouseTracking=true;


    // if the user requests help, write it out to cout.
    if (arguments.read("-h") || arguments.read("--help"))
    {
        arguments.getApplicationUsage()->write(std::cout);
        return 1;
    }

    bool fullscreen = !arguments.read("--interactive");
    bool flip = arguments.read("--flip");

    osg::ref_ptr<osg::Geode> geode = new osg::Geode;

    osg::StateSet* stateset = geode->getOrCreateStateSet();
    stateset->setMode(GL_LIGHTING,osg::StateAttribute::OFF);

    if (useShader)
    {
        //useTextureRectangle = false;

        static const char *shaderSourceTextureRec = {
            "uniform vec4 cutoff_color;\n"
            "uniform samplerRect movie_texture;\n"
            "void main(void)\n"
            "{\n"
            "    vec4 texture_color = textureRect(movie_texture, gl_TexCoord[0].st); \n"
            "    if (all(lessThanEqual(texture_color,cutoff_color))) discard; \n"
            "    gl_FragColor = texture_color;\n"
            "}\n"
        };

        static const char *shaderSourceTexture2D = {
            "uniform vec4 cutoff_color;\n"
            "uniform sampler2D movie_texture;\n"
            "void main(void)\n"
            "{\n"
            "    vec4 texture_color = texture2D(movie_texture, gl_TexCoord[0].st); \n"
            "    if (all(lessThanEqual(texture_color,cutoff_color))) discard; \n"
            "    gl_FragColor = texture_color;\n"
            "}\n"
        };

        osg::Program* program = new osg::Program;

        program->addShader(new osg::Shader(osg::Shader::FRAGMENT,
                                           useTextureRectangle ? shaderSourceTextureRec : shaderSourceTexture2D));

        stateset->addUniform(new osg::Uniform("cutoff_color",osg::Vec4(0.1f,0.1f,0.1f,1.0f)));
        stateset->addUniform(new osg::Uniform("movie_texture",0));

        stateset->setAttribute(program);

    }

    osg::Vec3 pos(0.0f,0.0f,0.0f);
    osg::Vec3 topleft = pos;
    osg::Vec3 bottomright = pos;

    bool xyPlane = fullscreen;
    
    bool useAudioSink = false;
    while(arguments.read("--audio")) { useAudioSink = true; }
    
#if USE_SDL
    unsigned int numAudioStreamsEnabled = 0;
#endif

    for(int i=1;i<arguments.argc();++i)
    {
        if (arguments.isString(i))
        {
            osg::Image* image = osgDB::readImageFile(arguments[i]);
            osg::ImageStream* imagestream = dynamic_cast<osg::ImageStream*>(image);
            if (imagestream) 
            {
                osg::ImageStream::AudioStreams& audioStreams = imagestream->getAudioStreams();
                if (useAudioSink && !audioStreams.empty())
                {
                    osg::AudioStream* audioStream = audioStreams[0].get();
                    osg::notify(osg::NOTICE)<<"AudioStream read ["<<audioStream->getName()<<"]"<<std::endl;
#if USE_SDL
                    if (numAudioStreamsEnabled==0)
                    {
                        audioStream->setAudioSink(new SDLAudioSink(audioStream));
                        
                        ++numAudioStreamsEnabled;
                    }
#endif
                }


                imagestream->play();
            }

            if (image)
            {
                osg::notify(osg::NOTICE)<<"image->s()="<<image->s()<<" image->t()="<<image->t()<<" aspectRatio="<<image->getPixelAspectRatio()<<std::endl;

                float width = image->s() * image->getPixelAspectRatio();
                float height = image->t();

                osg::ref_ptr<osg::Drawable> drawable = myCreateTexturedQuadGeometry(pos, width, height,image, useTextureRectangle, xyPlane, flip);
                
                if (image->isImageTranslucent())
                {
                    osg::notify(osg::NOTICE)<<"Transparent movie, enabling blending."<<std::endl;

                    drawable->getOrCreateStateSet()->setMode(GL_BLEND, osg::StateAttribute::ON);
                    drawable->getOrCreateStateSet()->setRenderingHint(osg::StateSet::TRANSPARENT_BIN);
                }

                geode->addDrawable(drawable.get());

                bottomright = pos + osg::Vec3(width,height,0.0f);

                if (xyPlane) pos.y() += height*1.05f;
                else pos.z() += height*1.05f;
            }
            else
            {
                std::cout<<"Unable to read file "<<arguments[i]<<std::endl;
            }
        }
    }

    // set the scene to render
    viewer.setSceneData(geode.get());

    if (viewer.getSceneData()==0)
    {
        arguments.getApplicationUsage()->write(std::cout);
        return 1;
    }

    // pass the model to the MovieEventHandler so it can pick out ImageStreams to manipulate.
    MovieEventHandler* meh = new MovieEventHandler();
    meh->setMouseTracking( mouseTracking );
    meh->set( viewer.getSceneData() );
    viewer.addEventHandler( meh );

    viewer.addEventHandler( new osgViewer::StatsHandler );
    viewer.addEventHandler( new osgGA::StateSetManipulator( viewer.getCamera()->getOrCreateStateSet() ) );
    viewer.addEventHandler( new osgViewer::WindowSizeHandler );

    // add the record camera path handler
    viewer.addEventHandler(new osgViewer::RecordCameraPathHandler);

    // report any errors if they have occurred when parsing the program arguments.
    if (arguments.errors())
    {
        arguments.writeErrorMessages(std::cout);
        return 1;
    }

    if (fullscreen)
    {
        viewer.realize();
        
        viewer.getCamera()->setClearColor(osg::Vec4(0.0f,0.0f,0.0f,1.0f));

        float screenAspectRatio = 1280.0f/1024.0f;

        osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface();
        if (wsi) 
        {
            unsigned int width, height;
            wsi->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), width, height);
            
            screenAspectRatio = float(width) / float(height);
        }
        
        float modelAspectRatio = (bottomright.x()-topleft.x())/(bottomright.y()-topleft.y());
        
        viewer.getCamera()->setViewMatrix(osg::Matrix::identity());


        osg::Vec3 center = (bottomright + topleft)*0.5f;
        osg::Vec3 dx(bottomright.x()-center.x(), 0.0f, 0.0f);
        osg::Vec3 dy(0.0f, topleft.y()-center.y(), 0.0f);

        float ratio = modelAspectRatio/screenAspectRatio;

        if (ratio>1.0f)
        {
            // use model width as the control on model size.
            bottomright = center + dx - dy * ratio;
            topleft = center - dx + dy * ratio;
        }
        else
        {
            // use model height as the control on model size.
            bottomright = center + dx / ratio - dy;
            topleft = center - dx / ratio + dy;
        }

        viewer.getCamera()->setProjectionMatrixAsOrtho2D(topleft.x(),bottomright.x(),topleft.y(),bottomright.y());

        while(!viewer.done())
        {
            viewer.frame();
        }
        return 0;
    }
    else
    {
        // create the windows and run the threads.
        return viewer.run();
    }
}
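
The fullscreen branch above letterboxes the movie: it compares the model's aspect ratio with the screen's and enlarges the orthographic extents along whichever axis would otherwise crop the image. A small worked example with illustrative numbers (not taken from the source):

// Illustrative numbers only: a 1920x1080 movie shown on a 1280x1024 screen.
float screenAspectRatio = 1280.0f / 1024.0f;        // 1.25
float modelAspectRatio  = 1920.0f / 1080.0f;        // ~1.78
float ratio = modelAspectRatio / screenAspectRatio; // ~1.42, i.e. > 1
// ratio > 1: the movie is proportionally wider than the screen, so the code keeps the
// horizontal half-extent (dx) and scales the vertical half-extent (dy) by 'ratio'.
// The ortho projection then shows the whole movie with empty bars above and below;
// ratio < 1 would instead widen dx by 1/ratio, producing bars at the sides.
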
Code Example #3
int main(int argc, char** argv)
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);

    // set up the usage document, in case we need to print out how to use this program.
    arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName());
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" example demonstrates the use of ImageStream for rendering movies as textures.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ...");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("--texture2D","Use Texture2D rather than TextureRectangle.");
    arguments.getApplicationUsage()->addCommandLineOption("--shader","Use shaders to post process the video.");
    arguments.getApplicationUsage()->addCommandLineOption("--interactive","Use camera manipulator to allow movement around movie.");
    arguments.getApplicationUsage()->addCommandLineOption("--flip","Flip the movie so top becomes bottom.");
#if defined(WIN32) || defined(__APPLE__)
    arguments.getApplicationUsage()->addCommandLineOption("--devices","Print the Video input capability via QuickTime and exit.");
#endif

    bool useTextureRectangle = true;
    bool useShader = false;

    // construct the viewer.
    osgViewer::Viewer viewer(arguments);

    if (arguments.argc()<=1)
    {
        arguments.getApplicationUsage()->write(std::cout,osg::ApplicationUsage::COMMAND_LINE_OPTION);
        return 1;
    }

#if defined(WIN32) || defined(__APPLE__)
    // if the user requests the video device capabilities.
    if (arguments.read("-devices") || arguments.read("--devices"))
    {
        // Force load QuickTime plugin, probe video capability, exit
        osgDB::readImageFile("devices.live");
        return 1;
    }
#endif

    while (arguments.read("--texture2D")) useTextureRectangle=false;
    while (arguments.read("--shader")) useShader=true;

    bool mouseTracking = false;
    while (arguments.read("--mouse")) mouseTracking=true;


    // if the user requests help, write it out to cout.
    if (arguments.read("-h") || arguments.read("--help"))
    {
        arguments.getApplicationUsage()->write(std::cout);
        return 1;
    }

    bool fullscreen = !arguments.read("--interactive");
    bool flip = arguments.read("--flip");

    osg::ref_ptr<osg::Geode> geode = new osg::Geode;

    osg::StateSet* stateset = geode->getOrCreateStateSet();
    stateset->setMode(GL_LIGHTING,osg::StateAttribute::OFF);

    if (useShader)
    {
        //useTextureRectangle = false;

        static const char *shaderSourceTextureRec = {
            "uniform vec4 cutoff_color;\n"
            "uniform samplerRect movie_texture;\n"
            "void main(void)\n"
            "{\n"
            "    vec4 texture_color = textureRect(movie_texture, gl_TexCoord[0].st); \n"
            "    if (all(lessThanEqual(texture_color,cutoff_color))) discard; \n"
            "    gl_FragColor = texture_color;\n"
            "}\n"
        };

        static const char *shaderSourceTexture2D = {
            "uniform vec4 cutoff_color;\n"
            "uniform sampler2D movie_texture;\n"
            "void main(void)\n"
            "{\n"
            "    vec4 texture_color = texture2D(movie_texture, gl_TexCoord[0].st); \n"
            "    if (all(lessThanEqual(texture_color,cutoff_color))) discard; \n"
            "    gl_FragColor = texture_color;\n"
            "}\n"
        };

        osg::Program* program = new osg::Program;

        program->addShader(new osg::Shader(osg::Shader::FRAGMENT,
                                           useTextureRectangle ? shaderSourceTextureRec : shaderSourceTexture2D));

        stateset->addUniform(new osg::Uniform("cutoff_color",osg::Vec4(0.1f,0.1f,0.1f,1.0f)));
        stateset->addUniform(new osg::Uniform("movie_texture",0));

        stateset->setAttribute(program);

    }

    osg::Vec3 pos(0.0f,0.0f,0.0f);
    osg::Vec3 topleft = pos;
    osg::Vec3 bottomright = pos;

    bool xyPlane = fullscreen;

    for(int i=1;i<arguments.argc();++i)
    {
        if (arguments.isString(i))
        {
            osg::Image* image = osgDB::readImageFile(arguments[i]);
            osg::ImageStream* imagestream = dynamic_cast<osg::ImageStream*>(image);
            if (imagestream) imagestream->play();

            if (image)
            {
                osg::notify(osg::NOTICE)<<"image->s()="<<image->s()<<" image->t()="<<image->t()<<std::endl;

                geode->addDrawable(myCreateTexturedQuadGeometry(pos,image->s(),image->t(),image, useTextureRectangle, xyPlane, flip));

                bottomright = pos + osg::Vec3(static_cast<float>(image->s()),static_cast<float>(image->t()),0.0f);

                if (xyPlane) pos.y() += image->t()*1.05f;
                else pos.z() += image->t()*1.05f;
            }
            else
            {
                std::cout<<"Unable to read file "<<arguments[i]<<std::endl;
            }
        }
    }

    // set the scene to render
    viewer.setSceneData(geode.get());

    if (viewer.getSceneData()==0)
    {
        arguments.getApplicationUsage()->write(std::cout);
        return 1;
    }

    // pass the model to the MovieEventHandler so it can pick out ImageStreams to manipulate.
    MovieEventHandler* meh = new MovieEventHandler();
    meh->setMouseTracking( mouseTracking );
    meh->set( viewer.getSceneData() );
    viewer.addEventHandler( meh );

    viewer.addEventHandler( new osgViewer::StatsHandler );
    viewer.addEventHandler( new osgGA::StateSetManipulator( viewer.getCamera()->getOrCreateStateSet() ) );
    viewer.addEventHandler( new osgViewer::WindowSizeHandler );

    // add the record camera path handler
    viewer.addEventHandler(new osgViewer::RecordCameraPathHandler);

    // report any errors if they have occurred when parsing the program arguments.
    if (arguments.errors())
    {
        arguments.writeErrorMessages(std::cout);
        return 1;
    }

    if (fullscreen)
    {
        viewer.realize();

        viewer.getCamera()->setViewMatrix(osg::Matrix::identity());
        viewer.getCamera()->setProjectionMatrixAsOrtho2D(topleft.x(),bottomright.x(),topleft.y(),bottomright.y());

        while(!viewer.done())
        {
            viewer.frame();
        }
        return 0;
    }
    else
    {
        // create the windows and run the threads.
        return viewer.run();
    }
}
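
Both osgmovie variants also rely on a MovieEventHandler defined earlier in the example file and omitted from these snippets. Below is a rough, hypothetical stand-in that only illustrates the general shape: it collects the ImageStreams bound as textures in the scene and toggles play/pause on the 'p' key. The StreamCollector name and the stubbed setMouseTracking() are inventions of this sketch; the real handler does considerably more.

// Hypothetical stand-in for the MovieEventHandler used above; sketch only.
#include <vector>
#include <osg/Geode>
#include <osg/ImageStream>
#include <osg/NodeVisitor>
#include <osg/Texture>
#include <osgGA/GUIEventHandler>

class MovieEventHandler : public osgGA::GUIEventHandler
{
public:
    void setMouseTracking(bool /*track*/) {}   // stub: mouse tracking not sketched here

    // Walk the scene graph and remember every ImageStream bound as a texture image.
    void set(osg::Node* node)
    {
        _streams.clear();
        if (node)
        {
            StreamCollector collector(_streams);
            node->accept(collector);
        }
    }

    virtual bool handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter&)
    {
        if (ea.getEventType()==osgGA::GUIEventAdapter::KEYDOWN && ea.getKey()=='p')
        {
            // Toggle play/pause on all collected movie streams.
            for(unsigned int i=0; i<_streams.size(); ++i)
            {
                if (_streams[i]->getStatus()==osg::ImageStream::PLAYING) _streams[i]->pause();
                else _streams[i]->play();
            }
            return true;
        }
        return false;
    }

protected:
    typedef std::vector< osg::ref_ptr<osg::ImageStream> > StreamList;

    struct StreamCollector : public osg::NodeVisitor
    {
        StreamCollector(StreamList& streams):
            osg::NodeVisitor(osg::NodeVisitor::TRAVERSE_ALL_CHILDREN),
            _streams(streams) {}

        virtual void apply(osg::Geode& geode)
        {
            for(unsigned int i=0; i<geode.getNumDrawables(); ++i)
            {
                osg::StateSet* ss = geode.getDrawable(i)->getStateSet();
                if (!ss) continue;
                osg::Texture* texture = dynamic_cast<osg::Texture*>(
                    ss->getTextureAttribute(0, osg::StateAttribute::TEXTURE));
                osg::ImageStream* stream = texture ?
                    dynamic_cast<osg::ImageStream*>(texture->getImage(0)) : 0;
                if (stream) _streams.push_back(stream);
            }
            traverse(geode);
        }

        StreamList& _streams;
    };

    StreamList _streams;
};
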