Example #1
bool DummySensor::update(osg::NodeVisitor* nv) {

	//This is the main function of your video plugin.
	//You can either retrieve images from your video stream/camera/file,
	//or communicate with a thread to synchronize and get the data out.

	//The most important part is to synchronize your data
	//and copy the result to the VideoImageStream used in this plugin.

	//0. You can collect some stats; a timer is handy for that.
	osg::Timer t;

	{

	//1. mutex lock access to the image video stream
	OpenThreads::ScopedLock<OpenThreads::Mutex> _lock(this->getMutex());

	//2. Here you would copy the video buffer into the main image/video stream
	//with a call like
	//std::memcpy(_videoStreamList[0]->data(), newImage, _videoStreamList[0]->getImageSizeInBytes());
	//where newImage can be retrieved from another thread.
	//In this example we do nothing (a dummy copy was already made in init()).

	osg::notify(osg::DEBUG_INFO)<<"osgART::DummySensor::update() get new image.."<<std::endl;

	//3. don't forget to call this to notify the rest of the application
	//that you have a new video image
	//_sensorDataList[0]->dirty();
	}

	//4. report the capture time to the stats collector, if one is attached
	if (nv) {

		const osg::FrameStamp *framestamp = nv->getFrameStamp();

		if (framestamp && _stats.valid())
		{
			_stats->setAttribute(framestamp->getFrameNumber(),
				"Capture time taken", t.time_m());
		}
	}


	// Increase modified count every X ms to ensure tracker updates
	if (updateTimer.time_m() > 50) {
      //  _sensorDataList[0]->dirty();
		updateTimer.setStartTick();
	}

	return true;
}
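The stats step above relies on the basic osg::Timer pattern: the timer starts counting when it is constructed (or when setStartTick() is called), and time_m() returns the milliseconds elapsed since then. A minimal, self-contained sketch of that pattern outside of any plugin (the timeSomeWork() function is only illustrative):

#include <osg/Timer>
#include <osg/Notify>

// Minimal sketch of the timing pattern used for the capture stats above:
// the timer starts at construction, setStartTick() resets it, and
// time_m() reports the elapsed time in milliseconds.
void timeSomeWork()
{
    osg::Timer t;          // starts counting immediately
    t.setStartTick();      // optional explicit reset, as in the examples

    // ... the work being measured (e.g. copying a video frame) ...

    osg::notify(osg::NOTICE) << "work took " << t.time_m() << " ms" << std::endl;
}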
Example #2
void HeartBeat::timerEvent( QTimerEvent */*event*/ )
{
    osg::ref_ptr< osgViewer::ViewerBase > viewer;
    if( !_viewer.lock( viewer ) )
    {
        // viewer has been deleted -> stop timer
        stopTimer();
        return;
    }

    // limit the frame rate
    if( viewer->getRunMaxFrameRate() > 0.0)
    {
        double dt = _lastFrameStartTime.time_s();
        double minFrameTime = 1.0 / viewer->getRunMaxFrameRate();
        if (dt < minFrameTime)
            OpenThreads::Thread::microSleep(static_cast<unsigned int>(1000000.0*(minFrameTime-dt)));
    }
    else
    {
        // avoid excessive CPU loading when no frame is required in ON_DEMAND mode
        if( viewer->getRunFrameScheme() == osgViewer::ViewerBase::ON_DEMAND )
        {
            double dt = _lastFrameStartTime.time_s();
            if (dt < 0.01)
                OpenThreads::Thread::microSleep(static_cast<unsigned int>(1000000.0*(0.01-dt)));
        }
    }

    // record start frame time
    _lastFrameStartTime.setStartTick();

    // make frame
    if( viewer->getRunFrameScheme() == osgViewer::ViewerBase::ON_DEMAND )
    {
        if( viewer->checkNeedToDoFrame() )
        {
            viewer->frame();
        }
    }
    else
    {
        viewer->frame();
    }
}
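The frame-rate cap at the top of timerEvent() is also timer-driven: time_s() reads how long ago the last frame started and setStartTick() resets that reference point once a new frame begins. A standalone sketch of that throttling idea (the free function, its parameters, and the 60 fps figure in the comment are assumptions for illustration):

#include <osg/Timer>
#include <OpenThreads/Thread>

// Sketch of the throttle used in timerEvent(): sleep away whatever is left
// of the frame budget, then reset the timer so it measures the next frame.
void throttleFrame(osg::Timer& lastFrameStartTime, double maxFrameRate)
{
    double dt = lastFrameStartTime.time_s();      // seconds since the last frame started
    double minFrameTime = 1.0 / maxFrameRate;     // e.g. 1/60 s for a 60 fps cap
    if (dt < minFrameTime)
        OpenThreads::Thread::microSleep(static_cast<unsigned int>(1000000.0 * (minFrameTime - dt)));

    lastFrameStartTime.setStartTick();            // the next frame starts now
    // ... render the frame here ...
}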
Example #3
/// Initializes the loop for viewer. Must be called from main thread.
void HeartBeat::init( osgViewer::ViewerBase *viewer )
{
    if( _viewer == viewer )
        return;

    stopTimer();

    _viewer = viewer;

    if( viewer )
    {
        _timerId = startTimer( 0 );
        _lastFrameStartTime.setStartTick( 0 );
    }
}
Example #4
    virtual void operator()(osg::Node* node, osg::NodeVisitor* nv)
    {
        osg::Timer_t currTime = _timer.tick();

        if (_timer.delta_s(_prevShaderUpdateTime, currTime) > 1.0) // one-second interval for the shader-changed/reload check
        {
            osg::ref_ptr<osg::Shader> reloadedshader;
            std::string runningSource;
            std::string reloadedstring;

            if (_computeNode->_computeShader.valid())
            {
                runningSource = _computeNode->_computeShader->getShaderSource();
                reloadedshader = osgDB::readRefShaderFile(osg::Shader::COMPUTE, _computeNode->_computeShaderSourcePath);

                // guard against a failed reload (missing or unreadable file)
                reloadedstring = reloadedshader.valid() ? reloadedshader->getShaderSource() : runningSource;
                if (!osgDB::equalCaseInsensitive(runningSource.c_str(), reloadedstring.c_str()))
                {
                    _computeNode->_computeProgram->removeShader(_computeNode->_computeShader.get());
                    _computeNode->_computeShader = reloadedshader.get();
                    _computeNode->_computeProgram->addShader(_computeNode->_computeShader.get());
                }
            }

            if (_computeNode->_vertexShader.valid())
            {

                runningSource = _computeNode->_vertexShader->getShaderSource();
                reloadedshader = osgDB::readRefShaderFile(osg::Shader::VERTEX, _computeNode->_vertexShaderSourcePath);

                // guard against a failed reload (missing or unreadable file)
                reloadedstring = reloadedshader.valid() ? reloadedshader->getShaderSource() : runningSource;
                if (!osgDB::equalCaseInsensitive(runningSource.c_str(), reloadedstring.c_str()))
                {
                    _computeNode->_computationResultsRenderProgram->removeShader(_computeNode->_vertexShader.get());
                    _computeNode->_vertexShader = reloadedshader.get();
                    _computeNode->_computationResultsRenderProgram->addShader(_computeNode->_vertexShader.get());
                }
            }



            if (_computeNode->_geometryShader.valid())
            {
                runningSource = _computeNode->_geometryShader->getShaderSource();
                reloadedshader = osgDB::readRefShaderFile(osg::Shader::GEOMETRY, _computeNode->_geometryShaderSourcePath);

                // guard against a failed reload (missing or unreadable file)
                reloadedstring = reloadedshader.valid() ? reloadedshader->getShaderSource() : runningSource;
                if (!osgDB::equalCaseInsensitive(runningSource.c_str(), reloadedstring.c_str()))
                {
                    _computeNode->_computationResultsRenderProgram->removeShader(_computeNode->_geometryShader.get());
                    _computeNode->_geometryShader = reloadedshader.get();
                    _computeNode->_computationResultsRenderProgram->addShader(_computeNode->_geometryShader.get());
                }
            }

            if (_computeNode->_fragmentShader.valid())
            {
                runningSource = _computeNode->_fragmentShader->getShaderSource();
                reloadedshader = osgDB::readRefShaderFile(osg::Shader::FRAGMENT, _computeNode->_fragmentShaderSourcePath);

                reloadedstring = reloadedshader->getShaderSource();
                if (!osgDB::equalCaseInsensitive(runningSource.c_str(), reloadedstring.c_str()))
                {
                    _computeNode->_computationResultsRenderProgram->removeShader(_computeNode->_fragmentShader.get());
                    _computeNode->_fragmentShader = reloadedshader.get();
                    _computeNode->_computationResultsRenderProgram->addShader(_computeNode->_fragmentShader.get());
                }
            }


            _prevShaderUpdateTime = _timer.tick();
        }

        traverse(node, nv);

    }
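All four stages above repeat the same reload/compare/swap sequence. A hypothetical helper (not part of the original callback) could factor it out; this sketch uses a plain string comparison instead of the case-insensitive one above and leaves the running shader in place when the file cannot be read:

#include <string>
#include <osg/Program>
#include <osg/Shader>
#include <osg/ref_ptr>
#include <osgDB/ReadFile>

// Hypothetical helper: reload a shader from disk and swap it into 'program'
// only when the source on disk differs from the one currently running.
static void reloadIfChanged(osg::Program* program,
                            osg::ref_ptr<osg::Shader>& shader,
                            osg::Shader::Type type,
                            const std::string& sourcePath)
{
    if (!program || !shader.valid()) return;

    osg::ref_ptr<osg::Shader> reloaded = osgDB::readRefShaderFile(type, sourcePath);
    if (!reloaded.valid()) return;                 // keep the running shader on a failed read

    if (reloaded->getShaderSource() != shader->getShaderSource())
    {
        program->removeShader(shader.get());
        shader = reloaded;
        program->addShader(shader.get());
    }
}

Each block above would then reduce to one call per stage, e.g. (assuming _computeProgram is held as a ref_ptr) reloadIfChanged(_computeNode->_computeProgram.get(), _computeNode->_computeShader, osg::Shader::COMPUTE, _computeNode->_computeShaderSourcePath);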
Example #5
//==========================================================
//!Compare the accuracy of the probability functions.
void testAccuracy()
{
    osg::Timer_t startTick=0, endTick=0;
    
    std::ofstream fout("probAccuracy.csv");
    
    std::cout << "  Testing accuracy of prob func...\n    ";
    std::cout.flush();
    
    osg::ref_ptr<osg::Geode> osgGeode=new osg::Geode();
    osg::ref_ptr<osg::Vec3Array> osgVertices=new osg::Vec3Array();
    osg::ref_ptr<osg::Vec4Array> osgColours=new osg::Vec4Array();
    
    const size_t numSamples=10000;
    
    startTick=timer.tick();
    
    fout << "probRef" << ", " << "probVolumeLookup" << ", " << "probGaus" << ", " << "Q.length()" << ", " << "area" << "\n";
    
    for (size_t sampleNum=0; sampleNum<numSamples; ++sampleNum)
    {
        
        const stitch::Vec3 Q=stitch::Vec3::randDisc()*6.0f;
        const float radius=0.25f;
        const float theta=stitch::GlobalRand::uniformSampler()*(2.0*M_PI);
        const stitch::Vec3 A=Q + stitch::Vec3(radius*cos(theta - (0.0f*M_PI/180.0f)), radius*sin(theta - (0.0f*M_PI/180.0f)), 0.0);
        const stitch::Vec3 B=Q + stitch::Vec3(radius*cos(theta - (120.0f*M_PI/180.0f)), radius*sin(theta - (120.0f*M_PI/180.0f)), 0.0);
        const stitch::Vec3 C=Q + stitch::Vec3(radius*cos(theta - (240.0f*M_PI/180.0f)), radius*sin(theta - (240.0f*M_PI/180.0f)), 0.0);
        

        /*
        const stitch::Vec3 A=stitch::Vec3::randDisc()*5.0f;
        const stitch::Vec3 B=stitch::Vec3::randDisc()*5.0f;
        const stitch::Vec3 C=stitch::Vec3::randDisc()*5.0f;
        const stitch::Vec3 Q(A, B, C, 0.5f, 0.5f, 0.5f);
        */
        
        const float probRef=stitch::BeamSegment::gaussVolumeBarycentricRandomABC(A, B, C, 1.0f, 25000).length();
        
        const float probGaus=stitch::BeamSegment::gaussVolumeGaussSubd(A, B, C);
        const float probVolumeLookup=stitch::BeamSegment::gaussVolumeLookUpOptimisedABC(A, B, C, 1.0f).length();
        //const float prob=stitch::Beam::gaussVolumeRecurABCNormInit(A, B, C);
        //const float prob=stitch::Beam::gaussVolumeBarycentricRandomABC(A, B, C, 1.0f, 250000).length();
        
        const float area=stitch::Vec3::crossLength(A, B, C)*0.5f;
        const float err=fabsf(probGaus - probRef)*1000.0f;
        
        {
            fout << probRef << ", " << probVolumeLookup << ", " << probGaus << ", " << Q.length() << ", " << area << "\n";
            fout.flush();
            
            const float colour=probRef;
            
            osgVertices->push_back(osg::Vec3(A.x(), A.y(), A.z()+err));
            osgColours->push_back(osg::Vec4(colour,
                                            colour,
                                            colour,
                                            1.0));
            
            osgVertices->push_back(osg::Vec3(B.x(), B.y(), B.z()+err));
            osgColours->push_back(osg::Vec4(colour,
                                            colour,
                                            colour,
                                            1.0));
            
            osgVertices->push_back(osg::Vec3(C.x(), C.y(), C.z()+err));
            osgColours->push_back(osg::Vec4(colour,
                                            colour,
                                            colour,
                                            1.0));
        }
        
    }
    
    endTick=timer.tick();
    
    if (osgVertices->size()>0)
    {
        osg::ref_ptr<osg::Geometry> osgGeometry=new osg::Geometry();
        osgGeometry->setVertexArray(osgVertices.get());
        osgGeometry->setColorArray(osgColours.get());
        osgGeometry->setColorBinding(osg::Geometry::BIND_PER_VERTEX);
        
        osgGeometry->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::TRIANGLES,0,osgVertices->size()));
        
        osg::ref_ptr<osg::StateSet> osgStateset=osgGeometry->getOrCreateStateSet();
        osgStateset->setNestRenderBins(false);
        osgStateset->setRenderingHint(osg::StateSet::TRANSPARENT_BIN);
        osgStateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
        osgStateset->setMode(GL_BLEND,osg::StateAttribute::ON);
        
        osg::Depth* depth = new osg::Depth();
        osgStateset->setAttributeAndModes(depth, osg::StateAttribute::ON);
        
        osg::BlendFunc *fn = new osg::BlendFunc();
        fn->setFunction(osg::BlendFunc::SRC_ALPHA, osg::BlendFunc::ONE_MINUS_SRC_ALPHA);
        osgStateset->setAttributeAndModes(fn, osg::StateAttribute::ON);
        
        osg::Material *material = new osg::Material();
        material->setColorMode(osg::Material::DIFFUSE);
        material->setDiffuse(osg::Material::FRONT_AND_BACK, osg::Vec4(1.0f, 1.0f, 1.0f, 1.0f));
        osgStateset->setAttributeAndModes(material, osg::StateAttribute::ON);
        
        osgGeode->addDrawable(osgGeometry.get());
        
        {
            OpenThreads::ScopedLock<OpenThreads::Mutex> sceneGraphLock(g_sceneGraphMutex);
            g_rootGroup_->removeChildren(0, g_rootGroup_->getNumChildren());
            g_rootGroup_->addChild(osgGeode);
        }
    }
    
    std::cout << "  " << timer.delta_m(startTick, endTick) << " ms...done.\n\n";
    std::cout.flush();
    
    fout.close();
}
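The start/end measurement above uses the tick()/delta form of osg::Timer rather than time_m(). A self-contained sketch of that pattern (the work in the middle is a placeholder):

#include <osg/Timer>
#include <iostream>

// Minimal sketch of the tick()/delta_m() pattern used by testAccuracy():
// record a tick before and after the work, then ask the timer for the delta.
int main()
{
    osg::Timer timer;
    osg::Timer_t startTick = timer.tick();

    // ... the work being measured ...

    osg::Timer_t endTick = timer.tick();
    std::cout << timer.delta_m(startTick, endTick) << " ms\n";   // delta_s()/delta_u() also available
    return 0;
}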
Example #6
//==========================================================
int main(void)
{
    stitch::GlobalRand::initialiseUniformSamplerArray();
    //stitch::BeamSegment::generateVolumeTexture();
    
    {
        int argc=1;
        char **argv=new char *[10];
        argv[0]=new char [256];
        strcpy(argv[0], "Prob Calc Test");
        
        osg::ArgumentParser arguments(&argc,argv);
        g_viewer=new osgViewer::CompositeViewer(arguments);
        
        delete [] argv[0];
        delete [] argv;
    }
    
    g_viewer->setThreadingModel(osgViewer::ViewerBase::SingleThreaded);
    g_rootGroup_=new osg::Group;
    g_view3D=new stitch::View3D(g_ulWindowWidth, g_ulWindowHeight, g_rootGroup_);
    g_view3D->init();
    //g_view3D->getOSGView()->addEventHandler(new KeyboardEventHandler());
    g_viewer->addView(g_view3D->getOSGView());
    
    if (!g_viewer->isRealized())
    {
        g_viewer->realize();
    }
    
    g_view3D->setCamera(stitch::Vec3(0.0f, g_eye_distance, g_eye_distance), stitch::Vec3(0.0f, 0.0f, 0.0f), stitch::Vec3(0.0f, 0.0f, 1.0f));
    
    g_renderInBackground=true;
    std::thread displayUpdateThread(displayUpdateRun);
    
    osg::Timer_t startTick=0, endTick=0;
    startTick=timer.tick();
    
    //========================//
    //=== Start Prob Calc tests ===//
    //========================//
    //visualiseProbFunc();
    testAccuracy();
    //========================//
    //========================//
    //========================//
    
    endTick=timer.tick();
    
    std::cout << "Completed in " << timer.delta_s(startTick, endTick) << " seconds.\n\n";
    std::cout.flush();
    
    //====================================//
    //=== Continue rendering until ESC ===//
    //====================================//
    g_renderInBackground=false;
    displayUpdateThread.join();//Wait for background render thread to exit.
    
    //Continue rendering in main thread until viewer exited.
    while (!g_viewer->done())
    {
        g_view3D->preframe();
        g_viewer->frame();
        
        std::chrono::milliseconds dura(50);
        std::this_thread::sleep_for(dura);
    }
    //====================================//
    //====================================//
    //====================================//
    
    std::chrono::milliseconds dura(1000);
    std::this_thread::sleep_for(dura);
    
    delete g_viewer;
}
Example #7
//==========================================================
//!Visualise the probability function.
void visualiseProbFunc()
{
    osg::Timer_t startTick=0, endTick=0;
    startTick=timer.tick();
    
    std::cout << "  Visualising prob func...\n    ";
    std::cout.flush();
    
    osg::ref_ptr<osg::Geode> osgGeode=new osg::Geode();
    osg::ref_ptr<osg::Vec3Array> osgVertices=new osg::Vec3Array();
    osg::ref_ptr<osg::Vec4Array> osgColours=new osg::Vec4Array();
    
    
    for (size_t iz=0; iz<256; iz+=4)
    {
        for (size_t iy=0; iy<512; iy+=4)
        {
            for (size_t ix=0; ix<512; ix+=4)
            {
                const stitch::Vec3 B=stitch::Vec3(0.0f, 0.0f, 0.0f);
                
                const stitch::Vec3 A=stitch::Vec3(1.0f, 0.0f, 0.0f)*(((ix+0.5f)/512.0f)*4.0f) + B;
                const stitch::Vec3 orthA=stitch::Vec3(0.0f, 1.0f, 0.0f);
                
                const float theta=((iz+0.5f)/256.0f)*M_PI;
                
                const stitch::Vec3 C=(A.normalised()*cos(theta) + orthA.normalised()*sin(theta)) * (((iy+0.5f)/512.0f)*4.0f) + B;
                
                const float prob=stitch::BeamSegment::gaussVolumeBarycentricRandomABC(A, B, C, 1.0f, 10000).length();
                //const float prob=stitch::Beam::gaussVolumeGaussMC(A, B, C);
                //const float prob=stitch::Beam::gaussVolumeGaussSubd(A, B, C);
                //const float prob=stitch::Beam::gaussVolumeLookUpOptimisedABC(A, B, C, 1.0f).length();
                //const float prob=stitch::Beam::gaussVolumeRecurABCNormInit(A, B, C);
                
                if (prob>0.075f)
                {
                    osg::Vec3 coord=osg::Vec3(ix/512.0f,
                                              iy/512.0f,
                                              iz/256.0f);
                    
                    osgVertices->push_back(coord);
                    
                    osgColours->push_back(osg::Vec4(prob, prob, prob, 1.0));
                }
            }
        }
    }
    
    if (osgVertices->size()>0)
    {
        osg::ref_ptr<osg::Geometry> osgGeometry=new osg::Geometry();
        osgGeometry->setVertexArray(osgVertices.get());
        osgGeometry->setColorArray(osgColours.get());
        osgGeometry->setColorBinding(osg::Geometry::BIND_PER_VERTEX);
        
        osgGeometry->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::POINTS,0,osgVertices->size()));
        osgGeometry->getOrCreateStateSet()->setAttribute( new osg::Point( 6.0f ), osg::StateAttribute::ON);
        
        osg::ref_ptr<osg::StateSet> osgStateset=osgGeometry->getOrCreateStateSet();
        osgStateset->setNestRenderBins(false);
        osgStateset->setRenderingHint(osg::StateSet::TRANSPARENT_BIN);
        osgStateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
        osgStateset->setMode(GL_BLEND,osg::StateAttribute::ON);
        
        osg::Depth* depth = new osg::Depth();
        osgStateset->setAttributeAndModes(depth, osg::StateAttribute::ON);
        
        osg::BlendFunc *fn = new osg::BlendFunc();
        fn->setFunction(osg::BlendFunc::SRC_ALPHA, osg::BlendFunc::ONE_MINUS_SRC_ALPHA);
        osgStateset->setAttributeAndModes(fn, osg::StateAttribute::ON);
        
        osg::Material *material = new osg::Material();
        material->setColorMode(osg::Material::DIFFUSE);
        material->setDiffuse(osg::Material::FRONT_AND_BACK, osg::Vec4(1.0f, 1.0f, 1.0f, 1.0f));
        osgStateset->setAttributeAndModes(material, osg::StateAttribute::ON);
        
        osgGeode->addDrawable(osgGeometry.get());
        
        {
            OpenThreads::ScopedLock<OpenThreads::Mutex> sceneGraphLock(g_sceneGraphMutex);
            g_rootGroup_->removeChildren(0, g_rootGroup_->getNumChildren());
            g_rootGroup_->addChild(osgGeode);
        }
    }
    
    endTick=timer.tick();
    
    std::cout << "  " << timer.delta_m(startTick, endTick) << " ms...done.\n\n";
    std::cout.flush();
}
Example #8
bool OpenNIVideo::update(osg::NodeVisitor* nv) {

	//This is the main function of your video plugin.
	//You can either retrieve images from your video stream/camera/file,
	//or communicate with a thread to synchronize and get the data out.

	//The most important part is to synchronize your data
	//and copy the result to the VideoImageStream used in this plugin.

	//0. You can collect some stats; a timer is handy for that.
	osg::Timer t;

	{

	//1. mutex lock access to the image video stream
	OpenThreads::ScopedLock<OpenThreads::Mutex> _lock(this->getMutex());

	osg::notify(osg::DEBUG_INFO)<<"osgART::OpenNIVideo::update() get new image.."<<std::endl;

	XnStatus nRetVal = XN_STATUS_OK;

	nRetVal=context.WaitAndUpdateAll();
	CHECK_RC(nRetVal, "Update Data");

	xnFPSMarkFrame(&xnFPS);

	depth_generator.GetMetaData(depthMD);
	const XnDepthPixel* pDepthMap = depthMD.Data();
	//pDepthMap points at the device's depth map (16-bit XnDepthPixel values)
	
	image_generator.GetMetaData(imageMD);
	const XnUInt8* pImageMap = imageMD.Data();

	// Hybrid mode isn't supported in this sample
	if (imageMD.FullXRes() != depthMD.FullXRes() || imageMD.FullYRes() != depthMD.FullYRes())
	{
		std::cerr<<"The device depth and image resolution must be equal!"<<std::endl;
		exit(1);
	}

	// RGB is the only image format supported.
	if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		std::cerr<<"The device image format must be RGB24"<<std::endl;
		exit(1);
	}
	
	const XnDepthPixel* pDepth=pDepthMap;
	const XnUInt8* pImage=pImageMap;
	
	XnDepthPixel zMax = depthMD.ZRes();
	//scale each 16-bit depth sample into the 8-bit depth byte buffer
	for ( unsigned int i=0; i<(depthMD.XRes() * depthMD.YRes()); ++i )
	{
		*(_depthBufferByte + i) = 255 * (float(*(pDepth + i)) / float(zMax));
	}

	memcpy(_videoStreamList[0]->data(),pImage, _videoStreamList[0]->getImageSizeInBytes());
	
	memcpy(_videoStreamList[1]->data(),_depthBufferByte, _videoStreamList[1]->getImageSizeInBytes());

	//3. don't forget to call this to notify the rest of the application
	//that you have a new video image
	_videoStreamList[0]->dirty();
	_videoStreamList[1]->dirty();
	}

	//4. report the capture time to the stats collector, if one is attached
	if (nv) {

		const osg::FrameStamp *framestamp = nv->getFrameStamp();

		if (framestamp && _stats.valid())
		{
			_stats->setAttribute(framestamp->getFrameNumber(),
				"Capture time taken", t.time_m());
		}
	}


	// Increase modified count every X ms to ensure tracker updates
	if (updateTimer.time_m() > 50) {
		_videoStreamList[0]->dirty();
		_videoStreamList[1]->dirty();
		updateTimer.setStartTick();
	}

	return true;
}
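The depth handling above scales each 16-bit depth sample into an 8-bit grey value relative to the sensor's maximum range before copying it into the second video stream. The same conversion as a standalone sketch (the function and buffer names are illustrative, not part of the plugin):

#include <cstdint>
#include <cstddef>

// Sketch of the depth scaling above: map 16-bit depth samples into 8-bit
// values proportional to depth/zMax, where zMax is the sensor's depth range.
void depthToByte(const uint16_t* depth, uint8_t* out, std::size_t count, uint16_t zMax)
{
    for (std::size_t i = 0; i < count; ++i)
        out[i] = static_cast<uint8_t>(255.0f * (float(depth[i]) / float(zMax)));
}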