Exemplo n.º 1
0
// This method is called in any case for displaying a laser scan.
//  We keep an internal list of recent scans so they don't vanish instantaneously.
/** Displays all laser scans / 3D range scans contained in a sensory frame.
 *  An internal list of recently-seen OpenGL objects (m_gl_objects, keyed by
 *  sensor label) is maintained so scans don't vanish instantaneously: objects
 *  are reused across calls and only removed once they become too old.
 *  \param sf  The sensory frame whose observations are to be rendered.
 *             Observations are load()'ed on entry and unload()'ed before
 *             returning, so externally-stored data is not kept in memory.
 */
void CScanAnimation::BuildMapAndRefresh(CSensoryFrame *sf)
{
	WX_START_TRY

	// Preprocess: make sure 3D observations are ready:
	std::vector<CObservation3DRangeScanPtr> obs3D_to_clear;
	for (CSensoryFrame::iterator it=sf->begin();it!=sf->end();++it)
	{
		(*it)->load();
		// force generate 3D point clouds:
		if (IS_CLASS(*it, CObservation3DRangeScan))
		{
			CObservation3DRangeScanPtr o= CObservation3DRangeScanPtr(*it);
			if (o->hasRangeImage && !o->hasPoints3D)
			{
				o->project3DPointsFromDepthImage();
				// Remember which observations WE projected, so the memory can
				// be released again in the post-process step at the end.
				obs3D_to_clear.push_back(o);
			}
		}
	}


	// Mix?  (m_mixlasers: whether scans from successive frames accumulate)
	if (!m_mixlasers)
	{
		// if not, just clear all old objects:
		for (TListGlObjects::iterator it = m_gl_objects.begin();it!=m_gl_objects.end();++it)
		{
			TRenderObject &ro = it->second;
			m_plot3D->m_openGLScene->removeObject(ro.obj);  // Remove from the opengl viewport
		}
		m_gl_objects.clear();
	}

	// Insert new scans:
	// tim_last tracks the newest timestamp among the scans in this frame; it
	// is later used as the reference time for expiring old objects.
	mrpt::system::TTimeStamp  	tim_last = INVALID_TIMESTAMP;
	bool						wereScans = false;
	for (CSensoryFrame::iterator it=sf->begin();it!=sf->end();++it)
	{
		if (IS_CLASS(*it,CObservation2DRangeScan))
		{
			CObservation2DRangeScanPtr obs = CObservation2DRangeScanPtr(*it);
			wereScans = true;
			if (tim_last==INVALID_TIMESTAMP || tim_last<obs->timestamp)
				tim_last = obs->timestamp;

			// Already in the map with the same sensor label?
			TListGlObjects::iterator it_gl = m_gl_objects.find(obs->sensorLabel);
			if (it_gl!=m_gl_objects.end())
			{
				// Update existing object:
				TRenderObject &ro = it_gl->second;
				CPlanarLaserScanPtr(ro.obj)->setScan(*obs);
				ro.timestamp = obs->timestamp;
			}
			else
			{
				// Create object:
				CPlanarLaserScanPtr gl_obj = CPlanarLaserScan::Create();
				gl_obj->setScan(*obs);

				TRenderObject ro;
				ro.obj = gl_obj;
				ro.timestamp = obs->timestamp;
				m_gl_objects[obs->sensorLabel]=ro;
				m_plot3D->m_openGLScene->insert( gl_obj );
			}
		}
		else
		if (IS_CLASS(*it,CObservation3DRangeScan))
		{
			CObservation3DRangeScanPtr obs = CObservation3DRangeScanPtr(*it);
			wereScans = true;
			if (tim_last==INVALID_TIMESTAMP || tim_last<obs->timestamp)
				tim_last = obs->timestamp;

			// Build a temporary coloured point map from the 3D scan; it is
			// only used to feed the OpenGL point-cloud object below.
			CColouredPointsMap  pointMap;
			pointMap.colorScheme.scheme = CColouredPointsMap::cmFromIntensityImage;
			pointMap.insertionOptions.minDistBetweenLaserPoints = 0;

			pointMap.insertObservation( obs.pointer() );

			// Already in the map with the same sensor label?
			TListGlObjects::iterator it_gl = m_gl_objects.find(obs->sensorLabel);
			if (it_gl!=m_gl_objects.end())
			{
				// Update existing object:
				TRenderObject &ro = it_gl->second;
				CPointCloudColouredPtr gl_obj = CPointCloudColouredPtr(ro.obj);
				gl_obj->loadFromPointsMap(&pointMap);
				ro.timestamp = obs->timestamp;
			}
			else
			{
				// Create object:
				CPointCloudColouredPtr gl_obj = CPointCloudColoured::Create();
				gl_obj->setPointSize(3.0);
				gl_obj->loadFromPointsMap(&pointMap);

				TRenderObject ro;
				ro.obj = gl_obj;
				ro.timestamp = obs->timestamp;
				m_gl_objects[obs->sensorLabel]=ro;
				m_plot3D->m_openGLScene->insert( gl_obj );
			}


//
			// Add to list:
//				m_lstScans[obs->sensorLabel] = obs;
		}
	}

	// Check what observations are too old and must be deleted:
	// (older than largest_period seconds w.r.t. the newest scan in this frame)
	const double largest_period = 0.2;
	vector_string lst_to_delete;
	for (TListGlObjects::iterator it=m_gl_objects.begin();it!=m_gl_objects.end();++it)
	{
		TRenderObject &ro = it->second;

		if ((tim_last==INVALID_TIMESTAMP && wereScans)  // Scans without timestamps
			||
			(tim_last!=INVALID_TIMESTAMP && fabs(mrpt::system::timeDifference( ro.timestamp, tim_last)) > largest_period ))
		{
			lst_to_delete.push_back(it->first);
		}
	}

	// Remove too old observations:
	// (deletion is deferred to this second pass so the map is not mutated
	//  while being iterated above)
	for (vector_string::iterator s=lst_to_delete.begin();s!=lst_to_delete.end();++s)
	{
		TRenderObject &ro = m_gl_objects[*s];

		m_plot3D->m_openGLScene->removeObject(ro.obj);  // Remove from the opengl viewport
		m_gl_objects.erase(*s); // and from my list
	}

	// Force refresh view:
	m_plot3D->Refresh();

	// Post-process: unload 3D observations.
	for (CSensoryFrame::iterator it=sf->begin();it!=sf->end();++it)
		(*it)->unload();
	// Also release the point clouds we generated ourselves in the preprocess
	// step, restoring the observations to their original state:
	for (size_t i=0;i<obs3D_to_clear.size();i++)
	{
		obs3D_to_clear[i]->resizePoints3DVectors(0);
		obs3D_to_clear[i]->hasPoints3D = false;
	}


	WX_END_TRY
}
Exemplo n.º 2
0
/** OpenNI2 demo: grabs RGB+D frames, shows the coloured 3D point cloud, the
 *  intensity image, and a 2D "fake laser" scan derived from the depth image.
 *  Optional single CLI argument: a sensor index (<=10) or a device serial
 *  number (>10).  Returns 0 on success, 1 on bad usage, -1 on exceptions.
 */
int main ( int argc, char** argv )
{
	try
	{
		if (argc>2)
		{
			cerr << "Usage: " << argv[0] << " <sensor_id/sensor_serial>\n";
			cerr << "Example: " << argv[0] << " 0 \n";
			return 1;
		}

		COpenNI2Sensor rgbd_sensor;

		unsigned sensor_id_or_serial = 0;
		if(argc == 2)
		{
			sensor_id_or_serial = atoi(argv[1]);
			// Heuristic used by this demo: values > 10 are interpreted as
			// device serial numbers, smaller values as sensor indices.
			if(sensor_id_or_serial > 10)
				rgbd_sensor.setSerialToOpen(sensor_id_or_serial);
			else
				rgbd_sensor.setSensorIDToOpen(sensor_id_or_serial);
		}

		// Open the sensor:
		rgbd_sensor.initialize();

		if(rgbd_sensor.getNumDevices() == 0)
			return 0;  // Nothing to show.

		cout << "OK " << rgbd_sensor.getNumDevices() << " available devices."  << endl;
		cout << "\nUse device " << sensor_id_or_serial << endl << endl;

		// Create window and prepare OpenGL objects in the scene:
		// --------------------------------------------------------
		mrpt::gui::CDisplayWindow3D  win3D("OpenNI2 3D view",800,600);

		win3D.setCameraAzimuthDeg(140);
		win3D.setCameraElevationDeg(20);
		win3D.setCameraZoom(8.0);
		win3D.setFOV(90);
		win3D.setCameraPointingToPoint(2.5,0,0);

		// Coloured point cloud, filled from each new depth frame:
		mrpt::opengl::CPointCloudColouredPtr gl_points = mrpt::opengl::CPointCloudColoured::Create();
		gl_points->setPointSize(2.5);

		// The 2D "laser scan" OpenGL object:
		mrpt::opengl::CPlanarLaserScanPtr gl_2d_scan = mrpt::opengl::CPlanarLaserScan::Create();
		gl_2d_scan->enablePoints(true);
		gl_2d_scan->enableLine(true);
		gl_2d_scan->enableSurface(true);
		gl_2d_scan->setSurfaceColor(0,0,1, 0.3);  // RGBA

		opengl::COpenGLViewportPtr viewInt; // Extra viewport for the RGB image.
		{
			mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();

			// Populate the 3D scene:
			scene->insert( gl_points );
			scene->insert( mrpt::opengl::CGridPlaneXY::Create() );
			scene->insert( mrpt::opengl::stock_objects::CornerXYZ() );
			scene->insert( gl_2d_scan );

			const double aspect_ratio =  480.0 / 640.0;
			const int VW_WIDTH = 400;	// Size of the viewport into the window, in pixel units.
			const int VW_HEIGHT = aspect_ratio*VW_WIDTH;

			// Create an extra opengl viewport for the RGB image:
			viewInt = scene->createViewport("view2d_int");
			viewInt->setViewportPosition(5, 30, VW_WIDTH,VW_HEIGHT );
			win3D.addTextMessage(10, 30+VW_HEIGHT+10,"Intensity data",TColorf(1,1,1), 2, MRPT_GLUT_BITMAP_HELVETICA_12 );

			win3D.addTextMessage(5,5,
				format("'o'/'i'-zoom out/in, ESC: quit"),
				TColorf(0,0,1), 110, MRPT_GLUT_BITMAP_HELVETICA_18 );

			win3D.unlockAccess3DScene();
			win3D.repaint();
		}

		// Grab frames continuously and show them:
		// ========================================================================================
		bool bObs = false, bError = true;
		mrpt::system::TTimeStamp  last_obs_tim = INVALID_TIMESTAMP;

		while (!win3D.keyHit())	// Push any key to exit
		{
			CObservation3DRangeScanPtr newObs = CObservation3DRangeScan::Create();
			rgbd_sensor.getNextObservation(*newObs, bObs, bError);

			CObservation2DRangeScanPtr obs_2d; // The equivalent 2D scan (may stay NULL this frame)

			if (bObs && !bError && newObs && newObs->timestamp!=INVALID_TIMESTAMP && newObs->timestamp!=last_obs_tim )
			{
				// It IS a new observation:
				last_obs_tim = newObs->timestamp;

				// Convert ranges to an equivalent 2D "fake laser" scan:
				if (newObs->hasRangeImage )
				{
					obs_2d = CObservation2DRangeScan::Create();

					T3DPointsTo2DScanParams p2s;
					// NOTE(review): vert_FOV is presumably a global defined
					// elsewhere in this file; both half-angles are set
					// symmetrically from it -- confirm against the declaration.
					p2s.angle_sup = .5f*vert_FOV;
					p2s.angle_inf = .5f*vert_FOV;
					p2s.sensorLabel = "KINECT_2D_SCAN";
					newObs->convertTo2DScan(*obs_2d, p2s);
				}

				// Update visualization ---------------------------------------
				win3D.get3DSceneAndLock();

				// Estimated grabbing rate:
				win3D.addTextMessage(-350,-13, format("Timestamp: %s", mrpt::system::dateTimeLocalToString(last_obs_tim).c_str()), TColorf(0.6,0.6,0.6),"mono",10,mrpt::opengl::FILL, 100);

				// Show intensity image:
				if (newObs->hasIntensityImage )
				{
					viewInt->setImageView(newObs->intensityImage); // This is not "_fast" since the intensity image may be needed later on.
				}
				win3D.unlockAccess3DScene();

				// Create 3D points from the RGB+D data (RGB+D --> XYZ+RGB opengl).
				// See: http://www.mrpt.org/Generating_3D_point_clouds_from_RGB_D_observations
				if (newObs->hasRangeImage)
				{
					win3D.get3DSceneAndLock();
					newObs->project3DPointsFromDepthImageInto(*gl_points, false /* without obs.sensorPose */);
					win3D.unlockAccess3DScene();
				}

				// And load the scan into the OpenGL object.
				// BUGFIX: only do so if a 2D scan was actually generated this
				// frame; otherwise obs_2d is a NULL smart pointer and
				// dereferencing it would crash.
				if (obs_2d)
				{
					win3D.get3DSceneAndLock();
					gl_2d_scan->setScan(*obs_2d);
					win3D.unlockAccess3DScene();
				}

				win3D.repaint();
			} // end update visualization
		}

		cout << "\nClosing RGBD sensor...\n";

		return 0;

	} catch (std::exception &e)
	{
		std::cout << "MRPT exception caught: " << e.what() << std::endl;
		return -1;
	}
	catch (...)
	{
		printf("Untyped exception!!");
		return -1;
	}
}
Exemplo n.º 3
0
// ------------------------------------------------------
//				TestCamera3DFaceDetection
// ------------------------------------------------------
/** Live face detection on a 3D camera stream: grabs CObservation3DRangeScan
 *  frames from \a cam, runs the (file-global) faceDetector on each one, draws
 *  a red rectangle around every detected face in the intensity image, and
 *  renders the full coloured point cloud in a 3D window.  If the file-global
 *  flag showEachDetectedFace is set, each detected face region is also shown
 *  in its own 2D and 3D windows.  Runs until the "Live video" window closes
 *  or the camera signals end-of-stream.
 */
void TestCamera3DFaceDetection( CCameraSensorPtr cam )
{
	CDisplayWindow  win("Live video");
	CDisplayWindow  win2("FaceDetected");

	cout << "Close the window to exit." << endl;

	mrpt::gui::CDisplayWindow3D  win3D("3D camera view",800,600);
	mrpt::gui::CDisplayWindow3D  win3D2;

	win3D.setCameraAzimuthDeg(140);
	win3D.setCameraElevationDeg(20);
	win3D.setCameraZoom(6.0);
	win3D.setCameraPointingToPoint(2.5,0,0);

	mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();
	mrpt::opengl::COpenGLScenePtr scene2;

	// Point cloud for the whole frame (win3D) and for a single face (win3D2):
	mrpt::opengl::CPointCloudColouredPtr gl_points = mrpt::opengl::CPointCloudColoured::Create();
	gl_points->setPointSize(4.5);

	mrpt::opengl::CPointCloudColouredPtr gl_points2 = mrpt::opengl::CPointCloudColoured::Create();
	gl_points2->setPointSize(4.5);

	// Create the Opengl object for the point cloud:
	scene->insert( gl_points );
	scene->insert( mrpt::opengl::CGridPlaneXY::Create() );
	scene->insert( mrpt::opengl::stock_objects::CornerXYZ() );

	win3D.unlockAccess3DScene();

	// The secondary 3D window is only configured when per-face display is on:
	if ( showEachDetectedFace )
	{
		win3D2.setWindowTitle("3D Face detected");
		win3D2.resize(400,300);

		win3D2.setCameraAzimuthDeg(140);
		win3D2.setCameraElevationDeg(20);
		win3D2.setCameraZoom(6.0);
		win3D2.setCameraPointingToPoint(2.5,0,0);

		scene2 = win3D2.get3DSceneAndLock();

		scene2->insert( gl_points2 );
		scene2->insert( mrpt::opengl::CGridPlaneXY::Create() );

		win3D2.unlockAccess3DScene();
	}

	// counter counts frames within the current FPS measurement window (of 10).
	double counter = 0;
	mrpt::utils::CTicTac	tictac;

	CVectorDouble fps;

	while (win.isOpen())
	{
		// Restart the FPS stopwatch at the start of each 10-frame window:
		if( !counter )
			tictac.Tic();

		CObservation3DRangeScanPtr o;

		try{
			o = CObservation3DRangeScanPtr(cam->getNextFrame());
		}
		catch ( CExceptionEOF &)
		{
			// End of the input stream (e.g. rawlog playback finished):
			break;
		}
		ASSERT_(o);

		vector_detectable_object detected;

		// faceDetector is a global defined elsewhere in this file.
		faceDetector.detectObjects( o, detected );

		if ( detected.size() > 0 )
		{
			for ( unsigned int i = 0; i < detected.size(); i++ )
			{
				ASSERT_( IS_CLASS(detected[i],CDetectable3D ) )
				CDetectable3DPtr obj = CDetectable3DPtr( detected[i] );

				if ( showEachDetectedFace )
				{
					// Extract the face sub-region as its own observation:
					CObservation3DRangeScan face;
					o->getZoneAsObs( face, obj->m_y, obj->m_y + obj->m_height, obj->m_x, obj->m_x + obj->m_width );
					win2.showImage( face.intensityImage );

					if ( o->hasPoints3D )
					{
						win3D2.get3DSceneAndLock();

						CColouredPointsMap pntsMap;

						if ( !o->hasConfidenceImage )
						{
							pntsMap.colorScheme.scheme = CColouredPointsMap::cmFromIntensityImage;
							pntsMap.loadFromRangeScan( face );
						}
						else
						{
							// Keep only the 3D points whose confidence exceeds the
							// detector's threshold.  NOTE: this inner `i` (flat point
							// index) intentionally shadows the outer detection index.
							vector<float> xs, ys, zs;
							unsigned int i = 0;
							for ( unsigned int j = 0; j < face.confidenceImage.getHeight(); j++ )
								for ( unsigned int k = 0; k < face.confidenceImage.getWidth(); k++, i++ )
								{
									unsigned char c = *(face.confidenceImage.get_unsafe( k, j, 0 ));
									if ( c > faceDetector.m_options.confidenceThreshold )
									{
										xs.push_back( face.points3D_x[i] );
										ys.push_back( face.points3D_y[i] );
										zs.push_back( face.points3D_z[i] );
									}
								}

							pntsMap.setAllPoints( xs, ys, zs );
						}

						gl_points2->loadFromPointsMap(&pntsMap);

						win3D2.unlockAccess3DScene();
						win3D2.repaint();
					}
				}

				// Mark the detection on the live intensity image:
				o->intensityImage.rectangle( obj->m_x, obj->m_y, obj->m_x+obj->m_width, obj->m_y + obj->m_height, TColor(255,0,0) );
			}
		}

		win.showImage(o->intensityImage);

		// Render the full frame as a coloured point cloud:
		win3D.get3DSceneAndLock();
		CColouredPointsMap pntsMap;
		pntsMap.colorScheme.scheme = CColouredPointsMap::cmFromIntensityImage;
		pntsMap.loadFromRangeScan(*(o.pointer()));

		gl_points->loadFromPointsMap(&pntsMap);
		win3D.unlockAccess3DScene();
		win3D.repaint();

		// Every 10 frames, record and print the measured frame rate:
		if( ++counter == 10 )
		{
			double t = tictac.Tac();
			cout << "Frame Rate: " << counter/t << " fps" << endl;
			fps.push_back( counter/t );
			counter = 0;
		}
		mrpt::system::sleep(2);
	}

	// NOTE(review): if the loop exits before 10 frames were grabbed, fps is
	// empty and this prints sum/0 -- consider guarding against fps.size()==0.
	cout << "Fps mean: " << fps.sumAll() / fps.size() << endl;

	faceDetector.experimental_showMeasurements();

	cout << "Closing..." << endl;
}
Exemplo n.º 4
0
/** OpenNI2 demo: grabs RGB+D frames and either shows just the intensity image
 *  (showImages branch) or a live coloured 3D point cloud plus the intensity
 *  image in an extra viewport.  Optional single CLI argument: a sensor index
 *  (<=10) or a device serial number (>10).  Returns 0 on success, 1 on bad
 *  usage, -1 on exceptions.
 */
int main ( int argc, char** argv )
{
	try
	{
		if (argc>2)
		{
			cerr << "Usage: " << argv[0] << " <sensor_id/sensor_serial>\n";
			cerr << "Example: " << argv[0] << " 0 \n";
			return 1;
		}

		COpenNI2Sensor rgbd_sensor;

		unsigned sensor_id_or_serial = 0;
		if(argc == 2)
		{
			sensor_id_or_serial = atoi(argv[1]);
			// Heuristic used by this demo: values > 10 are interpreted as
			// device serial numbers, smaller values as sensor indices.
			if(sensor_id_or_serial > 10)
				rgbd_sensor.setSerialToOpen(sensor_id_or_serial);
			else
				rgbd_sensor.setSensorIDToOpen(sensor_id_or_serial);
		}

		// Open the sensor:
		rgbd_sensor.initialize();

		if(rgbd_sensor.getNumDevices() == 0)
			return 0;  // Nothing to show.

		cout << "OK " << rgbd_sensor.getNumDevices() << " available devices."  << endl;
		cout << "\nUse device " << sensor_id_or_serial << endl << endl;

		bool showImages = false;
		if(showImages)
		{
			// Plain 2D visualization: show only the intensity channel and
			// print the estimated grabbing rate.
			mrpt::gui::CDisplayWindow   win("Video");

			CTicTac	tictac;
			tictac.Tic();
			unsigned int nFrames = 0;
			CObservation3DRangeScan newObs;
			bool bObs = false, bError = true;
			rgbd_sensor.getNextObservation(newObs, bObs, bError);

			while(!system::os::kbhit())
			{
				cout << "Get a new frame\n";
				rgbd_sensor.getNextObservation(newObs, bObs, bError);

				double fps = ++nFrames / tictac.Tac();
				cout << "FPS: " << fps << endl;

				// Restart the FPS estimation window every 100 frames:
				if (nFrames>100)
				{
					tictac.Tic();
					nFrames=0;
				}

				win.showImage(newObs.intensityImage);
				mrpt::system::sleep(10);
			}
		}
		else // Show point cloud and images
		{
			// Create window and prepare OpenGL objects in the scene:
			// --------------------------------------------------------
			mrpt::gui::CDisplayWindow3D  win3D("OpenNI2 3D view",800,600);

			win3D.setCameraAzimuthDeg(140);
			win3D.setCameraElevationDeg(20);
			win3D.setCameraZoom(8.0);
			win3D.setFOV(90);
			win3D.setCameraPointingToPoint(2.5,0,0);

			// Coloured point cloud, filled from each new depth frame:
			mrpt::opengl::CPointCloudColouredPtr gl_points = mrpt::opengl::CPointCloudColoured::Create();
			gl_points->setPointSize(2.5);

			opengl::COpenGLViewportPtr viewInt; // Extra viewport for the RGB image.
			{
				mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();

				// Populate the 3D scene:
				scene->insert( gl_points );
				scene->insert( mrpt::opengl::CGridPlaneXY::Create() );
				scene->insert( mrpt::opengl::stock_objects::CornerXYZ() );

				const double aspect_ratio =  480.0 / 640.0;
				const int VW_WIDTH = 400;	// Size of the viewport into the window, in pixel units.
				const int VW_HEIGHT = aspect_ratio*VW_WIDTH;

				// Create an extra opengl viewport for the RGB image:
				viewInt = scene->createViewport("view2d_int");
				viewInt->setViewportPosition(5, 30, VW_WIDTH,VW_HEIGHT );
				win3D.addTextMessage(10, 30+VW_HEIGHT+10,"Intensity data",TColorf(1,1,1), 2, MRPT_GLUT_BITMAP_HELVETICA_12 );

				win3D.addTextMessage(5,5,
					format("'o'/'i'-zoom out/in, ESC: quit"),
					TColorf(0,0,1), 110, MRPT_GLUT_BITMAP_HELVETICA_18 );

				win3D.unlockAccess3DScene();
				win3D.repaint();
			}

			// Grab frames continuously and show them:
			// ========================================================================================
			bool bObs = false, bError = true;
			mrpt::system::TTimeStamp  last_obs_tim = INVALID_TIMESTAMP;

			while (!win3D.keyHit())	// Push any key to exit
			{
				CObservation3DRangeScanPtr newObs = CObservation3DRangeScan::Create();
				rgbd_sensor.getNextObservation(*newObs, bObs, bError);

				if (bObs && !bError && newObs && newObs->timestamp!=INVALID_TIMESTAMP && newObs->timestamp!=last_obs_tim )
				{
					// It IS a new observation:
					last_obs_tim = newObs->timestamp;

					// Update visualization ---------------------------------------
					win3D.get3DSceneAndLock();

					// Estimated grabbing rate:
					win3D.addTextMessage(-350,-13, format("Timestamp: %s", mrpt::system::dateTimeLocalToString(last_obs_tim).c_str()), TColorf(0.6,0.6,0.6),"mono",10,mrpt::opengl::FILL, 100);

					// Show intensity image:
					if (newObs->hasIntensityImage )
					{
						viewInt->setImageView(newObs->intensityImage); // This is not "_fast" since the intensity image may be needed later on.
					}
					win3D.unlockAccess3DScene();

					// -------------------------------------------------------
					//           Create 3D points from RGB+D data
					//
					// There are several methods to do this; switch the #if's
					// to select among the options.  See also:
					// http://www.mrpt.org/Generating_3D_point_clouds_from_RGB_D_observations
					// -------------------------------------------------------
					if (newObs->hasRangeImage)
					{
#if 0
						// Pathway: RGB+D --> PCL cloud --> XYZ+RGB opengl
						static pcl::PointCloud<pcl::PointXYZRGB> cloud;
						newObs->project3DPointsFromDepthImageInto(cloud, false /* without obs.sensorPose */);

						win3D.get3DSceneAndLock();
							gl_points->loadFromPointsMap(&cloud);
						win3D.unlockAccess3DScene();
#endif

						// Pathway: RGB+D --> XYZ+RGB opengl (direct)
#if 1
						win3D.get3DSceneAndLock();
							newObs->project3DPointsFromDepthImageInto(*gl_points, false /* without obs.sensorPose */);
						win3D.unlockAccess3DScene();
#endif
					}

					win3D.repaint();
				} // end update visualization
			}
		}

		cout << "\nClosing RGBD sensor...\n";

		return 0;

	} catch (std::exception &e)
	{
		std::cout << "MRPT exception caught: " << e.what() << std::endl;
		return -1;
	}
	catch (...)
	{
		printf("Untyped exception!!");
		return -1;
	}
}
Exemplo n.º 5
0
// ------------------------------------------------------
//				Test_KinectOnlineOffline
// ------------------------------------------------------
/** Kinect demo: spawns a grabbing thread (live sensor or rawlog playback,
 *  depending on \a is_online), waits for the data stream to start, and then
 *  continuously renders the latest RGB+D observation as a coloured 3D point
 *  cloud plus the intensity image in an extra viewport.  Keyboard: 'o'/'i'
 *  zoom out/in, ESC quits.  Returns when the window closes or the grabbing
 *  thread signals quit.
 *  \param is_online    true to grab from a live sensor, false for rawlog.
 *  \param rawlog_file  Path of the rawlog to replay when offline.
 */
void Test_KinectOnlineOffline(bool is_online, const string &rawlog_file = string())
{
	// Launch grabbing thread:
	// --------------------------------------------------------
	TThreadParam thrPar(
		is_online,
		rawlog_file,
		false // generate_3D_pointcloud_in_this_thread -> Don't, we'll do it in this main thread.
		);

	mrpt::system::TThreadHandle thHandle= mrpt::system::createThreadRef(thread_grabbing ,thrPar);

	// Wait until data stream starts so we can say for sure the sensor has been initialized OK:
	cout << "Waiting for sensor initialization...\n";
	do {
		CObservation3DRangeScanPtr newObs = thrPar.new_obs.get();
		if (newObs && newObs->timestamp!=INVALID_TIMESTAMP)
				break;
		else 	mrpt::system::sleep(10);
	} while (!thrPar.quit);

	// Check error condition: the grabbing thread sets quit on failure.
	if (thrPar.quit) return;
	cout << "OK! Sensor started to emit observations.\n";

	// Create window and prepare OpenGL object in the scene:
	// --------------------------------------------------------
	mrpt::gui::CDisplayWindow3D  win3D("Kinect 3D view",800,600);

	win3D.setCameraAzimuthDeg(140);
	win3D.setCameraElevationDeg(20);
	win3D.setCameraZoom(8.0);
	win3D.setFOV(90);
	win3D.setCameraPointingToPoint(2.5,0,0);

	// Coloured point cloud, refilled from each new depth frame:
	mrpt::opengl::CPointCloudColouredPtr gl_points = mrpt::opengl::CPointCloudColoured::Create();
	gl_points->setPointSize(2.5);

	opengl::COpenGLViewportPtr viewInt; // Extra viewports for the RGB images.
	{
		mrpt::opengl::COpenGLScenePtr &scene = win3D.get3DSceneAndLock();

		// Create the Opengl object for the point cloud:
		scene->insert( gl_points );
		scene->insert( mrpt::opengl::CGridPlaneXY::Create() );
		scene->insert( mrpt::opengl::stock_objects::CornerXYZ() );

		const double aspect_ratio =  480.0 / 640.0;
		const int VW_WIDTH = 400;	// Size of the viewport into the window, in pixel units.
		const int VW_HEIGHT = aspect_ratio*VW_WIDTH;

		// Create an extra opengl viewport for the RGB image:
		viewInt = scene->createViewport("view2d_int");
		viewInt->setViewportPosition(5, 30, VW_WIDTH,VW_HEIGHT );
		win3D.addTextMessage(10, 30+VW_HEIGHT+10,"Intensity data",TColorf(1,1,1), 2, MRPT_GLUT_BITMAP_HELVETICA_12 );

		win3D.addTextMessage(5,5,
			format("'o'/'i'-zoom out/in, ESC: quit"),
				TColorf(0,0,1), 110, MRPT_GLUT_BITMAP_HELVETICA_18 );


		win3D.unlockAccess3DScene();
		win3D.repaint();
	}



	mrpt::system::TTimeStamp  last_obs_tim = INVALID_TIMESTAMP;

	while (win3D.isOpen() && !thrPar.quit)
	{
		// new_obs is the shared slot written by the grabbing thread; the
		// timestamp comparison avoids re-rendering the same frame twice.
		CObservation3DRangeScanPtr newObs = thrPar.new_obs.get();
		if (newObs && newObs->timestamp!=INVALID_TIMESTAMP &&
			newObs->timestamp!=last_obs_tim )
		{
			// It IS a new observation:
			last_obs_tim = newObs->timestamp;

			// Update visualization ---------------------------------------

			win3D.get3DSceneAndLock();

			// Estimated grabbing rate:
			win3D.addTextMessage(-350,-13, format("Timestamp: %s", mrpt::system::dateTimeLocalToString(last_obs_tim).c_str()), TColorf(0.6,0.6,0.6),"mono",10,mrpt::opengl::FILL, 100);
			win3D.addTextMessage(-100,-30, format("%.02f Hz", thrPar.Hz ), TColorf(1,1,1),"mono",10,mrpt::opengl::FILL, 101);

			// Show intensity image:
			if (newObs->hasIntensityImage )
			{
				viewInt->setImageView(newObs->intensityImage); // This is not "_fast" since the intensity image may be needed later on.
			}
			win3D.unlockAccess3DScene();

			// -------------------------------------------------------
			//           Create 3D points from RGB+D data
			//
			// There are several methods to do this.
			//  Switch the #if's to select among the options:
			// See also: http://www.mrpt.org/Generating_3D_point_clouds_from_RGB_D_observations
			// -------------------------------------------------------
			if (newObs->hasRangeImage)
			{
				static mrpt::utils::CTimeLogger logger;
				logger.enter("RGBD->3D");

// Pathway: RGB+D --> PCL <PointXYZ> --> XYZ opengl
#if 0
				static pcl::PointCloud<pcl::PointXYZ> cloud;
				logger.enter("RGBD->3D.projectInto");
				newObs->project3DPointsFromDepthImageInto(cloud, false /* without obs.sensorPose */);
				logger.leave("RGBD->3D.projectInto");

				win3D.get3DSceneAndLock();
				logger.enter("RGBD->3D.load in OpenGL");
					gl_points->loadFromPointsMap(&cloud);
				logger.leave("RGBD->3D.load in OpenGL");
				win3D.unlockAccess3DScene();
#endif

// Pathway: RGB+D --> PCL <PointXYZRGB> --> XYZ+RGB opengl
#if 0
				static pcl::PointCloud<pcl::PointXYZRGB> cloud;
				logger.enter("RGBD->3D.projectInto");
				newObs->project3DPointsFromDepthImageInto(cloud, false /* without obs.sensorPose */);
				logger.leave("RGBD->3D.projectInto");

				win3D.get3DSceneAndLock();
				logger.enter("RGBD->3D.load in OpenGL");
					gl_points->loadFromPointsMap(&cloud);
				logger.leave("RGBD->3D.load in OpenGL");
				win3D.unlockAccess3DScene();
#endif

// Pathway: RGB+D --> XYZ+RGB opengl   (the active pathway)
#if 1
				win3D.get3DSceneAndLock();
				logger.enter("RGBD->3D.projectInto");
					newObs->project3DPointsFromDepthImageInto(*gl_points, false /* without obs.sensorPose */);
				logger.leave("RGBD->3D.projectInto");
				win3D.unlockAccess3DScene();
#endif

// Pathway: RGB+D --> XYZ+RGB opengl (With a 6D global pose of the robot)
#if 0
				const CPose3D globalPose(1,2,3,DEG2RAD(10),DEG2RAD(20),DEG2RAD(30));
				win3D.get3DSceneAndLock();
				logger.enter("RGBD->3D.projectInto");
					newObs->project3DPointsFromDepthImageInto(*gl_points, false /* without obs.sensorPose */, &globalPose);
				logger.leave("RGBD->3D.projectInto");
				win3D.unlockAccess3DScene();
#endif

// Pathway: RGB+D --> internal local XYZ pointcloud --> XYZ+RGB point cloud map --> XYZ+RGB opengl
#if 0
				// Project 3D points:
				if (!newObs->hasPoints3D)
				{
				logger.enter("RGBD->3D.projectInto");
					newObs->project3DPointsFromDepthImage();
				logger.leave("RGBD->3D.projectInto");
				}

				CColouredPointsMap pntsMap;
				pntsMap.colorScheme.scheme = CColouredPointsMap::cmFromIntensityImage;
				pntsMap.loadFromRangeScan(*newObs);

				win3D.get3DSceneAndLock();
				logger.enter("RGBD->3D.load in OpenGL");
					gl_points->loadFromPointsMap(&pntsMap);
				logger.leave("RGBD->3D.load in OpenGL");
				win3D.unlockAccess3DScene();
#endif

				logger.leave("RGBD->3D");
			}

			win3D.repaint();
		} // end update visualization:


		// Process possible keyboard commands:
		// --------------------------------------
		if (win3D.keyHit())
		{
			const int key = tolower( win3D.getPushedKey() );

			switch(key)
			{
				// Some of the keys are processed in this thread:
				case 'o':
					win3D.setCameraZoom( win3D.getCameraZoom() * 1.2 );
					win3D.repaint();
					break;
				case 'i':
					win3D.setCameraZoom( win3D.getCameraZoom() / 1.2 );
					win3D.repaint();
					break;
				case 27: // ESC
					thrPar.quit = true;
					break;
				default:
					break;
			};
		}

		mrpt::system::sleep(1);
	}


	// Signal the grabbing thread to stop and wait for it before returning:
	cout << "Waiting for grabbing thread to exit...\n";
	thrPar.quit = true;
	mrpt::system::joinThread(thHandle);
	cout << "Bye!\n";
}