const AstraVideoMode astra_convert(const openni::VideoMode& input)
{
  AstraVideoMode output;

  output.x_resolution_ = input.getResolutionX();
  output.y_resolution_ = input.getResolutionY();
  output.frame_rate_ = input.getFps();
  output.pixel_format_ = static_cast<PixelFormat>(input.getPixelFormat());

  return output;
}
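A minimal usage sketch (not part of the original snippet): a helper with a hypothetical name that converts the active mode of an already-opened openni::VideoStream into the wrapper type above.

// Hypothetical helper; 'stream' is assumed to be an opened openni::VideoStream.
static AstraVideoMode query_astra_mode(const openni::VideoStream& stream)
{
  // getVideoMode() returns the stream's currently configured mode.
  return astra_convert(stream.getVideoMode());
}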
Example #2
static void dumpVM( const openni::VideoMode& vm ){
    std::cout << "VideoMode: [" << vm.getResolutionX() << ", " << vm.getResolutionY() << "] @ " << vm.getFps();
    switch( vm.getPixelFormat() ){
        case openni::PIXEL_FORMAT_DEPTH_100_UM: std::cout << " DEPTH_100_UM" << std::endl; return;
        case openni::PIXEL_FORMAT_DEPTH_1_MM:   std::cout << " DEPTH_1_MM"   << std::endl; return;
        case openni::PIXEL_FORMAT_GRAY8:        std::cout << " GRAY_U8"      << std::endl; return;
        case openni::PIXEL_FORMAT_GRAY16:       std::cout << " GRAY_U16"     << std::endl; return;
        case openni::PIXEL_FORMAT_RGB888:       std::cout << " RGB_888"      << std::endl; return;
        case openni::PIXEL_FORMAT_YUYV:         std::cout << " YUYV"         << std::endl; return;
        case openni::PIXEL_FORMAT_YUV422:       std::cout << " YUV422"       << std::endl; return;
        default:
            std::cout << " unknown pxformat" << std::endl;
    }
}
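A hedged usage sketch: dumpVM lends itself to listing every mode a sensor advertises; the helper name listModes is illustrative and not from the original source.

// Illustrative only: print every video mode reported by a stream's sensor.
static void listModes( const openni::VideoStream& stream ){
    const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
    for( int i = 0; i < modes.getSize(); ++i )
        dumpVM( modes[i] );
}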
bool OpenNI2Interface::isModeSupported(const openni::VideoStream& stream, const openni::VideoMode& mode)
{
    const auto& modes = stream.getSensorInfo().getSupportedVideoModes();

    for(int i = 0; i < modes.getSize(); i++)
    {
        if(modes[i].getResolutionX() == mode.getResolutionX() &&
            modes[i].getResolutionY() == mode.getResolutionY() &&
            modes[i].getFps() == mode.getFps() &&
            modes[i].getPixelFormat() == mode.getPixelFormat())
        {
            return true;
        }
    }
    return false;
}
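A sketch of a typical caller (the stream and mode names are placeholders, not from the original): check the requested mode before handing it to setVideoMode so an unsupported configuration fails early.

// Hypothetical use inside OpenNI2Interface; depthStream is an opened openni::VideoStream.
openni::VideoMode wanted;
wanted.setResolution(640, 480);
wanted.setFps(30);
wanted.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);

if(isModeSupported(depthStream, wanted))
{
    depthStream.setVideoMode(wanted);
}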
void OpenNI2Interface::printMode(const openni::VideoMode& mode)
{
    std::map<int,std::string> formatNames;
    formatNames[openni::PIXEL_FORMAT_DEPTH_1_MM] = "1mm";
    formatNames[openni::PIXEL_FORMAT_DEPTH_100_UM] = "100um";
    formatNames[openni::PIXEL_FORMAT_SHIFT_9_2] = "Shift 9 2";
    formatNames[openni::PIXEL_FORMAT_SHIFT_9_3] = "Shift 9 3";

    formatNames[openni::PIXEL_FORMAT_RGB888] = "RGB888";
    formatNames[openni::PIXEL_FORMAT_YUV422] = "YUV422";
    formatNames[openni::PIXEL_FORMAT_GRAY8] = "GRAY8";
    formatNames[openni::PIXEL_FORMAT_GRAY16] = "GRAY16";
    formatNames[openni::PIXEL_FORMAT_JPEG] = "JPEG";

    cout << "(" << mode.getResolutionX() << "x" << mode.getResolutionY()
        << ", " << mode.getFps() << " fps, " << formatNames[mode.getPixelFormat()] << ")\n";
}
void KinectInterfacePrimesense::printMode(const openni::VideoMode& mode) {
  std::cout << "Res: " << mode.getResolutionX() << "x";
  std::cout << mode.getResolutionY() << ", fps = " << mode.getFps();
  std::cout << ", format = " << formatToString(mode.getPixelFormat());
  std::cout << std::endl;
}
Example #6
bool NIModule::Initialize( const string& sDevice, const openni::VideoMode& rMode )
{
	// initialize OpenNI
	m_funcOnInfo( "Initialize OpenNI" );
	if( OpenNI::initialize() != openni::STATUS_OK )
	{
		m_funcOnError( "Can't initialize OpenNI:\n " + string( OpenNI::getExtendedError() ) );
		return false;
	}

	// Open OpenNI Device
	m_funcOnInfo( "Open OpenNI Device" );
	if( sDevice == "" )
	{
		if( m_Device.open( openni::ANY_DEVICE ) != openni::STATUS_OK )
		{
			m_funcOnError( "Can't open OpenNI Device:\n " + string( OpenNI::getExtendedError() ) );
			return false;
		}
	}
	else
	{
		if( m_Device.open( sDevice.c_str() ) != openni::STATUS_OK )
		{
			m_funcOnError( "Can't open OpenNI Device:\n " + string( OpenNI::getExtendedError() ) );
			return false;
		}
	}

	// create depth VideoStream
	m_funcOnInfo( "Create OpenNI Depth VideoStream" );
	if( m_DepthStream.create( m_Device, SENSOR_DEPTH ) != openni::STATUS_OK )
	{
		m_funcOnError( "Can't create OpenNI Depth VideoStream:\n " + string( OpenNI::getExtendedError() ) );
		return false;
	}

	// Apply Video Mode
	if( rMode.getFps() != 0 )
	{
		m_funcOnInfo( "Set OpenNI Depth VideoStream VideoMode" );
		if( m_DepthStream.setVideoMode( rMode ) != openni::STATUS_OK )
		{
			m_funcOnError( "Can't apply OpenNI Depth VideoStream VideoMode:\n " + string( OpenNI::getExtendedError() ) );
			return false;
		}
	}
	m_DepthMode = m_DepthStream.getVideoMode();
	
	// Initialize NiTE
	m_funcOnInfo( "Initialize NiTE" );
	if( NiTE::initialize() != nite::STATUS_OK )
	{
		m_funcOnError( "Can't initialize NiTE" );
		return false;
	}

	// create UserTracker
	m_funcOnInfo( "Cretae NiTE UserTracker" );
	if( m_UserTracker.create( &m_Device ) != nite::STATUS_OK )
	{
		m_funcOnError( "Can't create NiTE User Tracker" );
		return false;
	}
	return true;
}
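A hedged caller sketch for Example #6 (NIModule's constructor and callbacks are not shown in the source, so the surrounding code is an assumption): an empty device string opens openni::ANY_DEVICE, and a VideoMode whose fps is 0 would leave the depth stream at its default mode.

// Illustrative caller; the 640x480 @ 30 fps depth mode is only an example.
bool startNIModule( NIModule& module )
{
	openni::VideoMode depthMode;
	depthMode.setResolution( 640, 480 );
	depthMode.setFps( 30 );
	depthMode.setPixelFormat( openni::PIXEL_FORMAT_DEPTH_1_MM );

	// "" -> openni::ANY_DEVICE inside Initialize()
	return module.Initialize( "", depthMode );
}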