Example #1
openni::Status openDevice(const char* uri, bool defaultRightColor)
{
	openni::Status nRetVal = openni::OpenNI::initialize();
	if (nRetVal != openni::STATUS_OK)
	{
		return nRetVal;
	}

	// Register to OpenNI events.
	static OpenNIDeviceListener deviceListener;
	
	openni::OpenNI::addDeviceDisconnectedListener(&deviceListener);
	openni::OpenNI::addDeviceStateChangedListener(&deviceListener);

	// Open the requested device.
	nRetVal = g_device.open(uri);
	if (nRetVal != openni::STATUS_OK)
	{
		return nRetVal;
	}

	g_pPlaybackControl = g_device.getPlaybackControl();

	openCommon(g_device, defaultRightColor);

	return openni::STATUS_OK;
}
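The examples above register a static OpenNIDeviceListener with OpenNI but never show its declaration. A minimal sketch of such a class, assuming the handlers shown in Example #4 and Example #16 belong to it (the class layout below is inferred, not taken from the original source):

class OpenNIDeviceListener :
	public openni::OpenNI::DeviceDisconnectedListener,
	public openni::OpenNI::DeviceStateChangedListener
{
public:
	virtual void onDeviceDisconnected(const openni::DeviceInfo* pInfo)
	{
		// See Example #4: flag an error if the disconnected device is the one we opened.
	}

	virtual void onDeviceStateChanged(const openni::DeviceInfo* pInfo, openni::DeviceState state)
	{
		// See Example #16: report the new state or clear the error message.
	}
};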
Example #2
void changeRegistration(int value)
{
	openni::ImageRegistrationMode mode = (openni::ImageRegistrationMode)value;
	if (!g_device.isValid() || !g_device.isImageRegistrationModeSupported(mode))
	{
		return;
	}

	g_device.setImageRegistrationMode(mode);
}
Example #3
openni::Status openDevice(const char* uri, DeviceConfig config)
{
	openni::Status nRetVal = openni::OpenNI::initialize();
	if (nRetVal != openni::STATUS_OK)
	{
		return nRetVal;
	}

	// Register to OpenNI events.
	static OpenNIDeviceListener deviceListener;
	
	openni::OpenNI::addDeviceDisconnectedListener(&deviceListener);
	openni::OpenNI::addDeviceStateChangedListener(&deviceListener);

	// Open the requested device.
	nRetVal = g_device.open(uri);
	if (nRetVal != openni::STATUS_OK)
	{
		return nRetVal;
	}

	if (0 != openCommon(g_device, config))
	{
		return openni::STATUS_ERROR;
	}

	// If we are here, then the depth camera has been opened okay.
	// Now let's try the sixense, if requested
	if (config.b_captureSixense) {
		return openSixenseDevice();
	}

	return openni::STATUS_OK;
}
Example #4
	virtual void onDeviceDisconnected(const openni::DeviceInfo* pInfo)
	{
		if (strcmp(pInfo->getUri(), g_device.getDeviceInfo().getUri()) == 0)
		{
			setErrorState("Device disconnected!");
		}
	}
Example #5
void KinectCamera::closecamera(void)
{
    mColorStream.destroy();
    mDepthStream.destroy();
    mDevice.close();
    openni::OpenNI::shutdown();
}
Example #6
openni::Status openDevice(const char* uri, DeviceConfig config)
{
    openni::Status nRetVal = openni::OpenNI::initialize();
    if (nRetVal != openni::STATUS_OK)
    {
        return nRetVal;
    }

    // Register to OpenNI events.
    static OpenNIDeviceListener deviceListener;

    openni::OpenNI::addDeviceDisconnectedListener(&deviceListener);
    openni::OpenNI::addDeviceStateChangedListener(&deviceListener);

    // Open the requested device.
    nRetVal = g_device.open(uri);
    if (nRetVal != openni::STATUS_OK)
    {
        return nRetVal;
    }

    if (0 != openCommon(g_device, config))
    {
        return openni::STATUS_ERROR;
    }

    return openni::STATUS_OK;
}
Example #7
int openCommon(openni::Device& device, DeviceConfig config)
{
    g_pPlaybackControl = g_device.getPlaybackControl();

    int ret;

    ret = openStream(device, "depth", openni::SENSOR_DEPTH, config.openDepth, g_depthStream, &g_depthSensorInfo, &g_bIsDepthOn);
    if (ret != 0)
    {
        return ret;
    }

    ret = openStream(device, "color", openni::SENSOR_COLOR, config.openColor, g_colorStream, &g_colorSensorInfo, &g_bIsColorOn);
    if (ret != 0)
    {
        return ret;
    }

    ret = openStream(device, "IR", openni::SENSOR_IR, config.openIR, g_irStream, &g_irSensorInfo, &g_bIsIROn);
    if (ret != 0)
    {
        return ret;
    }

    initConstants();

    readFrame();

    return 0;
}
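Examples #3, #6, #7, and #10 depend on a DeviceConfig struct and a SensorOpenType enum that are not listed here. A plausible sketch reconstructed from how the fields are used (SENSOR_OFF and SENSOR_ON appear in Example #10; the SENSOR_TRY name and the exact field layout are assumptions):

// Not from the original source: a guess at the configuration types implied above.
typedef enum
{
    SENSOR_OFF,   // do not open the stream at all
    SENSOR_TRY,   // open it if available, silently skip on failure (assumed name)
    SENSOR_ON     // open it and treat any failure as an error
} SensorOpenType;

typedef struct
{
    SensorOpenType openDepth;
    SensorOpenType openColor;
    SensorOpenType openIR;
    bool b_captureSixense;   // consulted only in Example #3
} DeviceConfig;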
Example #8
openni::VideoMode OpenNi2Video::FindOpenNI2Mode(
    openni::Device & device,
    openni::SensorType sensorType,
    int width, int height,
    int fps, openni::PixelFormat fmt
) {
    // Query supported modes for device
    const openni::Array<openni::VideoMode>& modes =
            device.getSensorInfo(sensorType)->getSupportedVideoModes();

    // Select last listed mode which matches parameters
    int best_mode = -1;
    for(int i = 0; i < modes.getSize(); i++) {
        if( (!width || modes[i].getResolutionX() == width) &&
            (!height || modes[i].getResolutionY() == height) &&
            (!fps || modes[i].getFps() == fps) &&
            (!fmt || modes[i].getPixelFormat() == fmt)
        ) {
            best_mode = i;
        }
    }

    if(best_mode >= 0) {
        return modes[best_mode];
    }

    throw pangolin::VideoException("Video mode not supported");
}
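A hedged usage sketch for FindOpenNI2Mode, called from inside OpenNi2Video where device and depth_stream are assumed members: passing 0 for a parameter means "match anything", since the loop above treats 0 as a wildcard.

// Hypothetical call site: request 640x480 depth at 30 fps, any pixel format.
openni::VideoMode mode = FindOpenNI2Mode(device, openni::SENSOR_DEPTH,
                                         640, 480, 30, (openni::PixelFormat)0);
if (depth_stream.setVideoMode(mode) != openni::STATUS_OK) {
    throw pangolin::VideoException("Could not apply the selected video mode");
}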
Example #9
openni::Status openDeviceFromList(bool defaultRightColor)
{
	openni::Status rc = openni::OpenNI::initialize();
	if (rc != openni::STATUS_OK)
	{
		return rc;
	}

	openni::Array<openni::DeviceInfo> deviceList;
	openni::OpenNI::enumerateDevices(&deviceList);

	for (int i = 0; i < deviceList.getSize(); ++i)
	{
		printf("[%d] %s [%s] (%s)\n", i+1, deviceList[i].getName(), deviceList[i].getVendor(), deviceList[i].getUri());
	}

	printf("\n");
	int chosen = 1;

	do
	{
		printf("Choose device to open (1) [0 to exit]: ");

		int scanned = scanf("%d", &chosen);

		if (scanned <= 0 || chosen == 0)
		{
			return openni::STATUS_ERROR;
		}

	} while (chosen < 1 || chosen > deviceList.getSize());

	rc = g_device.open(deviceList[chosen-1].getUri());

	if (rc != openni::STATUS_OK)
	{
		return rc;
	}

	g_pPlaybackControl = g_device.getPlaybackControl();

	openCommon(g_device, defaultRightColor);

	return openni::STATUS_OK;
}
Example #10
int openStream(openni::Device& device, const char* name, openni::SensorType sensorType, SensorOpenType openType, openni::VideoStream& stream, const openni::SensorInfo** ppSensorInfo, bool* pbIsStreamOn)
{
    *ppSensorInfo = device.getSensorInfo(sensorType);
    *pbIsStreamOn = false;

    if (openType == SENSOR_OFF)
    {
        return 0;
    }

    if (*ppSensorInfo == NULL)
    {
        if (openType == SENSOR_ON)
        {
            printf("No %s sensor available\n", name);
            return -1;
        }
        else
        {
            return 0;
        }
    }

    openni::Status nRetVal = stream.create(device, sensorType);
    if (nRetVal != openni::STATUS_OK)
    {
        if (openType == SENSOR_ON)
        {
            printf("Failed to create %s stream:\n%s\n", openni::OpenNI::getExtendedError(), name);
            return -2;
        }
        else
        {
            return 0;
        }
    }

    nRetVal = stream.start();
    if (nRetVal != openni::STATUS_OK)
    {
        stream.destroy();

        if (openType == SENSOR_ON)
        {
            printf("Failed to start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
            return -3;
        }
        else
        {
            return 0;
        }
    }

    *pbIsStreamOn = true;

    return 0;
}
Example #11
void KinectCamera::startcamera(void)
{
    openni::OpenNI::initialize();                           // Initialize OpenNI
    mDevice.open( openni::ANY_DEVICE );                     // Open the device (mDevice is already declared as a global variable)
    mColorStream.create( mDevice, openni::SENSOR_COLOR );   // Create the color stream
    mColorStream.start();                                   // Start the color stream
    mDepthStream.create( mDevice, openni::SENSOR_DEPTH );   // Create the depth stream
    mDepthStream.start();                                   // Start the depth stream
    fig=1;
}
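Once startcamera() has run, frames can be pulled from the two streams with readFrame(). A minimal sketch of a hypothetical KinectCamera::readframes helper (not part of the original class):

// Hypothetical helper; mColorStream and mDepthStream are the members started above.
void KinectCamera::readframes(void)
{
    openni::VideoFrameRef colorFrame, depthFrame;
    if (mColorStream.readFrame(&colorFrame) == openni::STATUS_OK &&
        mDepthStream.readFrame(&depthFrame) == openni::STATUS_OK)
    {
        // colorFrame.getData() / depthFrame.getData() now point at the latest pixel buffers.
    }
}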
Example #12
void toggleFrameSync(int)
{
	if (g_bFrameSyncOn)
	{
		g_device.setDepthColorSyncEnabled(false);
		displayMessage("Frame sync off");
	}
	else
	{
		openni::Status rc = g_device.setDepthColorSyncEnabled(true);
		if (rc != openni::STATUS_OK)
		{
			displayMessage("Can't frame sync");
			return;
		}
		displayMessage("Frame sync on");
	}
	g_bFrameSyncOn = !g_bFrameSyncOn;
}
Example #13
void toggleImageRegistration(int)
{
	openni::ImageRegistrationMode mode = g_device.getImageRegistrationMode();

	openni::ImageRegistrationMode newMode = openni::IMAGE_REGISTRATION_OFF;
	if (mode == openni::IMAGE_REGISTRATION_OFF)
	{
		newMode = openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR;
	}

	if (g_device.isImageRegistrationModeSupported(newMode))
	{
		g_device.setImageRegistrationMode(newMode);
	}
	else
	{
		displayError("Couldn't change image registration to unsupported mode");
	}

}
Example #14
void toggleEmitterState(int)
{
    static OniBool bEmitterState = TRUE;

    if (g_device.getProperty(XN_MODULE_PROPERTY_EMITTER_STATE, &bEmitterState) != XN_STATUS_OK &&
            g_device.getProperty(KINECT_DEVICE_PROPERTY_EMITTER_STATE, &bEmitterState) != XN_STATUS_OK)
    {
        // Continue with the latest value even in case of error
    }

    bEmitterState = !bEmitterState;

    if (g_device.setProperty(XN_MODULE_PROPERTY_EMITTER_STATE, bEmitterState) != XN_STATUS_OK &&
            g_device.setProperty(KINECT_DEVICE_PROPERTY_EMITTER_STATE, bEmitterState) != XN_STATUS_OK)
    {
        displayError("Couldn't set the emitter state");
        return;
    }

    displayMessage ("Emitter state: %s", bEmitterState?"On":"Off");
}
Example #15
void closeDevice()
{
	g_depthStream.stop();
	g_colorStream.stop();
	g_irStream.stop();

	g_depthStream.destroy();
	g_colorStream.destroy();
	g_irStream.destroy();

	g_device.close();

	openni::OpenNI::shutdown();
}
Example #16
	virtual void onDeviceStateChanged(const openni::DeviceInfo* pInfo, openni::DeviceState errorState)
	{
		if (strcmp(pInfo->getUri(), g_device.getDeviceInfo().getUri()) == 0)
		{
			if (errorState != 0)
			{
				setErrorState("Device is in error state! (error %d)", errorState);
			}
			else
			{
				setErrorState("");
			}
		}
	}
Example #17
  void open(const char* uri) {
    if (device.open(uri) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Failed to open device")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    if (color_stream.create(device, openni::SENSOR_COLOR) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Failed to create color stream")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    openni::VideoMode color_mode;
    color_mode.setFps(30);
    color_mode.setResolution(color_image_resolution.width, color_image_resolution.height);
    color_mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
    color_stream.setVideoMode(color_mode);
    color_image_size = color_image_resolution.width * color_image_resolution.height * 3;
    color_stream.setMirroringEnabled(false);

    if (color_stream.start() != openni::STATUS_OK) {
      color_stream.destroy();
      BOOST_THROW_EXCEPTION(GrabberException("Failed to start color stream")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));
    }

    streams.push_back(&color_stream);

    auto control = device.getPlaybackControl();
    if (control != nullptr) {
      // This is a file, make sure we get every frame
      control->setSpeed(-1.0f);
      control->setRepeatEnabled(false);
      num_frames = control->getNumberOfFrames(color_stream);
      is_file = true;
      if (num_frames == -1)
        BOOST_THROW_EXCEPTION(GrabberException("Unable to determine number of frames in ONI file"));
    }
  }
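The streams vector filled above is what a capture loop would normally hand to openni::OpenNI::waitForAnyStream(). A hedged sketch of such a step, assuming streams is a std::vector<openni::VideoStream*> member and grab() is a hypothetical method of the same class:

  // Hypothetical capture step; color_stream and GrabberException come from the code above.
  void grab() {
    int ready = -1;
    if (openni::OpenNI::waitForAnyStream(streams.data(), static_cast<int>(streams.size()),
                                         &ready) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Timed out waiting for a stream"));

    openni::VideoFrameRef frame;
    if (color_stream.readFrame(&frame) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Failed to read color frame")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));
    // frame.getData() now holds color_image_size bytes of RGB888 pixels.
  }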
Example #18
int SensorOpenNI::initialize()
{
    LOG(INFO) << "Initializing OpenNI";
    ///< force shutdown before starting!!
    kinect::OpenNI::shutdown();

    kinect::Status rc;
    rc = kinect::STATUS_OK;

    /// Fetch the device URI to pass to Device::open()
    const char* deviceURI = kinect::ANY_DEVICE;

    /// Initialize the device
    rc = kinect::OpenNI::initialize();
    if(rc!=kinect::STATUS_OK)
    {
        mDebug()<<"Initialization Errors (if any): "<< kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Open the device using the previously fetched device URI
    rc = device.open(deviceURI);
    if (rc != kinect::STATUS_OK)
    {
        mDebug()<<"Device open failed: "<<kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the depth stream
    rc = g_depthStream.create(device, kinect::SENSOR_DEPTH);
    if (rc == kinect::STATUS_OK)
    {
        /// start the depth stream, if its creation was successful
        rc = g_depthStream.start();

        if (rc != kinect::STATUS_OK)
        {
            mDebug()<<"Couldn't start depth stream: "<<kinect::OpenNI::getExtendedError();
            g_depthStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug()<<"Couldn't find depth stream: "<<kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_depthStream.isValid())
    {
        mDebug()<<"No valid depth streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the color stream
    rc = g_colorStream.create(device, kinect::SENSOR_COLOR);

    if (rc == kinect::STATUS_OK)
    {
        /// start the color stream, if its creation was successful
        rc = g_colorStream.start();

        if (rc != kinect::STATUS_OK)
        {
            mDebug()<<"Couldn't start color stream: "<<kinect::OpenNI::getExtendedError();
            g_colorStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug()<<"Couldn't find color stream: "<<kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_colorStream.isValid())
    {
        mDebug()<<"No valid color streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Configure resolutions
    {
        /// Attempt to set for depth
        {
            kinect::VideoMode mode = g_depthStream.getVideoMode();
            if(((int)camera->FPS())==60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setResolution(camera->width(), camera->height());
            rc = g_depthStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (depth)" << std::endl;
        }
        /// Attempt to set for color
        {
            kinect::VideoMode mode = g_colorStream.getVideoMode();
            if(((int)camera->FPS())==60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setFps(30); ///< @todo check!!!
            mode.setResolution(camera->width(), camera->height());
            rc = g_colorStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (color)" << std::endl;
        }
    }


#ifdef THIS_CAUSES_INIT_STALLS
    /// Enable depth/color frame synchronization
    rc = device.setDepthColorSyncEnabled(true);
    if (rc != kinect::STATUS_OK)
    {
        qDebug()<<"Could not synchronise device";
        // VGA Kinect always seems to shut down here
        kinect::OpenNI::shutdown();
        exit(0);
    }
#endif

    /// Camera settings
    kinect::CameraSettings* settings = g_colorStream.getCameraSettings();
    settings->setAutoExposureEnabled(true);
    settings->setAutoWhiteBalanceEnabled(true);

    /// Fetch the camera intrinsics
#if 0
        float w = g_depthStream.getVideoMode().getResolutionX();
Example #19
void openCommon(openni::Device& device, bool defaultRightColor)
{
	openni::Status nRetVal = openni::STATUS_OK;

	g_bIsDepthOn = false;
	g_bIsColorOn = false;
	g_bIsIROn    = false;

	g_depthSensorInfo = device.getSensorInfo(openni::SENSOR_DEPTH);
	g_colorSensorInfo = device.getSensorInfo(openni::SENSOR_COLOR);
	g_irSensorInfo = device.getSensorInfo(openni::SENSOR_IR);

	if (g_depthSensorInfo != NULL)
	{
		nRetVal = g_depthStream.create(device, openni::SENSOR_DEPTH);
		if (nRetVal != openni::STATUS_OK)
		{
			printf("Failed to create depth stream:\n%s\n", openni::OpenNI::getExtendedError());
			return;
		}

		nRetVal = g_depthStream.start();
		if (nRetVal != openni::STATUS_OK)
		{
			printf("Failed to start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
			g_depthStream.destroy();
			return;
		}

		g_bIsDepthOn = true;
	}

	if (g_colorSensorInfo != NULL)
	{
		nRetVal = g_colorStream.create(device, openni::SENSOR_COLOR);
		if (nRetVal != openni::STATUS_OK)
		{
			printf("Failed to create color stream:\n%s\n", openni::OpenNI::getExtendedError());
			return;
		}

		if (defaultRightColor)
		{
			nRetVal = g_colorStream.start();
			if (nRetVal != openni::STATUS_OK)
			{
				printf("Failed to start color stream:\n%s\n", openni::OpenNI::getExtendedError());
				g_colorStream.destroy();
				return;
			}

			g_bIsColorOn = true;
		}
	}

	if (g_irSensorInfo != NULL)
	{
		nRetVal = g_irStream.create(device, openni::SENSOR_IR);
		if (nRetVal != openni::STATUS_OK)
		{
			printf("Failed to create IR stream:\n%s\n", openni::OpenNI::getExtendedError());
			return;
		}

		if (!g_bIsColorOn)
		{
			nRetVal = g_irStream.start();
			if (nRetVal != openni::STATUS_OK)
			{
				printf("Failed to start IR stream:\n%s\n", openni::OpenNI::getExtendedError());
				g_irStream.destroy();
				return;
			}

			g_bIsIROn = true;
		}
	}

	initConstants();

	readFrame();
}
Example #20
 ~Impl() {
   color_stream.stop();
   color_stream.destroy();
   device.close();
   openni::OpenNI::shutdown();
 }