// Converts a single depth pixel (x, y, z) into color-image coordinates using
// the registration between the depth and color streams.
bool convertDepthPointToColor(int depthX, int depthY, openni::DepthPixel depthZ, int* pColorX, int* pColorY)
{
	if (!g_depthStream.isValid() || !g_colorStream.isValid())
		return false;

	return (openni::STATUS_OK == openni::CoordinateConverter::convertDepthToColor(
		g_depthStream, g_colorStream, depthX, depthY, depthZ, pColorX, pColorY));
}
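// --- Hedged usage sketch (not part of the original source) ---
// Maps the center depth pixel into color-image coordinates. Assumes
// g_depthStream/g_colorStream are created and started, and that 'depthFrame'
// is a hypothetical openni::VideoFrameRef already read from g_depthStream.
void exampleMapCenterPixel(const openni::VideoFrameRef& depthFrame)
{
	int cx = depthFrame.getWidth() / 2;
	int cy = depthFrame.getHeight() / 2;
	const openni::DepthPixel* pixels = (const openni::DepthPixel*)depthFrame.getData();
	openni::DepthPixel z = pixels[cy * depthFrame.getWidth() + cx];

	int colorX, colorY;
	if (z != 0 && convertDepthPointToColor(cx, cy, z, &colorX, &colorY))
	{
		// (colorX, colorY) is the corresponding pixel in the color image.
	}
}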
// Creates the stream on first use, then starts or stops it depending on the
// current state flag.
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
	openni::Status nRetVal = openni::STATUS_OK;

	if (!stream.isValid())
	{
		nRetVal = stream.create(g_device, type);
		if (nRetVal != openni::STATUS_OK)
		{
			displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
			return;
		}
	}

	if (isOn)
	{
		stream.stop();
		frame.release();
	}
	else
	{
		nRetVal = stream.start();
		if (nRetVal != openni::STATUS_OK)
		{
			displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
			return;
		}
	}

	isOn = !isOn;
}
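// --- Hedged usage sketch (not part of the original source) ---
// toggleStreamState() is generic; per-stream toggles such as toggleIRState()
// further below can delegate to it. g_depthStream, g_depthFrame and
// g_bIsDepthOn are assumed globals mirroring the IR ones used in this file.
void toggleDepthState(int /*unused*/)
{
	toggleStreamState(g_depthStream, g_depthFrame, g_bIsDepthOn,
	                  openni::SENSOR_DEPTH, "depth");
}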
void resetStreamCropping(openni::VideoStream& stream)
{
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}

	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}

	openni::Status nRetVal = stream.resetCropping();
	if (nRetVal != openni::STATUS_OK)
	{
		displayMessage("Failed to reset cropping: %s", xnGetStatusString(nRetVal));
		return;
	}
}
void setStreamCropping(openni::VideoStream& stream, int originX, int originY, int width, int height)
{
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}

	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}

	openni::Status nRetVal = stream.setCropping(originX, originY, width, height);
	if (nRetVal != openni::STATUS_OK)
	{
		displayMessage("Failed to set cropping: %s", xnGetStatusString(nRetVal));
		return;
	}
}
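// --- Hedged usage sketch (not part of the original source) ---
// Crops a centered 320x240 window out of the depth stream, assuming
// g_depthStream is valid and its current video mode is at least 320x240.
void exampleCenterCrop()
{
	openni::VideoMode mode = g_depthStream.getVideoMode();
	setStreamCropping(g_depthStream,
	                  (mode.getResolutionX() - 320) / 2,
	                  (mode.getResolutionY() - 240) / 2,
	                  320, 240);
}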
// Frame callback: reads the latest IR and depth frames and copies them into
// the sensor's own row-major buffers.
void OccipitalSensor::onNewFrame(openni::VideoStream &stream)
{
	/*static long i; i++; qDebug() << i << "onNewFrame()";*/

	if (irStream.readFrame(&irFrame) != openni::STATUS_OK ||
	    depthStream.readFrame(&depthFrame) != openni::STATUS_OK)
	{
		std::cerr << "readFrame not OK " << stream.isValid() << std::endl;
		return;
	}

	const openni::Grayscale16Pixel* irData = (const openni::Grayscale16Pixel*)(irFrame.getData());
	const openni::DepthPixel* depthData = (const openni::DepthPixel*)(depthFrame.getData());

	for (int r = 0; r < Height; r++)
	{
		for (int c = 0; c < Width; c++)
		{
			this->irData(r, c) = irData[Width*r + c];
			this->depthData(r, c) = depthData[Width*r + c];
		}
	}
}
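// --- Hedged registration sketch (not part of the original source) ---
// onNewFrame() above matches the openni::VideoStream::NewFrameListener
// interface; OccipitalSensor is assumed to derive from it. The stream
// references stand in for however the sensor exposes its two streams.
void registerSensorListeners(OccipitalSensor& sensor,
                             openni::VideoStream& irStream,
                             openni::VideoStream& depthStream)
{
	irStream.addNewFrameListener(&sensor);
	depthStream.addNewFrameListener(&sensor);
}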
void toggleIRState(int)
{
	if (g_irStream.isValid())
	{
		if (g_bIsIROn)
		{
			g_irStream.stop();
			g_irFrame.release();
		}
		else
		{
			openni::Status nRetVal = g_irStream.start();
			if (nRetVal != openni::STATUS_OK)
			{
				displayError("Failed to start IR stream:\n%s", openni::OpenNI::getExtendedError());
				return;
			}
		}

		g_bIsIROn = !g_bIsIROn;
	}
}
int SensorOpenNI::initialize()
{
	LOG(INFO) << "Initializing OpenNI";

	///< force shutdown before starting!!
	kinect::OpenNI::shutdown();

	kinect::Status rc;
	rc = kinect::STATUS_OK;

	/// Fetch the device URI to pass to Device::open()
	const char* deviceURI = kinect::ANY_DEVICE;

	/// Initialize the device
	rc = kinect::OpenNI::initialize();
	if (rc != kinect::STATUS_OK)
	{
		mDebug() << "Initialization Errors (if any): " << kinect::OpenNI::getExtendedError();
		kinect::OpenNI::shutdown();
		exit(0);
	}

	/// Open the device using the previously fetched device URI
	rc = device.open(deviceURI);
	if (rc != kinect::STATUS_OK)
	{
		mDebug() << "Device open failed: " << kinect::OpenNI::getExtendedError();
		kinect::OpenNI::shutdown();
		exit(0);
	}

	/// Create the depth stream
	rc = g_depthStream.create(device, kinect::SENSOR_DEPTH);
	if (rc == kinect::STATUS_OK)
	{
		/// start the depth stream, if its creation was successful
		rc = g_depthStream.start();
		if (rc != kinect::STATUS_OK)
		{
			mDebug() << "Couldn't start depth stream: " << kinect::OpenNI::getExtendedError();
			g_depthStream.destroy();
			exit(0);
		}
	}
	else
	{
		mDebug() << "Couldn't find depth stream: " << kinect::OpenNI::getExtendedError();
		exit(0);
	}
	if (!g_depthStream.isValid())
	{
		mDebug() << "No valid depth streams. Exiting";
		kinect::OpenNI::shutdown();
		exit(0);
	}

	/// Create the color stream
	rc = g_colorStream.create(device, kinect::SENSOR_COLOR);
	if (rc == kinect::STATUS_OK)
	{
		/// start the color stream, if its creation was successful
		rc = g_colorStream.start();
		if (rc != kinect::STATUS_OK)
		{
			mDebug() << "Couldn't start color stream: " << kinect::OpenNI::getExtendedError();
			g_colorStream.destroy();
			exit(0);
		}
	}
	else
	{
		mDebug() << "Couldn't find color stream: " << kinect::OpenNI::getExtendedError();
		exit(0);
	}
	if (!g_colorStream.isValid())
	{
		mDebug() << "No valid color streams. Exiting";
		kinect::OpenNI::shutdown();
		exit(0);
	}

	/// Configure resolutions
	{
		/// Attempt to set for depth
		{
			kinect::VideoMode mode = g_depthStream.getVideoMode();
			if (((int)camera->FPS()) == 60)
				mode.setFps(60);
			else
				mode.setFps(30);
			mode.setResolution(camera->width(), camera->height());
			rc = g_depthStream.setVideoMode(mode);
			if (rc != kinect::STATUS_OK)
				std::cerr << "error setting video mode (depth)" << std::endl;
		}

		/// Attempt to set for color
		{
			kinect::VideoMode mode = g_colorStream.getVideoMode();
			if (((int)camera->FPS()) == 60)
				mode.setFps(60);
			else
				mode.setFps(30);
			mode.setFps(30); ///< @todo check!!!
			mode.setResolution(camera->width(), camera->height());
			rc = g_colorStream.setVideoMode(mode);
			if (rc != kinect::STATUS_OK)
				std::cerr << "error setting video mode (color)" << std::endl;
		}
	}

#ifdef THIS_CAUSES_INIT_STALLS
	/// Enable depth/color frame synchronization
	rc = device.setDepthColorSyncEnabled(true);
	if (rc != kinect::STATUS_OK)
	{
		qDebug() << "Could not synchronise device"; // VGA Kinect always seems to shut down here
		kinect::OpenNI::shutdown();
		exit(0);
	}
#endif

	/// Camera settings
	kinect::CameraSettings* settings = g_colorStream.getCameraSettings();
	settings->setAutoExposureEnabled(true);
	settings->setAutoWhiteBalanceEnabled(true);

	/// Fetch the camera intrinsics
#if 0
	float w = g_depthStream.getVideoMode().getResolutionX();