/** @brief Lazily creates and caches the IR video stream of this device.
 *
 *  The stream is only created on the first call, and only if the device
 *  actually reports an IR sensor; subsequent calls return the cached stream.
 *
 *  @return shared pointer to the IR stream; empty if the device has no IR sensor.
 *  @throws OpenNI2Exception if the underlying stream creation fails.
 */
boost::shared_ptr<openni::VideoStream> OpenNI2Device::getIRVideoStream() const throw (OpenNI2Exception)
{
  // First request: allocate and open the stream, provided an IR sensor exists.
  if (!ir_video_stream_ && hasIRSensor())
  {
    ir_video_stream_ = boost::make_shared<openni::VideoStream>();

    const openni::Status status = ir_video_stream_->create(*openni_device_, openni::SENSOR_IR);
    if (status != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't create IR video stream: \n%s\n", openni::OpenNI::getExtendedError());
  }

  return ir_video_stream_;
}
/** @brief Converts an OpenNI DeviceInfo (plus a separately obtained serial
 *         number) into the driver's own OpenNI2DeviceInfo value type.
 *
 *  @param pInfo  device description from OpenNI; must be non-NULL.
 *  @param serial serial-number string to store alongside the copied fields.
 *  @return populated OpenNI2DeviceInfo.
 *  @throws OpenNI2Exception if pInfo is NULL.
 */
const OpenNI2DeviceInfo openni2_convert(const openni::DeviceInfo* pInfo, const std::string serial)
{
  if (!pInfo)
    THROW_OPENNI_EXCEPTION("openni2_convert called with zero pointer\n");

  OpenNI2DeviceInfo info;
  info.name_       = pInfo->getName();
  info.uri_        = pInfo->getUri();
  info.vendor_     = pInfo->getVendor();
  info.product_id_ = pInfo->getUsbProductId();
  info.vendor_id_  = pInfo->getUsbVendorId();
  info.serial_     = serial;
  return info;
}
void OpenNI2Device::setAutoWhiteBalance(bool enable) throw (OpenNI2Exception) { boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream(); if (stream) { openni::CameraSettings* camera_seeting = stream->getCameraSettings(); if (camera_seeting) { const openni::Status rc = camera_seeting->setAutoWhiteBalanceEnabled(enable); if (rc != openni::STATUS_OK) THROW_OPENNI_EXCEPTION("Couldn't set auto white balance: \n%s\n", openni::OpenNI::getExtendedError()); } } }
/** @brief Queries the video mode currently active on the depth stream.
 *
 *  @return the depth stream's current mode converted to OpenNI2VideoMode.
 *  @throws OpenNI2Exception if the depth stream could not be obtained.
 */
const OpenNI2VideoMode OpenNI2Device::getDepthVideoMode() throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> depth_stream = getDepthVideoStream();
  if (!depth_stream)
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  openni::VideoMode current_mode = depth_stream->getVideoMode();
  return openni2_convert(current_mode);
}
std::string OpenNI2Driver::resolveDeviceURI(const std::string& device_id) throw(OpenNI2Exception) { std::string device_URI; boost::shared_ptr<std::vector<std::string> > available_device_URIs = device_manager_->getConnectedDeviceURIs(); // look for '#<number>' format if (device_id_.size() > 1 && device_id_[0] == '#') { std::istringstream device_number_str(device_id_.substr(1)); int device_number; device_number_str >> device_number; int device_index = device_number - 1; // #1 refers to first device if (device_index >= available_device_URIs->size() || device_index < 0) { THROW_OPENNI_EXCEPTION( "Invalid device number %i, there are %zu devices connected.", device_number, available_device_URIs->size()); } else { return available_device_URIs->at(device_index); } }
/** @brief Depth-stream cropping is not available on the Microsoft Kinect.
 *
 *  A zero width or height denotes "no cropping" and is accepted silently;
 *  any real crop window is rejected.
 *
 *  @throws OpenNIException when a non-empty crop window is requested.
 */
void DeviceKinect::setDepthCropping (unsigned x, unsigned y, unsigned width, unsigned height) throw (OpenNIException)
{
  const bool cropping_requested = (width != 0) && (height != 0);
  if (cropping_requested)
    THROW_OPENNI_EXCEPTION ("Microsoft Kinect does not support cropping for the depth stream.");
}
/** @brief Hardware stream synchronization is not available on the Kinect.
 *
 *  @param on_off requested synchronization state.
 *  @throws OpenNIException when enabling is requested; disabling is accepted
 *          silently since it is trivially satisfied.
 */
void DeviceKinect::setSynchronization (bool on_off) throw (OpenNIException)
{
  if (!on_off)
    return;  // turning synchronization off requires no action

  THROW_OPENNI_EXCEPTION ("Microsoft Kinect does not support Hardware synchronization.");
}
void OpenNI2Device::setDepthColorSync(bool enabled) throw (OpenNI2Exception) { openni::Status rc = openni_device_->setDepthColorSyncEnabled(enabled); if (rc != openni::STATUS_OK) THROW_OPENNI_EXCEPTION("Enabling depth color synchronization failed: \n%s\n", openni::OpenNI::getExtendedError()); }