// Shut down every active OpenNI stream and release the device handle.
// Streams are stopped before they are destroyed, and the device itself is
// closed last. Always returns 1.
int closeOpenNIDevice(Device &device , VideoStream &color , VideoStream &depth , VideoStream &ir)
{
    fprintf(stderr,"Stopping depth and color streams\n");

    // Halt frame delivery first so no callbacks fire during teardown.
    depth.stop();
    color.stop();

#if MOD_IR
    // The IR stream only exists when built with IR support enabled.
    ir.stop();
    ir.destroy();
#endif // MOD_IR

    depth.destroy();
    color.destroy();

    // Release the underlying device handle last.
    device.close();
    return 1;
}
// Decrement this device's open reference count; tear the device down only
// when the last opener closes it. Always reports ONI_STATUS_OK, even while
// other opens remain outstanding.
OniStatus Device::close()
{
    --m_openCount;
    if (m_openCount == 0)
    {
        // Stop and unregister every stream still attached to this device.
        // Taking Begin() each iteration avoids invalidating an iterator
        // while removing from m_streams.
        while(m_streams.Begin() != m_streams.End())
        {
            VideoStream* pStream = *m_streams.Begin();
            pStream->stop();
            m_streams.Remove(pStream);
        }

        // Free the per-sensor bookkeeping objects (XN_DELETE is the
        // project's delete macro) and null the slots for reuse.
        for (int i = 0; i < MAX_SENSORS_PER_DEVICE; ++i)
        {
            if (m_sensors[i] != NULL)
            {
                XN_DELETE(m_sensors[i]);
                m_sensors[i] = NULL;
            }
        }

        // Hand the native handle back to the driver, then forget it so a
        // later close cannot double-release it.
        if (m_deviceHandle != NULL)
        {
            m_driverHandler.deviceClose(m_deviceHandle);
        }
        m_deviceHandle = NULL;
    }
    return ONI_STATUS_OK;
}
// Pause the stream (remembering whether it was active) so configuration
// changes can be applied safely; presumably a matching endConfigure()
// restores was_running_ — confirm against the rest of the class.
virtual bool beginConfigure()
{
    was_running_ = running_;

    if (was_running_)
    {
        stream_.stop();
    }

    running_ = false;
    return true;
}
// Teardown order matters: unhook the new-frame callback first so no frame
// events arrive while the stream is being stopped and destroyed, then
// retire the publisher.
virtual ~SensorStreamManager()
{
    stream_.removeNewFrameListener(this);
    stream_.stop();
    stream_.destroy();
    publisher_.shutdown();
}
int main() { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } OpenNIEventListener eventPrinter; OpenNI::addListener(&eventPrinter); Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL) { rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK) { printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); } } rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); } PrintCallback depthPrinter; // Register to new frame depth.addListener(&depthPrinter); // Wait while we're getting frames through the printer while (!wasKeyboardHit()) { Sleep(100); } depth.removeListener(&depthPrinter); depth.stop(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; }
// image_transport connect/disconnect hook: run the OpenNI stream lazily,
// only while at least one subscriber is listening on the topic.
virtual void onSubscriptionChanged(const image_transport::SingleSubscriberPublisher& topic)
{
    const bool has_subscribers = topic.getNumSubscribers() > 0;

    if (!has_subscribers)
    {
        // Last subscriber left: stop delivering frames.
        stream_.stop();
        running_ = false;
        return;
    }

    // First subscriber arrived: start the stream if it isn't running yet.
    if (!running_ && stream_.start() == STATUS_OK)
    {
        running_ = true;
    }
}
int main (int argc, char** argv) { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { std::cout << "Initialize failed: " << OpenNI::getExtendedError() << std::endl; return 1; } Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { std::cout << "Couldn't open device: " << OpenNI::getExtendedError() << std::endl; return 2; } VideoStream stream; if (device.getSensorInfo(currentSensor) != NULL) { rc = stream.create(device, currentSensor); if (rc != STATUS_OK) { std::cout << "Couldn't create stream: " << OpenNI::getExtendedError() << std::endl; return 3; } } rc = stream.start(); if (rc != STATUS_OK) { std::cout << "Couldn't start the stream: " << OpenNI::getExtendedError() << std::endl; return 4; } VideoFrameRef frame; //now open the video writer Size S = Size(stream.getVideoMode().getResolutionX(), stream.getVideoMode().getResolutionY()); VideoWriter outputVideo; std::string fileName = "out.avi"; outputVideo.open(fileName, -1, stream.getVideoMode().getFps(), S, currentSensor == SENSOR_COLOR ? true : false); if (!outputVideo.isOpened()) { std::cout << "Could not open the output video for write: " << fileName << std::endl; return -1; } while (waitKey(50) == -1) { int changedStreamDummy; VideoStream* pStream = &stream; rc = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy, SAMPLE_READ_WAIT_TIMEOUT); if (rc != STATUS_OK) { std::cout << "Wait failed! 
(timeout is " << SAMPLE_READ_WAIT_TIMEOUT << "ms): " << OpenNI::getExtendedError() << std::endl; continue; } rc = stream.readFrame(&frame); if (rc != STATUS_OK) { std::cout << "Read failed:" << OpenNI::getExtendedError() << std::endl; continue; } Mat image; switch (currentSensor) { case SENSOR_COLOR: image = Mat(frame.getHeight(), frame.getWidth(), CV_8UC3, (void*)frame.getData()); break; case SENSOR_DEPTH: image = Mat(frame.getHeight(), frame.getWidth(), DataType<DepthPixel>::type, (void*)frame.getData()); break; case SENSOR_IR: image = Mat(frame.getHeight(), frame.getWidth(), CV_8U, (void*)frame.getData()); break; default: break; } namedWindow( "Display window", WINDOW_AUTOSIZE ); // Create a window for display. imshow( "Display window", image ); // Show our image inside it. outputVideo << image; } stream.stop(); stream.destroy(); device.close(); OpenNI::shutdown(); return 0; }
int main() { // 2. initialize OpenNI Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } // 3. open a device Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } // 4. create depth stream VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL){ rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK){ printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); return 3; } } VideoStream color; if (device.getSensorInfo(SENSOR_COLOR) != NULL){ rc = color.create(device, SENSOR_COLOR); if (rc != STATUS_OK){ printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError()); return 4; } } // 5. create OpenCV Window cv::namedWindow("Depth Image", CV_WINDOW_AUTOSIZE); cv::namedWindow("Color Image", CV_WINDOW_AUTOSIZE); // 6. start rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); return 5; } rc = color.start(); if (rc != STATUS_OK){ printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); return 6; } VideoFrameRef colorframe; VideoFrameRef depthframe; int iMaxDepth = depth.getMaxPixelValue(); int iColorFps = color.getVideoMode().getFps(); cv::Size iColorFrameSize = cv::Size(color.getVideoMode().getResolutionX(), color.getVideoMode().getResolutionY()); cv::Mat colorimageRGB; cv::Mat colorimageBGR; cv::Mat depthimage; cv::Mat depthimageScaled; #ifdef F_RECORDVIDEO cv::VideoWriter outputvideo_color; cv::FileStorage outputfile_depth; time_t timenow = time(0); tm ltime; localtime_s(<ime, &timenow); int tyear = 1900 + ltime.tm_year; int tmouth = 1 + ltime.tm_mon; int tday = ltime.tm_mday; int thour = ltime.tm_hour; int tmin = ltime.tm_min; int tsecond = ltime.tm_sec; string filename_rgb = "RGB/rgb_" + to_string(tyear) + "_" + to_string(tmouth) + "_" + to_string(tday) + "_" + 
to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".avi"; string filename_d = "D/d_" + to_string(tyear) + "_" + to_string(tmouth) + "_" + to_string(tday) + "_" + to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".yml"; outputvideo_color.open(filename_rgb, CV_FOURCC('I', '4', '2', '0'), iColorFps, iColorFrameSize, true); if (!outputvideo_color.isOpened()){ cout << "Could not open the output color video for write: " << endl; return 7; } outputfile_depth.open(filename_d, cv::FileStorage::WRITE); if (!outputfile_depth.isOpened()){ cout << "Could not open the output depth file for write: " << endl; return 8; } #endif // F_RECORDVIDEO // 7. main loop, continue read while (!wasKeyboardHit()) { // 8. check is color stream is available if (color.isValid()){ if (color.readFrame(&colorframe) == STATUS_OK){ colorimageRGB = { colorframe.getHeight(), colorframe.getWidth(), CV_8UC3, (void*)colorframe.getData() }; cv::cvtColor(colorimageRGB, colorimageBGR, CV_RGB2BGR); } } // 9. check is depth stream is available if (depth.isValid()){ if (depth.readFrame(&depthframe) == STATUS_OK){ depthimage = { depthframe.getHeight(), depthframe.getWidth(), CV_16UC1, (void*)depthframe.getData() }; depthimage.convertTo(depthimageScaled, CV_8U, 255.0 / iMaxDepth); } } cv::imshow("Color Image", colorimageBGR); cv::imshow("Depth Image", depthimageScaled); #ifdef F_RECORDVIDEO outputvideo_color << colorimageBGR; outputfile_depth << "Mat" << depthimage; #endif // F_RECORDVIDEO cv::waitKey(10); } color.stop(); depth.stop(); color.destroy(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; }
int main(int argc, char **argv){ printf("starting\n"); fflush(stdout); ros::init(argc, argv, "xtion",ros::init_options::AnonymousName); ros::NodeHandle n("~"); //Base topic name n.param("topic", topic, string("/camera")); //Resolution //0 = 160x120 //1 = 320x240 n.param("depth_mode", _depth_mode, -1); n.param("rgb_mode", _rgb_mode, -1); n.param("sync", _sync, 0); n.param("registration", _registration,0); n.param("frame_id", frame_id, string("camera_frame")); n.param("device_num", _device_num, -1); n.param("device_uri", _device_uri, string("NA")); n.param("frame_skip", _frame_skip, 0); n.param("exposure", _exposure, -1); n.param("gain", _gain, -1); printf("Launched with params:\n"); printf("_device_num:= %d\n",_device_num); printf("_device_uri:= %s\n",_device_uri.c_str()); printf("_topic:= %s\n",topic.c_str()); printf("_sync:= %d\n",_sync); printf("_registration:= %d\n",_registration); printf("_depth_mode:= %d\n",_depth_mode); printf("_rgb_mode:= %d\n",_rgb_mode); printf("_frame_id:= %s\n",frame_id.c_str()); printf("_frame_skip:= %d\n",_frame_skip); printf("_exposure:= %d\n",_exposure); printf("_gain:= %d\n",_gain); fflush(stdout); if (_frame_skip<=0) _frame_skip = 1; //OPENNI2 STUFF //=================================================================== streams = new openni::VideoStream*[2]; streams[0]=&depth; streams[1]=&rgb; Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); fflush(stdout); return 1; } // enumerate the devices openni::Array<openni::DeviceInfo> device_list; openni::OpenNI::enumerateDevices(&device_list); Device device; if(_device_uri.compare("NA")){ string dev_uri("NA"); for (int i = 0; i<device_list.getSize(); i++){ if(!string(device_list[i].getUri()).compare(0, _device_uri.size(), _device_uri )){ dev_uri = device_list[i].getUri(); break; } } if(!dev_uri.compare("NA")){ cerr << "cannot find device with uri starting for: " << _device_uri << endl; } rc = 
device.open(dev_uri.c_str()); } else{ if (_device_num < 0){ cerr << endl << endl << "found " << device_list.getSize() << " devices" << endl; for (int i = 0; i<device_list.getSize(); i++) cerr << "\t num: " << i << " uri: " << device_list[i].getUri() << endl; } if (_device_num>=device_list.getSize() || _device_num<0 ) { cerr << "device num: " << _device_num << " does not exist, aborting" << endl; openni::OpenNI::shutdown(); return 0; } rc = device.open(device_list[_device_num].getUri()); } if (rc != STATUS_OK){ printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); fflush(stdout); return 2; } if(_depth_mode>=0){ if (device.getSensorInfo(SENSOR_DEPTH) != NULL){ rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK){ printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); fflush(stdout); return 3; } //DEPTH pub_depth = n.advertise<sensor_msgs::Image>("/"+topic+"/depth/image_raw", 1); pub_camera_info_depth = n.advertise<sensor_msgs::CameraInfo>("/"+topic+"/depth/camera_info", 1); } } if(_rgb_mode>=0){ if (device.getSensorInfo(SENSOR_COLOR) != NULL){ rc = rgb.create(device, SENSOR_COLOR); if (rc != STATUS_OK){ printf("Couldn't create rgb stream\n%s\n", OpenNI::getExtendedError()); fflush(stdout); return 3; } //RGB pub_rgb = n.advertise<sensor_msgs::Image>("/"+topic+"/rgb/image_raw", 1); pub_camera_info_rgb = n.advertise<sensor_msgs::CameraInfo>("/"+topic+"/rgb/camera_info", 1); } } if(_depth_mode<0 && _rgb_mode<0){ cout << "Depth modes" << endl; const openni::SensorInfo* sinfo = device.getSensorInfo(openni::SENSOR_DEPTH); // select index=4 640x480, 30 fps, 1mm const openni::Array< openni::VideoMode>& modesDepth = sinfo->getSupportedVideoModes(); printf("Enums data:\nPIXEL_FORMAT_DEPTH_1_MM = 100,\nPIXEL_FORMAT_DEPTH_100_UM = 101,\nPIXEL_FORMAT_SHIFT_9_2 = 102,\nPIXEL_FORMAT_SHIFT_9_3 = 103,\nPIXEL_FORMAT_RGB888 = 200,\nPIXEL_FORMAT_YUV422 = 201,\nPIXEL_FORMAT_GRAY8 = 202,\nPIXEL_FORMAT_GRAY16 = 203,\nPIXEL_FORMAT_JPEG = 
204,\nPIXEL_FORMAT_YUYV = 205,\n\n"); cout << "Depth modes" << endl; for (int i = 0; i<modesDepth.getSize(); i++) { printf("%i: %ix%i, %i fps, %i format\n", i, modesDepth[i].getResolutionX(), modesDepth[i].getResolutionY(),modesDepth[i].getFps(), modesDepth[i].getPixelFormat()); //PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM = 101 } cout << "Rgb modes" << endl; const openni::SensorInfo* sinfoRgb = device.getSensorInfo(openni::SENSOR_COLOR); // select index=4 640x480, 30 fps, 1mm const openni::Array< openni::VideoMode>& modesRgb = sinfoRgb->getSupportedVideoModes(); for (int i = 0; i<modesRgb.getSize(); i++) { printf("%i: %ix%i, %i fps, %i format\n", i, modesRgb[i].getResolutionX(), modesRgb[i].getResolutionY(),modesRgb[i].getFps(), modesRgb[i].getPixelFormat()); //PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM } depth.stop(); depth.destroy(); rgb.stop(); rgb.destroy(); device.close(); OpenNI::shutdown(); exit(1); } if(_depth_mode>=0){ rc = depth.setVideoMode(device.getSensorInfo(SENSOR_DEPTH)->getSupportedVideoModes()[_depth_mode]); depth.setMirroringEnabled(false); rc = depth.start(); } if(_rgb_mode>=0){ rc = rgb.setVideoMode(device.getSensorInfo(SENSOR_COLOR)->getSupportedVideoModes()[_rgb_mode]); rgb.setMirroringEnabled(false); rgb.getCameraSettings()->setAutoExposureEnabled(true); rgb.getCameraSettings()->setAutoWhiteBalanceEnabled(true); cerr << "Camera settings valid: " << rgb.getCameraSettings()->isValid() << endl; rc = rgb.start(); } if(_depth_mode>=0 && _rgb_mode>=0 && _sync==1){ rc =device.setDepthColorSyncEnabled(true); if (rc != STATUS_OK) { printf("Couldn't enable de pth and rgb images synchronization\n%s\n", OpenNI::getExtendedError()); exit(2); } } if(_depth_mode>=0 && _rgb_mode>=0 && _registration==1){ device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR); } run = true; pthread_t runner; pthread_create(&runner, 0, camera_thread, 0); ros::spin(); void* result; run =false; pthread_join(runner, &result); 
depth.stop(); depth.destroy(); rgb.stop(); rgb.destroy(); device.close(); OpenNI::shutdown(); return 0; }
// Disable the Kinect: stop and destroy the depth stream, close the device,
// and shut OpenNI down. `depth` and `device` are presumably file-scope
// globals owned by this module — confirm against the rest of the file.
// NOTE(review): the `tag` parameter is required by the wb_* API signature
// but is unused here; this implementation manages a single global device.
void wb_kinect_disable(WbDeviceTag tag)
{
    depth.stop();
    depth.destroy();
    device.close();
    OpenNI::shutdown();
}
int main() { // 2. initialize OpenNI Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } // 3. open a device Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } // 4. create depth stream VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL){ rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK){ printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); return 3; } } VideoStream color; if (device.getSensorInfo(SENSOR_COLOR) != NULL){ rc = color.create(device, SENSOR_COLOR); if (rc != STATUS_OK){ printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError()); return 4; } } // 5. create OpenCV Window cv::namedWindow("Depth Image", CV_WINDOW_AUTOSIZE); cv::namedWindow("Color Image", CV_WINDOW_AUTOSIZE); // 6. start rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); return 5; } rc = color.start(); if (rc != STATUS_OK){ printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); return 6; } VideoFrameRef colorframe; VideoFrameRef depthframe; int iMaxDepth = depth.getMaxPixelValue(); cv::Mat colorimageRGB; cv::Mat colorimageBGR; cv::Mat depthimage; cv::Mat depthimageScaled; // 7. main loop, continue read while (!wasKeyboardHit()) { // 8. check is color stream is available if (color.isValid()){ if (color.readFrame(&colorframe) == STATUS_OK){ colorimageRGB = { colorframe.getHeight(), colorframe.getWidth(), CV_8UC3, (void*)colorframe.getData() }; cv::cvtColor(colorimageRGB, colorimageBGR, CV_RGB2BGR); } } // 9. 
check is depth stream is available if (depth.isValid()){ if (depth.readFrame(&depthframe) == STATUS_OK){ depthimage = { depthframe.getHeight(), depthframe.getWidth(), CV_16UC1, (void*)depthframe.getData() }; depthimage.convertTo(depthimageScaled, CV_8U, 255.0 / iMaxDepth); } } cv::imshow("Color Image", colorimageBGR); cv::imshow("Depth Image", depthimageScaled); cv::waitKey(10); } color.stop(); depth.stop(); color.destroy(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; }
int main() { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } OpenNIDeviceListener devicePrinter; OpenNI::addDeviceConnectedListener(&devicePrinter); OpenNI::addDeviceDisconnectedListener(&devicePrinter); OpenNI::addDeviceStateChangedListener(&devicePrinter); openni::Array<openni::DeviceInfo> deviceList; openni::OpenNI::enumerateDevices(&deviceList); for (int i = 0; i < deviceList.getSize(); ++i) { printf("Device \"%s\" already connected\n", deviceList[i].getUri()); } Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL) { rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK) { printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); } } rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); } PrintCallback depthPrinter; // Register to new frame depth.addNewFrameListener(&depthPrinter); int i = 1; while(i > 0) { scanf_s("%d", i); printf("%d\n", i); } depth.removeNewFrameListener(&depthPrinter); depth.stop(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; }