virtual bool tryConfigureVideoMode(VideoMode& mode)
{
  bool result = true;
  VideoMode old = stream_.getVideoMode();
  if (stream_.setVideoMode(mode) != STATUS_OK)
  {
    // Applying the requested mode failed: try to restore the previous mode.
    ROS_ERROR_STREAM_COND(stream_.setVideoMode(old) != STATUS_OK,
                          "Failed to recover old video mode!");
    result = false;
  }
  return result;
}
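// Usage sketch (assumption: "camera" is an instance of the wrapper class that
// owns stream_ and exposes tryConfigureVideoMode(); the 640x480 @ 30 fps depth
// mode below is only an illustrative value, not taken from the original code).
// It shows the intended call pattern: build the desired VideoMode, try to apply
// it, and react if the device rejects it.
VideoMode wanted;
wanted.setResolution(640, 480);
wanted.setFps(30);
wanted.setPixelFormat(PIXEL_FORMAT_DEPTH_1_MM);
if (!camera.tryConfigureVideoMode(wanted))
{
    ROS_WARN("Requested video mode was rejected; the previous mode was restored.");
}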
// Copy basic properties between VideoStream objects
void CopyGeneralProperties(const VideoStream& rSource, VideoStream& rTarget)
{
  // assign basic properties
  rTarget.setVideoMode(rSource.getVideoMode());
  rTarget.setProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV,   rSource.getVerticalFieldOfView());
  rTarget.setProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, rSource.getHorizontalFieldOfView());
  rTarget.setProperty(ONI_STREAM_PROPERTY_MIRRORING,      rSource.getMirroringEnabled());

  // assign depth-only properties
  rTarget.setProperty(ONI_STREAM_PROPERTY_MIN_VALUE, rSource.getMinPixelValue());
  rTarget.setProperty(ONI_STREAM_PROPERTY_MAX_VALUE, rSource.getMaxPixelValue());
}
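// Usage sketch (assumption: "device" is an already-opened openni::Device and the
// two stream names are illustrative): clone the settings of an existing depth
// stream onto a second, freshly created one before starting it.
VideoStream existingDepth, newDepth;
existingDepth.create(device, SENSOR_DEPTH);
newDepth.create(device, SENSOR_DEPTH);
CopyGeneralProperties(existingDepth, newDepth);
newDepth.start();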
// currentSensor (SENSOR_COLOR / SENSOR_DEPTH / SENSOR_IR) and
// SAMPLE_READ_WAIT_TIMEOUT are assumed to be defined elsewhere in this file.
int main(int argc, char** argv)
{
    Status rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        std::cout << "Initialize failed: " << OpenNI::getExtendedError() << std::endl;
        return 1;
    }

    Device device;
    rc = device.open(ANY_DEVICE);
    if (rc != STATUS_OK)
    {
        std::cout << "Couldn't open device: " << OpenNI::getExtendedError() << std::endl;
        return 2;
    }

    VideoStream stream;
    if (device.getSensorInfo(currentSensor) != NULL)
    {
        rc = stream.create(device, currentSensor);
        if (rc != STATUS_OK)
        {
            std::cout << "Couldn't create stream: " << OpenNI::getExtendedError() << std::endl;
            return 3;
        }
    }

    rc = stream.start();
    if (rc != STATUS_OK)
    {
        std::cout << "Couldn't start the stream: " << OpenNI::getExtendedError() << std::endl;
        return 4;
    }

    VideoFrameRef frame;

    // Now open the video writer.
    Size S = Size(stream.getVideoMode().getResolutionX(), stream.getVideoMode().getResolutionY());
    VideoWriter outputVideo;
    std::string fileName = "out.avi";
    outputVideo.open(fileName, -1, stream.getVideoMode().getFps(), S,
                     currentSensor == SENSOR_COLOR ? true : false);
    if (!outputVideo.isOpened())
    {
        std::cout << "Could not open the output video for write: " << fileName << std::endl;
        return -1;
    }

    namedWindow("Display window", WINDOW_AUTOSIZE); // Create a window for display.

    while (waitKey(50) == -1)
    {
        int changedStreamDummy;
        VideoStream* pStream = &stream;
        rc = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy, SAMPLE_READ_WAIT_TIMEOUT);
        if (rc != STATUS_OK)
        {
            std::cout << "Wait failed! (timeout is " << SAMPLE_READ_WAIT_TIMEOUT << "ms): "
                      << OpenNI::getExtendedError() << std::endl;
            continue;
        }

        rc = stream.readFrame(&frame);
        if (rc != STATUS_OK)
        {
            std::cout << "Read failed: " << OpenNI::getExtendedError() << std::endl;
            continue;
        }

        Mat image;
        switch (currentSensor)
        {
        case SENSOR_COLOR:
        {
            // OpenNI delivers RGB888; convert to BGR for OpenCV display and writing.
            Mat rgb(frame.getHeight(), frame.getWidth(), CV_8UC3, (void*)frame.getData());
            cvtColor(rgb, image, COLOR_RGB2BGR);
            break;
        }
        case SENSOR_DEPTH:
            image = Mat(frame.getHeight(), frame.getWidth(), DataType<DepthPixel>::type, (void*)frame.getData());
            break;
        case SENSOR_IR:
            image = Mat(frame.getHeight(), frame.getWidth(), CV_8U, (void*)frame.getData());
            break;
        default:
            break;
        }

        imshow("Display window", image); // Show our image inside it.
        outputVideo << image;
    }

    stream.stop();
    stream.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}
int main()
{
    // 2. initialize OpenNI
    Status rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
        return 1;
    }

    // 3. open a device
    Device device;
    rc = device.open(ANY_DEVICE);
    if (rc != STATUS_OK)
    {
        printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
        return 2;
    }

    // 4. create depth and color streams
    VideoStream depth;
    if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
    {
        rc = depth.create(device, SENSOR_DEPTH);
        if (rc != STATUS_OK)
        {
            printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
            return 3;
        }
    }
    VideoStream color;
    if (device.getSensorInfo(SENSOR_COLOR) != NULL)
    {
        rc = color.create(device, SENSOR_COLOR);
        if (rc != STATUS_OK)
        {
            printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError());
            return 4;
        }
    }

    // 5. create OpenCV windows
    cv::namedWindow("Depth Image", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("Color Image", CV_WINDOW_AUTOSIZE);

    // 6. start the streams
    rc = depth.start();
    if (rc != STATUS_OK)
    {
        printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
        return 5;
    }
    rc = color.start();
    if (rc != STATUS_OK)
    {
        printf("Couldn't start the color stream\n%s\n", OpenNI::getExtendedError());
        return 6;
    }

    VideoFrameRef colorframe;
    VideoFrameRef depthframe;
    int iMaxDepth = depth.getMaxPixelValue();
    int iColorFps = color.getVideoMode().getFps();
    cv::Size iColorFrameSize = cv::Size(color.getVideoMode().getResolutionX(),
                                        color.getVideoMode().getResolutionY());

    cv::Mat colorimageRGB;
    cv::Mat colorimageBGR;
    cv::Mat depthimage;
    cv::Mat depthimageScaled;

#ifdef F_RECORDVIDEO
    cv::VideoWriter outputvideo_color;
    cv::FileStorage outputfile_depth;

    // Build timestamped output file names such as RGB/rgb_<year>_<month>_<day>_<hour>_<min>_<sec>.avi
    time_t timenow = time(0);
    tm ltime;
    localtime_s(&ltime, &timenow);
    int tyear   = 1900 + ltime.tm_year;
    int tmonth  = 1 + ltime.tm_mon;
    int tday    = ltime.tm_mday;
    int thour   = ltime.tm_hour;
    int tmin    = ltime.tm_min;
    int tsecond = ltime.tm_sec;
    string filename_rgb = "RGB/rgb_" + to_string(tyear) + "_" + to_string(tmonth) + "_" + to_string(tday)
                        + "_" + to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".avi";
    string filename_d   = "D/d_" + to_string(tyear) + "_" + to_string(tmonth) + "_" + to_string(tday)
                        + "_" + to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".yml";

    outputvideo_color.open(filename_rgb, CV_FOURCC('I', '4', '2', '0'), iColorFps, iColorFrameSize, true);
    if (!outputvideo_color.isOpened())
    {
        cout << "Could not open the output color video for write: " << filename_rgb << endl;
        return 7;
    }
    outputfile_depth.open(filename_d, cv::FileStorage::WRITE);
    if (!outputfile_depth.isOpened())
    {
        cout << "Could not open the output depth file for write: " << filename_d << endl;
        return 8;
    }
#endif // F_RECORDVIDEO

    // 7. main loop: keep reading until a key is hit
    while (!wasKeyboardHit())
    {
        // 8. check whether the color stream is valid and read a frame
        if (color.isValid())
        {
            if (color.readFrame(&colorframe) == STATUS_OK)
            {
                colorimageRGB = cv::Mat(colorframe.getHeight(), colorframe.getWidth(),
                                        CV_8UC3, (void*)colorframe.getData());
                cv::cvtColor(colorimageRGB, colorimageBGR, CV_RGB2BGR);
            }
        }

        // 9. check whether the depth stream is valid and read a frame
        if (depth.isValid())
        {
            if (depth.readFrame(&depthframe) == STATUS_OK)
            {
                depthimage = cv::Mat(depthframe.getHeight(), depthframe.getWidth(),
                                     CV_16UC1, (void*)depthframe.getData());
                // Scale 16-bit depth to 8-bit for display.
                depthimage.convertTo(depthimageScaled, CV_8U, 255.0 / iMaxDepth);
            }
        }

        cv::imshow("Color Image", colorimageBGR);
        cv::imshow("Depth Image", depthimageScaled);

#ifdef F_RECORDVIDEO
        outputvideo_color << colorimageBGR;
        outputfile_depth << "Mat" << depthimage;
#endif // F_RECORDVIDEO

        cv::waitKey(10);
    }

    color.stop();
    depth.stop();
    color.destroy();
    depth.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}
int initializeOpenNIDevice(int deviceID, const char* deviceName, Device& device,
                           VideoStream& color, VideoStream& depth,
                           unsigned int width, unsigned int height, unsigned int fps)
{
    unsigned int openMode = OPENNI2_OPEN_REGULAR_ENUM; /* 0 = regular deviceID and enumeration */

    if (deviceName != 0)
    {
        // If our deviceName contains ".oni" we assume that we have an oni file to open.
        if (strstr(deviceName, ".oni") != 0)
        {
            fprintf(stderr, "Found an .ONI filename, trying to open it..\n");
            openMode = OPENNI2_OPEN_USING_STRING;
        }
        else if (strlen(deviceName) > 7)
        {
            fprintf(stderr, "deviceName is %lu chars long, assuming it is a Device URI..\n", strlen(deviceName));
            openMode = OPENNI2_OPEN_USING_STRING;
        }
    }

    switch (openMode)
    {
        //-------------------------------------------------------------------------------------
        // If we have an ONI file or a URI, just pass it as an argument to device.open(deviceName).
        case OPENNI2_OPEN_USING_STRING:
            if (device.open(deviceName) != STATUS_OK)
            {
                fprintf(stderr, "Could not open using given string ( %s ) : %s \n", deviceName, OpenNI::getExtendedError());
                return 0;
            }
            break;
        //-------------------------------------------------------------------------------------
        // If we don't have a deviceName we assume deviceID points to the device we want to open,
        // so we use the OpenNI enumerator to get the URI of device number deviceID and open that.
        case OPENNI2_OPEN_REGULAR_ENUM:
        default:
        {
            // We have to supply our own buffer to hold the device URI string, so we make one here.
            char devURIBuffer[512] = {0};
            if (device.open(getURIForDeviceNumber(deviceID, devURIBuffer, 512)) != STATUS_OK)
            {
                fprintf(stderr, "Could not open an OpenNI device : %s \n", OpenNI::getExtendedError());
                return 0;
            }
            break;
        }
    }

    if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
    {
        Status rc = depth.create(device, SENSOR_DEPTH);
        if (rc == STATUS_OK)
        {
            VideoMode depthMode = depth.getVideoMode();
            depthMode.setResolution(width, height);
            depthMode.setFps(fps);
            rc = depth.setVideoMode(depthMode);
            if (rc != STATUS_OK)
            {
                fprintf(stderr, "Error setting depth video mode to the requested %u x %u @ %u fps\n%s\n", width, height, fps, OpenNI::getExtendedError());
            }
            if (depth.start() != STATUS_OK)
            {
                fprintf(stderr, "Couldn't start the depth stream: %s \n", OpenNI::getExtendedError());
                return 0;
            }
        }
        else
        {
            fprintf(stderr, "Couldn't create depth stream: %s \n", OpenNI::getExtendedError());
            return 0;
        }
    }

    if (device.getSensorInfo(SENSOR_COLOR) != NULL)
    {
        Status rc = color.create(device, SENSOR_COLOR);
        if (rc == STATUS_OK)
        {
            VideoMode colorMode = color.getVideoMode();
            colorMode.setResolution(width, height);
            colorMode.setFps(fps);
            rc = color.setVideoMode(colorMode);
            if (rc != STATUS_OK)
            {
                fprintf(stderr, "Error setting color video mode to the requested %u x %u @ %u fps\n%s\n", width, height, fps, OpenNI::getExtendedError());
            }
            if (color.start() != STATUS_OK)
            {
                fprintf(stderr, "Couldn't start the color stream: %s \n", OpenNI::getExtendedError());
                return 0;
            }
        }
        else
        {
            fprintf(stderr, "Couldn't create color stream: %s \n", OpenNI::getExtendedError());
            return 0;
        }
    }

#if MOD_IR
    // "ir" is assumed to be a VideoStream declared elsewhere when MOD_IR is enabled.
    if (device.getSensorInfo(SENSOR_IR) != NULL)
    {
        Status rc = ir.create(device, SENSOR_IR); // Create the VideoStream for IR
        if (rc == STATUS_OK)
        {
            rc = ir.start(); // Start the IR VideoStream
        }
        else
        {
            fprintf(stderr, "Couldn't create IR stream: %s \n", OpenNI::getExtendedError());
            return 0;
        }
    }
#endif // MOD_IR

    // Mirroring is disabled.
    depth.setMirroringEnabled(false);
    color.setMirroringEnabled(false);

    fprintf(stdout, "Device Initialization Requested %u x %u @ %u fps \n", width, height, fps);
    return 1;
}
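// Usage sketch for initializeOpenNIDevice() (assumptions: device index 0 is
// present, 640x480 @ 30 fps is an illustrative mode, and runCaptureExample is
// a hypothetical wrapper name not taken from the original code).
int runCaptureExample()
{
    Device device;
    VideoStream color, depth;
    if (OpenNI::initialize() != STATUS_OK)
    {
        fprintf(stderr, "OpenNI initialization failed: %s\n", OpenNI::getExtendedError());
        return 0;
    }
    if (!initializeOpenNIDevice(0 /*deviceID*/, 0 /*deviceName*/, device, color, depth, 640, 480, 30))
    {
        fprintf(stderr, "Device initialization failed\n");
        OpenNI::shutdown();
        return 0;
    }
    // ... read frames from color/depth here ...
    color.stop();
    depth.stop();
    color.destroy();
    depth.destroy();
    device.close();
    OpenNI::shutdown();
    return 1;
}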