std::vector<cv::Point2f> getChessboardCornersFileStream(cv::VideoCapture &capture,
                                                        const std::vector<cv::Mat> &rectificationMaps,
                                                        const int &horizon,
                                                        const int &deadZone,
                                                        const cv::Size &boardSize,
                                                        const cv::Size &imageSize,
                                                        const cv::Size &winSize,
                                                        const cv::Size &zeroZone,
                                                        const cv::TermCriteria &termCriteria)
{
  cv::Mat newIn;
  cv::Mat undistorted;
  cv::Mat greyNewIn;
  cv::Mat ground;
  cv::Rect lowerRoi(cv::Point2f(0, horizon + deadZone),
                    cv::Size(imageSize.width, imageSize.height - horizon - deadZone));
  bool found = false;
  std::vector<cv::Point2f> corners;
  cv::namedWindow("ground", CV_WINDOW_NORMAL);
  char control = ' ';
  cv::Mat resizing;

  do
  {
    capture >> newIn;
    cv::resize(newIn, resizing, imageSize);
    cv::remap(resizing, undistorted, rectificationMaps[0], rectificationMaps[1], cv::INTER_LINEAR);
    cv::cvtColor(undistorted, greyNewIn, CV_RGB2GRAY);
    ground = greyNewIn(lowerRoi);
    found = cv::findChessboardCorners(ground, boardSize, corners,
                                      CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE);
    if (corners.size() > 0)
    {
      drawChessboardCorners(undistorted(lowerRoi), boardSize, corners, found);
      drawChessboardCorners(ground, boardSize, corners, found);
      std::cerr << corners.size() << std::endl;
    }
    drawDeadZoneHorizon(undistorted, horizon, deadZone);
    imshow("ground", ground);
    imshow("main", undistorted);
    control = cv::waitKey(1);
  } while (!found && 'q' != control);

  if ('q' == control)
  {
    cv::Exception ex(USER_TRIGGERED_EXIT, "user requested exit", __func__, __FILE__, __LINE__);
    throw ex;
  }

  cv::cornerSubPix(ground, corners, winSize, zeroZone, termCriteria);
  cv::imwrite("chessboard.jpeg", undistorted);
  cv::drawChessboardCorners(undistorted(lowerRoi), boardSize, corners, found);
  cv::imshow("main", undistorted);
  cv::destroyWindow("ground");
  cv::destroyWindow("main");
  return corners;
}
void calibrateParameters(cv::VideoCapture &capture,
                         std::vector<cv::Mat> rectifyMaps,
                         int &horizon,
                         int &deadZone,
                         const cv::Size &imageSize)
{
  cv::Mat newIn;
  cv::Mat undistorted;
  cv::Mat far;
  cv::Mat ground;
  char control = ' ';
  cv::Rect lowerRoi;
  cv::Rect upperRoi;
  cv::namedWindow("ground", CV_WINDOW_NORMAL);
  cv::namedWindow("far", CV_WINDOW_NORMAL);
  cv::namedWindow("main", CV_WINDOW_NORMAL);
  cv::namedWindow("trackbars", CV_WINDOW_NORMAL);
  cv::createTrackbar("horizon", "trackbars", &horizon, imageSize.height);
  cv::createTrackbar("dead zone", "trackbars", &deadZone, imageSize.height / 2);
  cv::Mat resizing;
  std::cerr << imageSize.width << " " << imageSize.height << std::endl;

  do
  {
    capture >> newIn;
    cv::remap(newIn, undistorted, rectifyMaps[0], rectifyMaps[1], cv::INTER_LINEAR);
    lowerRoi = cv::Rect(cv::Point2f(0, horizon + deadZone),
                        cv::Size(undistorted.cols, undistorted.rows - horizon - deadZone));
    upperRoi = cv::Rect(cv::Point2f(0, 0),
                        cv::Size(undistorted.cols, horizon - deadZone));
    ground = undistorted(lowerRoi);
    far = undistorted(upperRoi);
    drawDeadZoneHorizon(undistorted, horizon, deadZone);
    cv::line(undistorted, cv::Point(400, 0), cv::Point(400, 600), CV_RGB(255, 0, 0), 1, 8, 0);
    imshow("ground", ground);
    imshow("far", far);
    imshow("main", undistorted);
    control = cv::waitKey(1);
  } while ('s' != control && 'q' != control);

  if ('q' == control)
  {
    cv::Exception ex(USER_TRIGGERED_EXIT, "user commanded exit", __func__, __FILE__, __LINE__);
    throw ex;
  }

  cv::destroyWindow("ground");
  cv::destroyWindow("far");
  cv::destroyWindow("main");
}
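// Both calibration helpers above call drawDeadZoneHorizon(), whose definition is not part
// of this excerpt. A minimal sketch of one plausible implementation, assuming it only marks
// the horizon line and the dead-zone band on the preview image (the original body may differ):
static void drawDeadZoneHorizon(cv::Mat &image, int horizon, int deadZone)
{
  // horizon line
  cv::line(image, cv::Point(0, horizon), cv::Point(image.cols, horizon),
           CV_RGB(0, 255, 0), 1, 8, 0);
  // upper and lower borders of the dead zone around the horizon
  cv::line(image, cv::Point(0, horizon - deadZone), cv::Point(image.cols, horizon - deadZone),
           CV_RGB(255, 0, 0), 1, 8, 0);
  cv::line(image, cv::Point(0, horizon + deadZone), cv::Point(image.cols, horizon + deadZone),
           CV_RGB(255, 0, 0), 1, 8, 0);
}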
void Kinect::run()
{
  while (!stop_)
  {
    libfreenect2::FrameMap frames;
    listener_.waitForNewFrame(frames);
    libfreenect2::Frame *rgb_frame = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *depth_frame = frames[libfreenect2::Frame::Depth];

    libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
    registration_->apply(rgb_frame, depth_frame, &undistorted, &registered);
    listener_.release(frames);

    cv::Mat rgba((int)registered.height, (int)registered.width, CV_8UC4, registered.data);
    cv::Mat depth((int)undistorted.height, (int)undistorted.width, CV_32FC1, undistorted.data);
    cv::Mat rgb;
    // Flip horizontally to get correct directions.
    cv::Mat rgb_flipped;
    cv::Mat depth_flipped;
    cv::cvtColor(rgba, rgb, CV_BGRA2BGR);
    // 1 stands for the horizontal flip
    cv::flip(rgb, rgb_flipped, 1);
    cv::flip(depth, depth_flipped, 1);

    for (std::vector<boost::shared_ptr<KinectFrameListener> >::iterator iter = frame_listeners_.begin();
         iter != frame_listeners_.end(); iter++)
    {
      (*iter)->onFrame(rgb_flipped, depth_flipped);
    }
  }
}
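// Hedged usage sketch: Kinect::run() above hands each flipped BGR/depth pair to every
// registered KinectFrameListener. The concrete listener below is hypothetical and assumes
// the interface declares a virtual onFrame(const cv::Mat&, const cv::Mat&); the exact
// signature in the original header may differ.
class FrameSizePrinter : public KinectFrameListener
{
public:
  void onFrame(const cv::Mat &rgb, const cv::Mat &depth)
  {
    // just report the sizes of the frames delivered by Kinect::run()
    std::cout << "rgb " << rgb.cols << "x" << rgb.rows
              << ", depth " << depth.cols << "x" << depth.rows << std::endl;
  }
};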
void ImageCallback(const sensor_msgs::ImageConstPtr& msg,
                   const sensor_msgs::CameraInfoConstPtr& info)
{
  cv_bridge::CvImageConstPtr frame;
  try
  {
    frame = cv_bridge::toCvShare(msg, msg->encoding);
  }
  catch (cv_bridge::Exception& e)
  {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }

  camplex::CameraCalibration calib("", *info);

  if (_useMaps && !_mapsInited)
  {
    _distMap1 = cv::Mat(frame->image.size(), CV_16SC2);
    _distMap2 = cv::Mat(frame->image.size(), CV_16UC1);
    cv::initUndistortRectifyMap(calib.GetIntrinsicMatrix(),
                                calib.GetDistortionCoeffs(),
                                cv::noArray(),
                                calib.GetIntrinsicMatrix(),
                                frame->image.size(),
                                CV_16SC2,
                                _distMap1,
                                _distMap2);
    _mapsInited = true;
  }

  cv::Mat undistorted(frame->image.size(), frame->image.type());
  if (!_useMaps)
  {
    cv::undistort(frame->image, undistorted, calib.GetIntrinsicMatrix(), calib.GetDistortionCoeffs());
  }
  else
  {
    cv::remap(frame->image, undistorted, _distMap1, _distMap2, cv::INTER_LINEAR);
  }

  sensor_msgs::ImagePtr outImage = cv_bridge::CvImage(msg->header, msg->encoding, undistorted).toImageMsg();
  sensor_msgs::CameraInfoPtr outInfo = boost::make_shared<sensor_msgs::CameraInfo>(*info);
  outInfo->D = std::vector<double>(5, 0.0);
  _imagePub.publish(outImage, outInfo);
}
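// Hedged wiring sketch for ImageCallback above. It assumes _imagePub is an
// image_transport::CameraPublisher visible at file scope (the original may keep it and the
// map state as class members instead); topic names, the node name, and the entry-point name
// are placeholders, not taken from the original source.
#include <ros/ros.h>
#include <image_transport/image_transport.h>

image_transport::CameraPublisher _imagePub;  // assumed global, see note above

int undistort_node_main(int argc, char **argv)  // hypothetical entry point
{
  ros::init(argc, argv, "undistort_node");
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);
  // advertise first so _imagePub is valid by the time ImageCallback fires
  _imagePub = it.advertiseCamera("image_rect", 1);
  image_transport::CameraSubscriber sub = it.subscribeCamera("image_raw", 1, &ImageCallback);
  ros::spin();
  return 0;
}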
/**
 * Main application entry point.
 *
 * Accepted arguments:
 * - cpu        Perform depth processing with the CPU.
 * - gl         Perform depth processing with OpenGL.
 * - cl         Perform depth processing with OpenCL.
 * - <number>   Serial number of the device to open.
 * - -noviewer  Disable viewer window.
 */
int main(int argc, char *argv[])
{
  std::string program_path(argv[0]);
  size_t executable_name_idx = program_path.rfind("Protonect");

  std::string binpath = "/";
  if (executable_name_idx != std::string::npos)
  {
    binpath = program_path.substr(0, executable_name_idx);
  }

  libfreenect2::Freenect2 freenect2;

  // create a console logger with debug level (default is console logger with info level)
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Debug));
  MyFileLogger *filelogger = new MyFileLogger(getenv("LOGFILE"));
  if (filelogger->good())
    libfreenect2::setGlobalLogger(filelogger);

  libfreenect2::Freenect2Device *dev = 0;
  libfreenect2::PacketPipeline *pipeline = 0;

  if (freenect2.enumerateDevices() == 0)
  {
    std::cout << "no device connected!" << std::endl;
    return -1;
  }

  std::string serial = freenect2.getDefaultDeviceSerialNumber();
  bool viewer_enabled = true;

  for (int argI = 1; argI < argc; ++argI)
  {
    const std::string arg(argv[argI]);
    if (arg == "cpu")
    {
      if (!pipeline)
        pipeline = new libfreenect2::CpuPacketPipeline();
    }
    else if (arg == "gl")
    {
#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenGLPacketPipeline();
#else
      std::cout << "OpenGL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cl")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLPacketPipeline();
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg.find_first_not_of("0123456789") == std::string::npos) // check if parameter could be a serial number
    {
      serial = arg;
    }
    else if (arg == "-noviewer")
    {
      viewer_enabled = false;
    }
    else
    {
      std::cout << "Unknown argument: " << arg << std::endl;
    }
  }

  if (pipeline)
  {
    dev = freenect2.openDevice(serial, pipeline);
  }
  else
  {
    dev = freenect2.openDevice(serial);
  }

  if (dev == 0)
  {
    std::cout << "failure opening device!" << std::endl;
    return -1;
  }

  signal(SIGINT, sigint_handler);
  protonect_shutdown = false;

  libfreenect2::SyncMultiFrameListener listener(libfreenect2::Frame::Color | libfreenect2::Frame::Ir | libfreenect2::Frame::Depth);
  libfreenect2::FrameMap frames;
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);

  dev->setColorFrameListener(&listener);
  dev->setIrAndDepthFrameListener(&listener);
  dev->start();

  std::cout << "device serial: " << dev->getSerialNumber() << std::endl;
  std::cout << "device firmware: " << dev->getFirmwareVersion() << std::endl;

  libfreenect2::Registration* registration = new libfreenect2::Registration(dev->getIrCameraParams(), dev->getColorCameraParams());

  size_t framecount = 0;
#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
  Viewer viewer;
  if (viewer_enabled)
    viewer.initialize();
#else
  viewer_enabled = false;
#endif

  while (!protonect_shutdown)
  {
    listener.waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *ir = frames[libfreenect2::Frame::Ir];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];

    registration->apply(rgb, depth, &undistorted, &registered);

    framecount++;
    if (!viewer_enabled)
    {
      if (framecount % 100 == 0)
        std::cout << "The viewer is turned off. Received " << framecount << " frames. Ctrl-C to stop." << std::endl;
      listener.release(frames);
      continue;
    }

#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
    viewer.addFrame("RGB", rgb);
    viewer.addFrame("ir", ir);
    viewer.addFrame("depth", depth);
    viewer.addFrame("registered", &registered);
    protonect_shutdown = protonect_shutdown || viewer.render();
#endif

    listener.release(frames);
    //libfreenect2::this_thread::sleep_for(libfreenect2::chrono::milliseconds(100));
  }

  // TODO: restarting ir stream doesn't work!
  // TODO: bad things will happen, if frame listeners are freed before dev->stop() :(
  dev->stop();
  dev->close();

  delete registration;

  return 0;
}
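// The Protonect-style mains in this file rely on a few globals and signal handlers defined
// elsewhere in the original sources. A minimal sketch of what they typically look like in
// the libfreenect2 examples (assumed here, not copied from this codebase):
bool protonect_shutdown = false;             // whether the running application should shut down
bool protonect_paused = false;
libfreenect2::Freenect2Device *devtopause;   // device toggled by the pause signal

void sigint_handler(int s)
{
  protonect_shutdown = true;
}

void sigusr1_handler(int s)
{
  if (devtopause == 0)
    return;
  // pause/unpause by stopping and restarting the device streams
  if (protonect_paused)
    devtopause->start();
  else
    devtopause->stop();
  protonect_paused = !protonect_paused;
}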
/**
 * Main application entry point.
 *
 * Accepted arguments:
 * - cpu        Perform depth processing with the CPU.
 * - gl         Perform depth processing with OpenGL.
 * - cl         Perform depth processing with OpenCL.
 * - <number>   Serial number of the device to open.
 * - -noviewer  Disable viewer window.
 * - -streamer  Enable UDP streaming of captured images.
 * - -recorder  Enable recording of captured images.
 * - -replay    Enable replay of captured images.
 */
int main(int argc, char *argv[])
/// [main]
{
  std::string program_path(argv[0]);
  std::cerr << "Version: " << LIBFREENECT2_VERSION << std::endl;
  std::cerr << "Environment variables: LOGFILE=<protonect.log>" << std::endl;
  std::cerr << "Usage: " << program_path << " [-gpu=<id>] [gl | cl | clkde | cuda | cudakde | cpu] [<device serial>]" << std::endl;
  std::cerr << "       [-noviewer] [-norgb | -nodepth] [-help] [-version]" << std::endl;
  std::cerr << "       [-recorder] [-streamer] [-replay]" << std::endl;
  std::cerr << "       [-frames <number of frames to process>]" << std::endl;
  std::cerr << "To pause and unpause: pkill -USR1 ProtonectSR" << std::endl;
  size_t executable_name_idx = program_path.rfind("ProtonectSR");
  const std::string prog(argv[0]);

  std::string binpath = "/";
  if (executable_name_idx != std::string::npos)
  {
    binpath = program_path.substr(0, executable_name_idx);
  }

#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)
  // avoid flooding the very slow Windows console with debug messages
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Info));
#else
  // create a console logger with debug level (default is console logger with info level)
  /// [logging]
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Debug));
  /// [logging]
#endif
  /// [file logging]
  MyFileLogger *filelogger = new MyFileLogger(getenv("LOGFILE"));
  if (filelogger->good())
    libfreenect2::setGlobalLogger(filelogger);
  else
    delete filelogger;
  /// [file logging]

  /// [context]
  libfreenect2::Freenect2 freenect2;
  // TODO: enable on merge
  //libfreenect2::Freenect2Replay freenect2replay;
  libfreenect2::Freenect2Device *dev = 0;
  libfreenect2::PacketPipeline *pipeline = 0;
  /// [context]

  std::string serial = "";

  bool viewer_enabled = true;
  bool streamer_enabled = false;
  bool recorder_enabled = false;
  bool replay_enabled = false;
  bool enable_rgb = true;
  bool enable_depth = true;
  int deviceId = -1;
  size_t framemax = -1;

  for (int argI = 1; argI < argc; ++argI)
  {
    const std::string arg(argv[argI]);

    if (arg == "-help" || arg == "--help" || arg == "-h" || arg == "-v" || arg == "--version" || arg == "-version")
    {
      // Just let the initial lines display at the beginning of main
      return 0;
    }
    else if (arg.find("-gpu=") == 0)
    {
      if (pipeline)
      {
        std::cerr << "-gpu must be specified before pipeline argument" << std::endl;
        return -1;
      }
      deviceId = atoi(argv[argI] + 5);
    }
    else if (arg == "cpu")
    {
      if (!pipeline)
        /// [pipeline]
        pipeline = new libfreenect2::CpuPacketPipeline();
        /// [pipeline]
    }
    else if (arg == "gl")
    {
#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenGLPacketPipeline();
#else
      std::cout << "OpenGL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cl")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLPacketPipeline(deviceId);
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "clkde")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLKdePacketPipeline(deviceId);
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cuda")
    {
#ifdef LIBFREENECT2_WITH_CUDA_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::CudaPacketPipeline(deviceId);
#else
      std::cout << "CUDA pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cudakde")
    {
#ifdef LIBFREENECT2_WITH_CUDA_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::CudaKdePacketPipeline(deviceId);
#else
      std::cout << "CUDA pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg.find_first_not_of("0123456789") == std::string::npos) // check if parameter could be a serial number
    {
      serial = arg;
    }
    else if (arg == "-noviewer" || arg == "--noviewer")
    {
      viewer_enabled = false;
    }
    else if (arg == "-norgb" || arg == "--norgb")
    {
      enable_rgb = false;
    }
    else if (arg == "-nodepth" || arg == "--nodepth")
    {
      enable_depth = false;
    }
    else if (arg == "-frames")
    {
      ++argI;
      framemax = strtol(argv[argI], NULL, 0);
      if (framemax == 0)
      {
        std::cerr << "invalid frame count '" << argv[argI] << "'" << std::endl;
        return -1;
      }
    }
    else if (arg == "-streamer" || arg == "--streamer" || prog == "freenect2-stream")
    {
      streamer_enabled = true;
    }
    else if (arg == "-recorder" || arg == "--recorder" || prog == "freenect2-record")
    {
      recorder_enabled = true;
    }
    else if (arg == "-replay" || arg == "--replay" || prog == "freenect2-replay")
    {
      replay_enabled = true;
    }
    else
    {
      std::cout << "Unknown argument: " << arg << std::endl;
    }
  }

  if (!enable_rgb && !enable_depth)
  {
    std::cerr << "Disabling both streams is not allowed!" << std::endl;
    return -1;
  }

  /// [discovery]
  if (replay_enabled == false)
  {
    if (freenect2.enumerateDevices() == 0)
    {
      std::cout << "no device connected!" << std::endl;
      return -1;
    }

    if (serial == "")
    {
      serial = freenect2.getDefaultDeviceSerialNumber();
    }
  }
  /// [discovery]

  if (replay_enabled == false)
  {
    if (pipeline)
    {
      /// [open]
      dev = freenect2.openDevice(serial, pipeline);
      /// [open]
    }
    else
    {
      dev = freenect2.openDevice(serial);
    }
  }
  else
  {
    DIR *d;
    struct dirent *dir;
    std::vector<std::string> frame_filenames;
    d = opendir("recordings/depth");
    if (!d)
    {
      std::cerr << "Could not open directory recordings/depth for replay." << std::endl;
      exit(1);
    }
    while ((dir = readdir(d)) != NULL)
    {
      std::string name = dir->d_name;
      if (hasSuffix(name, ".depth"))
      {
        frame_filenames.push_back(name);
      }
      else
      {
        std::cerr << "Skipping currently unsupported frame filename: " << name << std::endl;
      }
    }
    // TODO: enable on merge
    /*
    if (pipeline)
    {
      /// [open]
      dev = freenect2replay.openDevice(frame_filenames, pipeline);
      /// [open]
    }
    else
    {
      dev = freenect2replay.openDevice(frame_filenames);
    }
    */
  }

  if (dev == 0)
  {
    std::cout << "failure opening device!" << std::endl;
    return -1;
  }

  devtopause = dev;

  signal(SIGINT, sigint_handler);
#ifdef SIGUSR1
  signal(SIGUSR1, sigusr1_handler);
#endif
  protonect_shutdown = false;

  /// [listeners]
  int types = 0;
  if (enable_rgb)
    types |= libfreenect2::Frame::Color;
  if (enable_depth)
    types |= libfreenect2::Frame::Ir | libfreenect2::Frame::Depth;
  libfreenect2::SyncMultiFrameListener listener(types);
  libfreenect2::FrameMap frames;

  dev->setColorFrameListener(&listener);
  dev->setIrAndDepthFrameListener(&listener);
  /// [listeners]

  /// [start]
  if (enable_rgb && enable_depth)
  {
    if (!dev->start())
      return -1;
  }
  else
  {
    if (!dev->startStreams(enable_rgb, enable_depth))
      return -1;
  }

  std::cout << "device serial: " << dev->getSerialNumber() << std::endl;
  std::cout << "device firmware: " << dev->getFirmwareVersion() << std::endl;
  /// [start]

  /// [registration setup]
  libfreenect2::Registration* registration = new libfreenect2::Registration(dev->getIrCameraParams(), dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  /// [registration setup]

  size_t framecount = 0;
#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
  Viewer viewer;
  if (viewer_enabled)
    viewer.initialize();
#else
  viewer_enabled = false;
#endif

  Streamer streamer; // have to declare it outside statements to be accessible everywhere
  Recorder recorder;
  if (streamer_enabled)
  {
    streamer.initialize();
  }
  if (recorder_enabled)
  {
    recorder.initialize();
  }

  /// [loop start]
  while (!protonect_shutdown && (framemax == (size_t)-1 || framecount < framemax))
  {
    if (!listener.waitForNewFrame(frames, 10 * 1000)) // 10 seconds
    {
      std::cout << "timeout!" << std::endl;
      return -1;
    }
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *ir = frames[libfreenect2::Frame::Ir];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
    /// [loop start]

    if (enable_rgb && enable_depth)
    {
      /// [registration]
      registration->apply(rgb, depth, &undistorted, &registered);
      /// [registration]
    }

    framecount++;

    if (streamer_enabled)
    {
      streamer.stream(depth);
    }

    if (recorder_enabled)
    {
      // TODO: add recording timestamp if max frame number reached
      //       + avoid recording new ones
      recorder.record(depth, "depth");
      recorder.record(&registered, "registered");
      // recorder.record(rgb, "rgb");
      recorder.registTimeStamp();
    }

    if (!viewer_enabled)
    {
      if (framecount % 100 == 0)
        std::cout << "The viewer is turned off. Received " << framecount << " frames. Ctrl-C to stop." << std::endl;
      listener.release(frames);
      continue;
    }

#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
    if (enable_rgb)
    {
      viewer.addFrame("RGB", rgb);
    }
    if (enable_depth)
    {
      viewer.addFrame("ir", ir);
      viewer.addFrame("depth", depth);
    }
    if (enable_rgb && enable_depth)
    {
      viewer.addFrame("registered", &registered);
    }

    protonect_shutdown = protonect_shutdown || viewer.render();
#endif

    /// [loop end]
    listener.release(frames);
    /** libfreenect2::this_thread::sleep_for(libfreenect2::chrono::milliseconds(100)); */
  }
  /// [loop end]

  if (recorder_enabled)
  {
    recorder.saveTimeStamp();
  }

  // TODO: restarting ir stream doesn't work!
  // TODO: bad things will happen, if frame listeners are freed before dev->stop() :(
  /// [stop]
  dev->stop();
  dev->close();
  /// [stop]

  delete registration;

  return 0;
}
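// The replay branch above calls hasSuffix(), which is not shown in this excerpt. A minimal
// sketch inferred from the call site (the original definition may differ in detail):
static bool hasSuffix(const std::string &str, const std::string &suffix)
{
  return str.size() >= suffix.size() &&
         str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
}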
void Projector::reproject(bool gpuView)
{
  libfreenect2::Registration* registration = new libfreenect2::Registration(_dev->getIrCameraParams(), _dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  libfreenect2::FrameMap frames;
  SimpleViewer viewer;
  bool shutdown = false;
  cv::Mat board(480, 640, CV_8UC4, cv::Scalar::all(255));

  if (!gpuView)
  {
    cv::namedWindow("reprojection", CV_WINDOW_NORMAL);
    cv::moveWindow("reprojection", 1200, 0);
    cv::setWindowProperty("reprojection", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
  }
  else
  {
    viewer.setSize(480, 640); // TO-DO change resolution
    viewer.initialize();
    libfreenect2::Frame b(640, 480, 4);
    b.data = board.data;
    viewer.addFrame("RGB", &b);
    shutdown = shutdown || viewer.render();
  }

  while (!shutdown)
  {
    board = cv::Mat(480, 640, CV_8UC4, cv::Scalar::all(255));
    std::vector<cv::Point3f> wrldSrc;
    if (!gpuView)
      cv::imshow("reprojection", board);

    (_listener)->waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
    registration->apply(rgb, depth, &undistorted, &registered, true, NULL, NULL);

    for (int i = 0; i < 512; i++)
    {
      for (int j = 0; j < 424; j++)
      {
        float x = 0, y = 0, z = 0, color = 0;
        registration->getPointXYZRGB(&undistorted, &registered, i, j, x, y, z, color);
        if (z > 0.5 && z < 1.7)
        {
          x = static_cast<float>(x + right / ((double)640.0)); ////////// TO-DO fix that
          y = static_cast<float>(y + up / ((double)480.0));
          x -= 0.5;
          y -= 0.5;
          // rotate (x, y) by rotX degrees; temporaries keep the rotation consistent
          // (x must not be overwritten before y is computed)
          double PI = 3.14159265;
          float xr = static_cast<float>(std::cos(rotX * PI / 180) * x - std::sin(rotX * PI / 180) * y);
          float yr = static_cast<float>(std::sin(rotX * PI / 180) * x + std::cos(rotX * PI / 180) * y);
          x = xr + 0.5f;
          y = yr + 0.5f;
          wrldSrc.push_back(cv::Point3f(x * 100, y * 100, z * 100));
        }
      }
    }

    if (wrldSrc.size() > 0)
    {
      std::vector<cv::Point2f> projected = projectPoints(wrldSrc);
      for (int i = 0; i < projected.size(); i++)
      {
        if (480 - projected[i].x > 0 && projected[i].y > 0 && 480 - projected[i].x < 475 && projected[i].y < 630)
        {
          cv::Mat ROI = board(cv::Rect(static_cast<int>(projected[i].y), static_cast<int>(480 - projected[i].x), 2, 2));
          ROI.setTo(cv::Scalar(100, 100, 150, 100));
        }
      }

      if (!gpuView)
        imshow("reprojection", board);
      else
      {
        libfreenect2::Frame b(640, 480, 4);
        b.data = board.data;
        viewer.addFrame("RGB", &b);
        shutdown = shutdown || viewer.render();
      }
    }

    (_listener)->release(frames);

    if (!gpuView)
    {
      int op = cv::waitKey(50);
      if (op == 100 || (char)(op) == 'd') right -= 1;
      if (op == 115 || (char)(op) == 's') up += 1;
      if (op == 97  || (char)(op) == 'a') right += 1;
      if (op == 119 || (char)(op) == 'w') up -= 1;
      if (op == 114 || (char)(op) == 'r') rotX -= 0.5;
      if (op == 102 || (char)(op) == 'f') rotX += 0.5;
      if (op == 1113997 || op == 1048586 || op == 1048608 || op == 10 || op == 32)
      {
        std::cout << "right = " << right << ";\nup = " << up << ";\nrotX = " << rotX << ";\n";
        break;
      }
    }
    else
    {
      right = viewer.offsetX;
      up = viewer.offsetY;
      rotX = viewer.rot;
    }
  }

  if (!gpuView)
    cv::destroyWindow("reprojection");
  else
  {
    viewer.stopWindow();
  }
}
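// Hedged sketch: Projector::projectPoints() is used throughout these methods but not shown
// in this excerpt. It most likely wraps cv::projectPoints with the stored extrinsics and the
// projector intrinsics; the member names _mr (rotation vector), _mt (translation) and _pro
// (projector camera matrix) are assumptions borrowed from the ctProjection() thread arguments
// further down, and the helper name is hypothetical.
std::vector<cv::Point2f> projectPointsSketch(const std::vector<cv::Point3f> &world,
                                             const cv::Mat &_mr, const cv::Mat &_mt, const cv::Mat &_pro)
{
  std::vector<cv::Point2f> projected;
  // no distortion model applied here; the original may pass projector distortion coefficients
  cv::projectPoints(world, _mr, _mt, _pro, cv::noArray(), projected);
  return projected;
}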
void Projector::showRectangle(bool gpuView)
{
  indices.resize(480);
  for (int i = 0; i < 480; i++)
    indices[i].resize(640);

  libfreenect2::Registration* registration = new libfreenect2::Registration(_dev->getIrCameraParams(), _dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  libfreenect2::FrameMap frames;
  SimpleViewer viewer;
  bool shutdown = false;
  cv::Mat board(480, 640, CV_8UC4, cv::Scalar::all(255));

  if (!gpuView)
  {
    cv::namedWindow("reprojection", CV_WINDOW_NORMAL);
    cv::moveWindow("reprojection", 0, 0);
    //setWindowProperty("reprojection", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
  }
  else
  {
    viewer.setSize(480, 640); // TO-DO change resolution
    viewer.initialize();
    libfreenect2::Frame b(640, 480, 4);
    b.data = board.data;
    viewer.addFrame("RGB", &b);
    shutdown = shutdown || viewer.render();
  }

  while (!shutdown)
  {
    board = cv::Mat(480, 640, CV_8UC4, cv::Scalar::all(255));
    std::vector<cv::Point3f> wrldSrc;
    std::vector<cv::Point3f> plnSrc;
    if (!gpuView)
      cv::imshow("reprojection", board);

    (_listener)->waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
    registration->apply(rgb, depth, &undistorted, &registered, true, NULL, NULL);

    for (int i = 0; i < 512; i++)
    {
      for (int j = 0; j < 424; j++)
      {
        float x = 0, y = 0, z = 0, color = 0;
        registration->getPointXYZRGB(&undistorted, &registered, i, j, x, y, z, color);
        if (z > 0.5 && z < 2.1)
        {
          x = static_cast<float>(x + right / ((double)640.0)); ////////// TO-DO fix that
          y = static_cast<float>(y + up / ((double)480.0));
          x -= 0.5;
          y -= 0.5;
          // rotate (x, y) by rotX degrees; temporaries keep the rotation consistent
          // (x must not be overwritten before y is computed)
          double PI = 3.14159265;
          float xr = static_cast<float>(std::cos(rotX * PI / 180) * x - std::sin(rotX * PI / 180) * y);
          float yr = static_cast<float>(std::sin(rotX * PI / 180) * x + std::cos(rotX * PI / 180) * y);
          x = xr + 0.5f;
          y = yr + 0.5f;
          wrldSrc.push_back(cv::Point3f(x * 100, y * 100, z * 100));
        }
      }
    }

    PlaneData pln = findRectangle(registration, &undistorted, &registered);

    if (wrldSrc.size() > 0)
    {
      std::vector<cv::Point2f> projected = projectPoints(wrldSrc);
      for (int i = 0; i < projected.size(); i++)
      {
        if (480 - projected[i].x > 0 && projected[i].y > 0 && 480 - projected[i].x < 475 && projected[i].y < 630)
        {
          cv::Mat ROI = board(cv::Rect(static_cast<int>(projected[i].y), static_cast<int>(480 - projected[i].x), 2, 2));
          ROI.setTo(cv::Scalar(100, 100, 150, 100));
        }
      }

      if (pln.points.size() > 0)
      {
        projected = projectPoints(pln.points);
        cv::Mat cont = cv::Mat(480, 640, CV_8UC1, cv::Scalar::all(0));
        for (int i = 0; i < projected.size(); i++)
        {
          if (480 - projected[i].x > 0 && projected[i].y > 0 && 480 - projected[i].x < 475 && projected[i].y < 630)
          {
            cv::Mat ROI = board(cv::Rect(static_cast<int>(projected[i].y), static_cast<int>(480 - projected[i].x), 2, 2));
            ROI.setTo(cv::Scalar(250, 100, 100, 100));
            cont.at<uchar>(static_cast<int>(480 - projected[i].x), static_cast<int>(projected[i].y)) = 255;
            indices[static_cast<int>(480 - projected[i].x)][static_cast<int>(projected[i].y)] = i;
          }
        }

        vector<vector<cv::Point> > contours;
        vector<cv::Vec4i> hierarchy;
        cv::GaussianBlur(cont, cont, cv::Size(7, 7), 5, 11);
        findContours(cont, contours, hierarchy, cv::RETR_CCOMP, cv::CHAIN_APPROX_NONE, cv::Point(0, 0));

        vector<vector<cv::Point> > contours_poly(contours.size());
        vector<cv::Rect> boundRect(contours.size());
        vector<cv::Point2f> center(contours.size());
        vector<float> radius(contours.size());
        int nPoly;
        for (int i = 0; i < contours.size(); i++)
        {
          cv::approxPolyDP(cv::Mat(contours[i]), contours_poly[i], 10, true);
          nPoly = contours_poly[i].size();
        }
        for (int i = 0; i < contours.size(); i++)
        {
          drawContours(board, contours_poly, 0, cv::Scalar(0, 255, 0), 5);
        }
      }

      if (!gpuView)
        imshow("reprojection", board);
      else
      {
        libfreenect2::Frame b(640, 480, 4);
        b.data = board.data;
        viewer.addFrame("RGB", &b);
        shutdown = shutdown || viewer.render();
      }
    }

    (_listener)->release(frames);

    if (!gpuView)
    {
      int op = cv::waitKey(50);
      if (op == 100 || (char)(op) == 'd') right -= 1;
      if (op == 115 || (char)(op) == 's') up += 1;
      if (op == 97  || (char)(op) == 'a') right += 1;
      if (op == 119 || (char)(op) == 'w') up -= 1;
      if (op == 114 || (char)(op) == 'r') rotX -= 0.5;
      if (op == 102 || (char)(op) == 'f') rotX += 0.5;
      if (op == 1113997 || op == 1048586 || op == 1048608 || op == 10 || op == 32)
      {
        std::cout << "right = " << right << ";\nup = " << up << ";\nrotX = " << rotX << ";\n";
        break;
      }
    }
    else
    {
      right = viewer.offsetX;
      up = viewer.offsetY;
      rotX = viewer.rot;
    }
  }

  if (!gpuView)
    cv::destroyWindow("reprojection");
  else
  {
    viewer.stopWindow();
  }
}
void Projector::objProjectionOffline(std::string objPath, std::string objName, bool gpuView)
{
  std::cout << "Camera init: ";
  objObject obj(objPath, objName);
  obj.loadData();
  cout << "DONE\n";

  cv::namedWindow("objTest", CV_WINDOW_NORMAL);
  cv::moveWindow("objTest", 0, 0);

  indices.resize(480);
  for (int i = 0; i < 480; i++)
    indices[i].resize(640);

  libfreenect2::Registration* registration = new libfreenect2::Registration(_dev->getIrCameraParams(), _dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  libfreenect2::FrameMap frames;
  SimpleViewer viewer;
  bool shutdown = false;
  cv::Mat board(480, 640, CV_8UC4, cv::Scalar::all(255));
  cv::Vec3f prevNormal(-1, -1, -1);

  if (!gpuView)
  {
    cv::namedWindow("reprojection", CV_WINDOW_NORMAL);
    cv::moveWindow("reprojection", 200, 200);
    //setWindowProperty("reprojection", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
  }
  else
  {
    viewer.setSize(480, 640); // TO-DO change resolution
    viewer.initialize();
    libfreenect2::Frame b(640, 480, 4);
    b.data = board.data;
    viewer.addFrame("RGB", &b);
    shutdown = shutdown || viewer.render();
  }

  while (!shutdown)
  {
    board = cv::Mat(480, 640, CV_8UC4, cv::Scalar::all(255));
    std::vector<cv::Point3f> plnSrc;
    if (!gpuView)
      cv::imshow("reprojection", board);

    (_listener)->waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
    registration->apply(rgb, depth, &undistorted, &registered, true, NULL, NULL);

    PlaneData pln = findRectangle(registration, &undistorted, &registered);

    if (pln.points.size() > 0)
    {
      std::vector<cv::Point2f> projected = projectPoints(pln.points);
      cv::Mat cont = cv::Mat(480, 640, CV_8UC1, cv::Scalar::all(0));
      for (int i = 0; i < projected.size(); i++)
      {
        if (480 - projected[i].x > 0 && projected[i].y > 0 && 480 - projected[i].x < 475 && projected[i].y < 630)
        {
          cv::Mat ROI = board(cv::Rect(static_cast<int>(projected[i].y), static_cast<int>(480 - projected[i].x), 2, 2));
          ROI.setTo(cv::Scalar(250, 100, 100, 100));
          cont.at<uchar>(static_cast<int>(480 - projected[i].x), static_cast<int>(projected[i].y)) = 255;
          plnSrc.push_back(pln.points[i]);
        }
      }

      vector<vector<cv::Point> > contours;
      vector<cv::Vec4i> hierarchy;
      cv::GaussianBlur(cont, cont, cv::Size(7, 7), 5, 11);
      findContours(cont, contours, hierarchy, cv::RETR_CCOMP, cv::CHAIN_APPROX_NONE, cv::Point(0, 0));

      vector<vector<cv::Point> > contours_poly(contours.size());
      vector<cv::Rect> boundRect(contours.size());
      vector<cv::Point2f> center(contours.size());
      vector<float> radius(contours.size());
      for (int i = 0; i < contours.size(); i++)
      {
        cv::approxPolyDP(cv::Mat(contours[i]), contours_poly[i], 10, true);
      }
      for (int i = 0; i < contours.size(); i++)
      {
        drawContours(board, contours_poly, 0, cv::Scalar(0, 255, 0), 5);
      }

      // estimate the plane normal from a PCA over 300 sampled plane points
      cv::Mat data_pts = cv::Mat(300, 3, CV_64FC1);
      cv::Vec3f normal(0, 0, 0);
      int jump = plnSrc.size() / 300;
      for (int i = 0; i < 100; i++)
      {
        data_pts.at<double>(i, 0) = plnSrc[i * jump].x;
        data_pts.at<double>(i, 1) = plnSrc[i * jump].y;
        data_pts.at<double>(i, 2) = plnSrc[i * jump].z;
        data_pts.at<double>(i + 100, 0) = plnSrc[(i + 100) * jump].x;
        data_pts.at<double>(i + 100, 1) = plnSrc[(i + 100) * jump].y;
        data_pts.at<double>(i + 100, 2) = plnSrc[(i + 100) * jump].z;
        data_pts.at<double>(i + 200, 0) = plnSrc[(i + 200) * jump].x;
        data_pts.at<double>(i + 200, 1) = plnSrc[(i + 200) * jump].y;
        data_pts.at<double>(i + 200, 2) = plnSrc[(i + 200) * jump].z;
      }
      cv::PCA pca_analysis(data_pts, cv::Mat(), CV_PCA_DATA_AS_ROW);
      cv::Vec3f cntr = cv::Vec3f((pca_analysis.mean.at<double>(0, 0)),
                                 (pca_analysis.mean.at<double>(0, 1)),
                                 (pca_analysis.mean.at<double>(0, 2)));
      vector<cv::Point3f> eigen_vecs(2);
      vector<double> eigen_val(2);
      for (int i = 0; i < 2; ++i)
      {
        eigen_vecs[i] = cv::Point3f(pca_analysis.eigenvectors.at<double>(i, 0),
                                    pca_analysis.eigenvectors.at<double>(i, 1),
                                    pca_analysis.eigenvectors.at<double>(i, 2));
        eigen_val[i] = pca_analysis.eigenvalues.at<double>(0, i);
      }
      cv::Vec3f p1 = cv::Vec3f((eigen_vecs[0].x * eigen_val[0]),
                               (eigen_vecs[0].y * eigen_val[0]),
                               (eigen_vecs[0].z * eigen_val[0]));
      cv::Vec3f p2 = cv::Vec3f((eigen_vecs[1].x * eigen_val[1]),
                               (eigen_vecs[1].y * eigen_val[1]),
                               (eigen_vecs[1].z * eigen_val[1]));
      normal = p1.cross(p2);
      normal = cv::normalize(normal);
      //pln.center = cntr;
      pln.normal = normal;

      obj.setCamera(cv::Point3f(pln.center.x, -pln.center.y, -pln.center.z + 150),
                    cv::Vec3f(pln.normal[0], pln.normal[1], pln.normal[2]));

      if (!gpuView)
        imshow("reprojection", board);
      else
      {
        libfreenect2::Frame b(640, 480, 4);
        b.data = board.data;
        viewer.addFrame("RGB", &b);
        shutdown = shutdown || viewer.render();
      }
    }

    cv::Mat im = obj.render();
    cv::imshow("objTest", im);
    //}

    (_listener)->release(frames);

    if (!gpuView)
    {
      int op = cv::waitKey(50);
      if (op == 100 || (char)(op) == 'd') right -= 1;
      if (op == 115 || (char)(op) == 's') up += 1;
      if (op == 97  || (char)(op) == 'a') right += 1;
      if (op == 119 || (char)(op) == 'w') up -= 1;
      if (op == 114 || (char)(op) == 'r') rotX -= 0.5;
      if (op == 102 || (char)(op) == 'f') rotX += 0.5;
      if (op == 1113997 || op == 1048586 || op == 1048608 || op == 10 || op == 32)
      {
        std::cout << "right = " << right << ";\nup = " << up << ";\nrotX = " << rotX << ";\n";
        break;
      }
    }
    else
    {
      //right = 0;
      //up = 0;
      //rotX = 0;
      right = viewer.offsetX;
      up = viewer.offsetY;
      rotX = viewer.rot;
    }
  }

  if (!gpuView)
    cv::destroyWindow("reprojection");
  else
  {
    viewer.stopWindow();
  }
  cv::destroyWindow("objTest");
}
void Projector::ctProjection(std::string ctFilePath, int startPoint, int xDim, int yDim, int zDim, bool PNG)
{
  SimpleViewer viewer;
  CTObject ctObject(ctFilePath, xDim, yDim, zDim, startPoint);
  if (PNG)
    ctObject.readDataPNG();
  else
    ctObject.readData();

  libfreenect2::FrameMap frames;
  libfreenect2::Registration* registration = new libfreenect2::Registration(_dev->getIrCameraParams(), _dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  bool shutdown = false;
  cv::Mat board(480, 640, CV_8UC4, cv::Scalar::all(0));

  //cv::namedWindow("CTviewer", CV_WINDOW_NORMAL);
  //cv::moveWindow("CTviewer", 00, 0);
  //cv::setWindowProperty("CTviewer", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
  {
    viewer.setSize(480, 640); // TO-DO change resolution
    viewer.initialize();
    libfreenect2::Frame b(640, 480, 4);
    b.data = board.data;
    viewer.addFrame("RGB", &b);
    shutdown = shutdown || viewer.render();
  }

  libfreenect2::Frame *rgb;
  libfreenect2::Frame *depth;
  (_listener)->waitForNewFrame(frames);
  rgb = frames[libfreenect2::Frame::Color];
  depth = frames[libfreenect2::Frame::Depth];
  registration->undistortDepth(depth, &undistorted);

  while (!shutdown)
  {
    std::thread *nextFrame = new std::thread[1];
    /*(_listener)->waitForNewFrame(frames);
    rgb = frames[libfreenect2::Frame::Color];
    depth = frames[libfreenect2::Frame::Depth];
    registration->undistortDepth(depth, &undistorted);*/
    //registration->apply(rgb, depth, &undistorted, &registered, true, NULL, NULL);
    //cv::Mat frame = frameToMat("registered", &registered);
    cv::Mat depthFrame; // = frameToMat("depth", depth);
    //cv::Mat board(424, 512, CV_8UC4, cv::Scalar::all(0));
    board = cv::Mat(480, 640, CV_8UC4, cv::Scalar::all(0));
    //depthFrame *= 0.001;
    cv::Mat temp_board(480, 640, CV_16UC3, cv::Scalar::all(0));

    int parts = 128;
    int jump = 512 / parts;
    int minX = 999;
    int maxX = -1;
    int minY = 999;
    int maxY = -1;
    std::thread *tt = new std::thread[parts];
    for (int i = 0; i < parts; ++i)
    {
      tt[i] = std::thread(tst, registration, &undistorted, &registered, jump * i, jump + jump * i,
                          &board, &ctObject, right, up, _mr, _mt, _cam, _pro,
                          &depthFrame, &temp_board, &minX, &maxX, &minY, &maxY);
    }
    for (int i = 0; i < parts; ++i)
      tt[i].join();
    //_listener->release(frames);
    nextFrame[0] = std::thread(loadNextFrame, _listener, &frames, rgb, depth, registration, &undistorted);

    int parts2 = 8;
    int jump2 = 512 / parts2;
    for (int ii = 0; ii < 3; ii++)
    {
      for (int i = 0; i < parts2; ++i)
      {
        tt[i] = std::thread(tt2, &temp_board, jump2 * i, jump2 + jump2 * i, cv::Size(3, 3), minX, maxX, minY, maxY);
      }
      for (int i = 0; i < parts2; ++i)
        tt[i].join();
    }

    //for (int i = 0; i < 3; i++)
    //  temp_board = pseudoBoxFilter(temp_board, temp_board, cv::Size(3, 3));
    //temp_board = pseudoBoxFilter(temp_board, temp_board, cv::Size(3, 3));
    //temp_board = pseudoBoxFilter(temp_board, temp_board, cv::Size(7, 7));
    //temp_board = pseudoBoxFilter(temp_board, temp_board, cv::Size(7, 7));
    //cv::medianBlur(temp_board, temp_board, 5); // TO-DO check if needed
    //cv::GaussianBlur(temp_board, temp_board, cv::Size(3, 3), 0, 1);
    //int parts3 = 16;
    //int jump3 = 480 / parts3;
    //std::thread *tt2 = new std::thread[parts];
    //for (int i = 0; i < parts3; ++i) {
    //  tt2[i] = std::thread(tt3, &board, &temp_board, &ctObject, jump3 * i, jump3 + jump3 * i);
    //  //std::cout << jump3 * i << " " << jump3 + jump3 * i << "\n";
    //}
    //for (int i = 0; i < parts; ++i)
    //  tt2[i].join();
    //std::cout << "zuoooooo\n";
    /*for (int i = 0; i < 480; i++) {
      for (int j = 0; j < 640; j++) {
        cv::Mat ROI = board(cv::Rect(j, i, 1, 1));
        cv::Vec3s v = temp_board.at<cv::Vec3s>(i, j);
        unsigned char value = ctObject.at(v[0], v[1], v[2]);
        ROI.setTo(cv::Scalar(value, value, value, 100));
      }
    }*/
    //cv::imshow("CTviewer", board);
    {
      libfreenect2::Frame b(640, 480, 4);
      b.data = board.data;
      viewer.addFrame("RGB", &b);
      shutdown = shutdown || viewer.render();
    }
    //_listener->release(frames);
    nextFrame[0].join();
    delete[] tt;
    delete[] nextFrame;
    //delete[] tt2;
    /*{
      int op = cv::waitKey(1);
      if (op == 100 || (char)(op) == 'd') right -= 1;
      if (op == 115 || (char)(op) == 's') up += 1;
      if (op == 97 || (char)(op) == 'a') right += 1;
      if (op == 119 || (char)(op) == 'w') up -= 1;
      if (op == 1113997 || op == 1048586 || op == 1048608 || op == 10 || op == 32)
      {
        std::cout << "right = " << right << ";\nup = " << up << ";\nrotX = " << rotX << ";\n";
        shutdown = true;
        cv::destroyWindow("CTviewer");
      }
    }*/
  }
}
/**
 * Main application entry point.
 *
 * Accepted arguments:
 * - cpu        Perform depth processing with the CPU.
 * - gl         Perform depth processing with OpenGL.
 * - cl         Perform depth processing with OpenCL.
 * - <number>   Serial number of the device to open.
 * - -noviewer  Disable viewer window.
 */
int main(int argc, char *argv[])
/// [main]
{
  std::string program_path(argv[0]);
  std::cerr << "Version: " << LIBFREENECT2_VERSION << std::endl;
  std::cerr << "Environment variables: LOGFILE=<protonect.log>" << std::endl;
  std::cerr << "Usage: " << program_path << " [gl | cl | cuda | cpu] [<device serial>]" << std::endl;
  std::cerr << "       [-noviewer] [-norgb | -nodepth] [-help] [-version]" << std::endl;
  std::cerr << "To pause and unpause: pkill -USR1 Protonect" << std::endl;
  size_t executable_name_idx = program_path.rfind("Protonect");

  std::string binpath = "/";
  if (executable_name_idx != std::string::npos)
  {
    binpath = program_path.substr(0, executable_name_idx);
  }

#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)
  // avoid flooding the very slow Windows console with debug messages
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Info));
#else
  // create a console logger with debug level (default is console logger with info level)
  /// [logging]
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Debug));
  /// [logging]
#endif
  /// [file logging]
  MyFileLogger *filelogger = new MyFileLogger(getenv("LOGFILE"));
  if (filelogger->good())
    libfreenect2::setGlobalLogger(filelogger);
  else
    delete filelogger;
  /// [file logging]

  /// [context]
  libfreenect2::Freenect2 freenect2;
  libfreenect2::Freenect2Device *dev = 0;
  libfreenect2::PacketPipeline *pipeline = 0;
  /// [context]

  std::string serial = "";

  bool viewer_enabled = true;
  bool enable_rgb = true;
  bool enable_depth = true;

  for (int argI = 1; argI < argc; ++argI)
  {
    const std::string arg(argv[argI]);

    if (arg == "-help" || arg == "--help" || arg == "-h" || arg == "-v" || arg == "--version" || arg == "-version")
    {
      // Just let the initial lines display at the beginning of main
      return 0;
    }
    else if (arg == "cpu")
    {
      if (!pipeline)
        /// [pipeline]
        pipeline = new libfreenect2::CpuPacketPipeline();
        /// [pipeline]
    }
    else if (arg == "gl")
    {
#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenGLPacketPipeline();
#else
      std::cout << "OpenGL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cl")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLPacketPipeline();
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cuda")
    {
#ifdef LIBFREENECT2_WITH_CUDA_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::CudaPacketPipeline();
#else
      std::cout << "CUDA pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg.find_first_not_of("0123456789") == std::string::npos) // check if parameter could be a serial number
    {
      serial = arg;
    }
    else if (arg == "-noviewer" || arg == "--noviewer")
    {
      viewer_enabled = false;
    }
    else if (arg == "-norgb" || arg == "--norgb")
    {
      enable_rgb = false;
    }
    else if (arg == "-nodepth" || arg == "--nodepth")
    {
      enable_depth = false;
    }
    else
    {
      std::cout << "Unknown argument: " << arg << std::endl;
    }
  }

  if (!enable_rgb && !enable_depth)
  {
    std::cerr << "Disabling both streams is not allowed!" << std::endl;
    return -1;
  }

  /// [discovery]
  if (freenect2.enumerateDevices() == 0)
  {
    std::cout << "no device connected!" << std::endl;
    return -1;
  }

  if (serial == "")
  {
    serial = freenect2.getDefaultDeviceSerialNumber();
  }
  /// [discovery]

  if (pipeline)
  {
    /// [open]
    dev = freenect2.openDevice(serial, pipeline);
    /// [open]
  }
  else
  {
    dev = freenect2.openDevice(serial);
  }

  if (dev == 0)
  {
    std::cout << "failure opening device!" << std::endl;
    return -1;
  }

  devtopause = dev;

  signal(SIGINT, sigint_handler);
#ifdef SIGUSR1
  signal(SIGUSR1, sigusr1_handler);
#endif
  protonect_shutdown = false;

  /// [listeners]
  int types = 0;
  if (enable_rgb)
    types |= libfreenect2::Frame::Color;
  if (enable_depth)
    types |= libfreenect2::Frame::Ir | libfreenect2::Frame::Depth;
  libfreenect2::SyncMultiFrameListener listener(types);
  libfreenect2::FrameMap frames;

  dev->setColorFrameListener(&listener);
  dev->setIrAndDepthFrameListener(&listener);
  /// [listeners]

  /// [start]
  if (enable_rgb && enable_depth)
  {
    if (!dev->start())
      return -1;
  }
  else
  {
    if (!dev->startStreams(enable_rgb, enable_depth))
      return -1;
  }

  std::cout << "device serial: " << dev->getSerialNumber() << std::endl;
  std::cout << "device firmware: " << dev->getFirmwareVersion() << std::endl;
  /// [start]

  /// [registration setup]
  libfreenect2::Registration* registration = new libfreenect2::Registration(dev->getIrCameraParams(), dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
  /// [registration setup]

  size_t framecount = 0;
#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
  Viewer viewer;
  if (viewer_enabled)
    viewer.initialize();
#else
  viewer_enabled = false;
#endif

  /// [loop start]
  while (!protonect_shutdown)
  {
    listener.waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *ir = frames[libfreenect2::Frame::Ir];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
    /// [loop start]

    if (enable_rgb && enable_depth)
    {
      /// [registration]
      registration->apply(rgb, depth, &undistorted, &registered);
      /// [registration]
    }

    framecount++;
    if (!viewer_enabled)
    {
      if (framecount % 100 == 0)
        std::cout << "The viewer is turned off. Received " << framecount << " frames. Ctrl-C to stop." << std::endl;
      listener.release(frames);
      continue;
    }

#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
    if (enable_rgb)
    {
      viewer.addFrame("RGB", rgb);
    }
    if (enable_depth)
    {
      viewer.addFrame("ir", ir);
      viewer.addFrame("depth", depth);
    }
    if (enable_rgb && enable_depth)
    {
      viewer.addFrame("registered", &registered);
    }

    protonect_shutdown = protonect_shutdown || viewer.render();
#endif

    /// [loop end]
    listener.release(frames);
    /** libfreenect2::this_thread::sleep_for(libfreenect2::chrono::milliseconds(100)); */
  }
  /// [loop end]

  // TODO: restarting ir stream doesn't work!
  // TODO: bad things will happen, if frame listeners are freed before dev->stop() :(
  /// [stop]
  dev->stop();
  dev->close();
  /// [stop]

  delete registration;

  return 0;
}
//update the kinect
void Device::updateKinect()
{
  libfreenect2::FrameMap frames;

  //Temporary arrays
  float * newDepth = new float[FRAME_SIZE_DEPTH];
  float * newIr = new float[FRAME_SIZE_DEPTH];
  float * newUndisorted = new float[FRAME_SIZE_DEPTH];

  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);

  //MAIN THREAD
  while (initialized_device)
  {
    listener->waitForNewFrame(frames);

    if (enableRegistered)
    {
      libfreenect2::Frame * rgb = frames[libfreenect2::Frame::Color];
      memcpy(colorData, reinterpret_cast<const uint32_t *>(rgb->data), 1920 * 1080 * 4);
      libfreenect2::Frame * depth = frames[libfreenect2::Frame::Depth];
      memcpy(newDepth, reinterpret_cast<const float *>(depth->data), FRAME_BYTE_SIZE_DEPTH);
      //Mappers RGB + Depth
      registration->apply(rgb, depth, &undistorted, &registered);
      memcpy(newUndisorted, reinterpret_cast<const float *>(undistorted.data), FRAME_BYTE_SIZE_DEPTH);
      memcpy(registeredData, reinterpret_cast<const uint32_t *>(registered.data), FRAME_BYTE_SIZE_DEPTH);
    }
    else if (enableVideo && !enableDepth)
    {
      libfreenect2::Frame * rgb = frames[libfreenect2::Frame::Color];
      memcpy(colorData, reinterpret_cast<const uint32_t *>(rgb->data), 1920 * 1080 * 4);
    }
    else if (!enableVideo && enableDepth)
    {
      libfreenect2::Frame * depth = frames[libfreenect2::Frame::Depth];
      memcpy(newDepth, reinterpret_cast<const float *>(depth->data), FRAME_BYTE_SIZE_DEPTH);
    }
    else if (enableVideo && enableDepth && !enableRegistered)
    {
      libfreenect2::Frame * rgb = frames[libfreenect2::Frame::Color];
      memcpy(colorData, reinterpret_cast<const uint32_t *>(rgb->data), 1920 * 1080 * 4);
      libfreenect2::Frame * depth = frames[libfreenect2::Frame::Depth];
      memcpy(newDepth, reinterpret_cast<const float *>(depth->data), FRAME_BYTE_SIZE_DEPTH);
    }

    if (enableIR)
    {
      libfreenect2::Frame * ir = frames[libfreenect2::Frame::Ir];
      memcpy(newIr, reinterpret_cast<const float *>(ir->data), FRAME_BYTE_SIZE_DEPTH);
    }

    int indexFD = 0;
    int pIndexEnd = (FRAME_SIZE_DEPTH);
    int indexX = 0;
    int indexY = 0;
    int cameraXYZ = 0;

    while (indexFD < pIndexEnd)
    {
      float depth = newDepth[indexFD];

      //Depth
      //0.0566666f -> (value / 4500) * 255
      rawDepthData[indexFD] = uint32_t(depth);
      //IR
      irData[indexFD] = colorByte2Int((uint32_t(newIr[indexFD] * 0.0566666f) >> 2));
      //undistorted
      undisortedData[indexFD] = colorByte2Int(uint32_t(newUndisorted[indexFD] * 0.0566666f));
      depthData[indexFD] = colorByte2Int(uint32_t(depth * 0.0566666f));

      //evaluates the depth XYZ position
      depthCameraData[cameraXYZ++] = (indexX - dev->getIrCameraParams().cx) * depth / dev->getIrCameraParams().fx; //x
      depthCameraData[cameraXYZ++] = (indexY - dev->getIrCameraParams().cy) * depth / dev->getIrCameraParams().fy; //y
      depthCameraData[cameraXYZ++] = depth; //z

      indexX++;
      if (indexX >= 512)
      {
        indexX = 0;
        indexY++;
      }
      indexFD++;
    }

    //frame listener
    listener->release(frames);
  }

  //clean up (the buffers were allocated with new[], so release them with delete[])
  if (newDepth != NULL) delete[] newDepth;
  if (newIr != NULL) delete[] newIr;
  if (newUndisorted != NULL) delete[] newUndisorted;
}
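// The per-pixel XYZ evaluation above uses the standard pinhole back-projection
//   X = (u - cx) * Z / fx,  Y = (v - cy) * Z / fy,  Z = depth(u, v).
// A minimal standalone sketch of that mapping; the helper name is hypothetical, and it
// assumes libfreenect2::Freenect2Device::IrCameraParams for the depth-camera intrinsics:
static void backprojectDepthPixel(const libfreenect2::Freenect2Device::IrCameraParams &p,
                                  int u, int v, float depth,
                                  float &X, float &Y, float &Z)
{
  // (u, v) are pixel coordinates in the 512x424 depth image, depth is in the same unit as Z
  X = (u - p.cx) * depth / p.fx;
  Y = (v - p.cy) * depth / p.fy;
  Z = depth;
}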
//--------------------------------------------------------------------------------
void ofxKinectV2::threadedFunction()
{
  libfreenect2::Frame undistorted(DEPTH_WIDTH, DEPTH_HEIGHT, 4), registered(DEPTH_WIDTH, DEPTH_HEIGHT, 4);

  while (isThreadRunning())
  {
    if (!bOpened)
      continue;

    listener->waitForNewFrame(frames);
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *ir = frames[libfreenect2::Frame::Ir];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];

    registration->apply(rgb, depth, &undistorted, &registered);

    frameColor[indexBack].setFromPixels(rgb->data, rgb->width, rgb->height, 4);
    frameIr[indexBack].setFromPixels((float *)ir->data, ir->width, ir->height, 1);
    frameRawDepth[indexBack].setFromPixels((float *)depth->data, depth->width, depth->height, 1);
    frameAligned[indexBack].setFromPixels(registered.data, registered.width, registered.height, 4);

    listener->release(frames);

    for (auto pixel : frameColor[indexBack].getPixelsIter()) // swap rgb
      std::swap(pixel[0], pixel[2]);
    for (auto pixel : frameIr[indexBack].getPixelsIter()) // downscale to 0-1
      pixel[0] /= 65535.0f;
    for (auto pixel : frameAligned[indexBack].getPixelsIter()) // swap rgb
      std::swap(pixel[0], pixel[2]);

    if (!bUseRawDepth)
    {
      auto& depth = frameDepth[indexBack];
      auto& raw_depth = frameRawDepth[indexBack];
      if ((depth.getWidth() != raw_depth.getWidth()) || (depth.getHeight() != raw_depth.getHeight()))
      {
        depth.allocate(raw_depth.getWidth(), raw_depth.getHeight(), 3);
      }
      std::pair<float, float> minmax(0.0, 0.8);
      for (int i = 0; i < raw_depth.size(); i++)
      {
        float hue = ofMap(raw_depth[i], minDistance, maxDistance, minmax.first, minmax.second, true);
        if (hue == minmax.first || hue == minmax.second)
          depth.setColor(i * 3, ofColor(0));
        else
          depth.setColor(i * 3, ofFloatColor::fromHsb(hue, 0.9, 0.9));
      }
    }

    // get point cloud
    {
      float rgbPix = 0;
      size_t i = 0;
      for (int y = 0; y < DEPTH_HEIGHT; y++)
      {
        for (int x = 0; x < DEPTH_WIDTH; x++)
        {
          auto& pt = pcVertices[indexBack][i];
          registration->getPointXYZRGB(&undistorted, &registered, y, x, pt.x, pt.y, pt.z, rgbPix);
          pt.z *= -1.0f;
          pt.w = 1.0f;
          const uint8_t *p = reinterpret_cast<uint8_t*>(&rgbPix);
          pcColors[indexBack][i] = ofColor(p[2], p[1], p[0]);
          i++;
        }
      }
    }

    //while (bNewFrame) { } // wait for main thread

    std::lock_guard<std::mutex> guard(mutex);
    std::swap(indexFront, indexBack);
    bNewFrame = true;
  }
}