// Entry point of the scanner GUI: parses command-line options, builds the
// RGB-D grabber for the requested source (image file, directory replay, or
// a Kinect through libfreenect / OpenNI depending on build flags), wires the
// grabber to the GUI controller, frame recorder and model-acquisition
// controller, then hands control to the Qt event loop.
int main (int argc, char** argv)
{
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);
    ntk_debug_level = opt::debug_level();
    cv::setBreakOnError(true); // break into the debugger on OpenCV errors
    QApplication::setGraphicsSystem("raster");
    QApplication app (argc, argv);

    // Opening cameras. It's important to remember that CvCapture has to be
    // initialized before PMD, otherwise it will crash:
    // 1.- Logitech (openCV)
    // 2.- PMD (pmdlibrary)

    // Replay source: a single image or a directory of frames; the
    // directory option takes precedence over the image option.
    const char* fake_dir = opt::image();
    bool is_directory = opt::directory() != 0;
    if (opt::directory())
        fake_dir = opt::directory();

    ntk::RGBDProcessor* rgbd_processor = 0;
    RGBDGrabber* grabber = 0;

#ifdef NESTK_USE_OPENNI
    OpenniDriver* ni_driver = 0;
#endif

    // Pick the processor implementation matching the capture backend.
    bool use_openni = false;
    if (opt::use_kinect())
    {
#ifdef NESTK_USE_OPENNI
        if (opt::use_freenect())
        {
            rgbd_processor = new FreenectRGBDProcessor();
        }
        else
        {
            rgbd_processor = new OpenniRGBDProcessor();
            use_openni = true;
        }
#else
        // Without OpenNI support, freenect is the only Kinect backend.
        rgbd_processor = new FreenectRGBDProcessor();
#endif
    }
    else
    {
        rgbd_processor = new RGBDProcessor();
    }

    // Depth post-processing: keep depth samples between 0.3 m and 1.5 m
    // and compute the depth<->color mapping.
    rgbd_processor->setMaxNormalAngle(40);
    rgbd_processor->setFilterFlag(RGBDProcessor::ComputeMapping, true);
    rgbd_processor->setFilterFlag(RGBDProcessor::FilterThresholdDepth, true);
    rgbd_processor->setMinDepth(0.3);
    rgbd_processor->setMaxDepth(1.5);

    if (opt::image() || opt::directory())
    {
        // Replay from disk rather than from a live camera.
        std::string path = opt::image() ? opt::image() : opt::directory();
        FileGrabber* file_grabber = new FileGrabber(path, is_directory);
        grabber = file_grabber;
        // Images are saved with flipping already applied.
        rgbd_processor->setFilterFlag(RGBDProcessor::FlipColorImage, false);
    }
    else if (opt::use_kinect())
    {
        if (opt::use_freenect())
        {
#ifdef NESTK_USE_FREENECT
            FreenectGrabber* k_grabber = new FreenectGrabber();
            k_grabber->initialize();
            k_grabber->setIRMode(false);
            grabber = k_grabber;
#else
            fatal_error("Freenect support not built.");
#endif
        }
        else
        {
#ifdef NESTK_USE_OPENNI
            // Config dir is supposed to be next to the binaries.
            QDir prev = QDir::current();
            QDir::setCurrent(QApplication::applicationDirPath());
            if (!ni_driver) ni_driver = new OpenniDriver();
            OpenniGrabber* k_grabber = new OpenniGrabber(*ni_driver);
            k_grabber->setTrackUsers(false);
            if (opt::high_resolution())
                k_grabber->setHighRgbResolution(true);
            k_grabber->initialize();
            // Restore the working directory changed above.
            QDir::setCurrent(prev.absolutePath());
            grabber = k_grabber;
#else
            fatal_error("OpenNI support not built, try --freenect.");
#endif
        }
    }
    else
    {
        ntk_assert(0, "PMDSDK support not enabled.");
    }

    // Calibration: an explicit calibration file wins; otherwise OpenNI
    // grabbers provide their own calibration data. The mesh generator is
    // only created when calibration is available.
    MeshGenerator* mesh_generator = 0;
    ntk::RGBDCalibration* calib_data = 0;
    if (opt::calibration_file())
    {
        calib_data = new RGBDCalibration();
        calib_data->loadFromFile(opt::calibration_file());
    }
    else if (use_openni)
    {
        calib_data = grabber->calibrationData();
    }

    if (calib_data)
    {
        mesh_generator = new MeshGenerator();
        grabber->setCalibrationData(*calib_data);
    }

    // --sync puts the grabber in synchronous mode (and starts the GUI paused).
    if (opt::sync())
        grabber->setSynchronous(true);

    // Frame recorder writing raw binary frames, numbered from --first_index.
    RGBDFrameRecorder frame_recorder (opt::dir_prefix());
    frame_recorder.setFrameIndex(opt::first_index());
    frame_recorder.setSaveOnlyRaw(true);
    frame_recorder.setUseBinaryRaw(true);

    // The GUI controller listens to grabber events and drives the recorder.
    GuiController gui_controller (*grabber, *rgbd_processor);
    grabber->addEventListener(&gui_controller);
    gui_controller.setFrameRecorder(frame_recorder);
    if (opt::sync())
        gui_controller.setPaused(true);

    // Model acquisition: table-top object modeler, started paused, using a
    // dummy pose estimator.
    TableObjectRGBDModeler modeler;
    modeler.setDepthFilling(true);
    modeler.setRemoveSmallStructures(true);
    ModelAcquisitionController* acq_controller = 0;
    acq_controller = new ModelAcquisitionController (gui_controller, modeler);
    acq_controller->setPaused(true);

    RelativePoseEstimator* pose_estimator = new DummyRelativePoseEstimator();
    acq_controller->setPoseEstimator(pose_estimator);
    gui_controller.setModelAcquisitionController(*acq_controller);

    if (mesh_generator)
        gui_controller.setMeshGenerator(*mesh_generator);

    // Run until the Qt event loop exits.
    grabber->start();
    app.exec();

    delete mesh_generator;
    delete acq_controller;
}
int main (int argc, char** argv) { arg_base::set_help_option("-h"); arg_parse(argc, argv); ntk_debug_level = opt::debug_level(); cv::setBreakOnError(true); QApplication::setGraphicsSystem("raster"); QApplication app (argc, argv); RGBDGrabberFactory& grabber_factory = RGBDGrabberFactory::instance(); RGBDGrabberFactory::Params params; if (opt::directory()) params.directory = opt::directory(); if (opt::openni()) params.default_type = RGBDGrabberFactory::OPENNI; if (opt::freenect()) params.default_type = RGBDGrabberFactory::FREENECT; if (opt::kin4win()) params.default_type = RGBDGrabberFactory::KIN4WIN; if (opt::softkinetic()) params.default_type = RGBDGrabberFactory::SOFTKINETIC; if (opt::pmd()) params.default_type = RGBDGrabberFactory::PMD; if (opt::calibration_file()) params.calibration_file = opt::calibration_file(); if (opt::sync()) params.synchronous = true; std::vector<RGBDGrabberFactory::GrabberData> grabbers; grabbers = grabber_factory.createGrabbers(params); if (grabbers.size() < 1) { QMessageBox::critical(0, "Fatal error", "Cannot connect to any RGBD device.\n\nPlease check that it is correctly plugged to the computer."); return 1; } RGBDGrabber* grabber = grabbers[0].grabber; RGBDProcessor* rgbd_processor = grabbers[0].processor; bool ok = grabber->connectToDevice(); if (!ok) { ntk_dbg(0) << "WARNING: connectToDevice failed."; return 1; } rgbd_processor->setMaxNormalAngle(40); rgbd_processor->setFilterFlag(RGBDProcessorFlags::ComputeMapping, true); rgbd_processor->setFilterFlag(RGBDProcessorFlags::FilterThresholdDepth, true); rgbd_processor->setMinDepth(0.05f); rgbd_processor->setMaxDepth(1.5f); MeshGenerator* mesh_generator = 0; ntk::RGBDCalibrationPtr calib_data = grabber->calibrationData(); if (calib_data) { mesh_generator = new MeshGenerator(); } RGBDFrameRecorder frame_recorder (opt::dir_prefix()); frame_recorder.setFrameIndex(opt::first_index()); frame_recorder.setSaveOnlyRaw(true); frame_recorder.setUseBinaryRaw(true); GuiController gui_controller 
(*grabber, *rgbd_processor); grabber->addEventListener(&gui_controller); gui_controller.setFrameRecorder(frame_recorder); if (opt::sync()) gui_controller.setPaused(true); TableObjectRGBDModeler modeler; modeler.setDepthFilling(true); modeler.setRemoveSmallStructures(true); ModelAcquisitionController* acq_controller = 0; acq_controller = new ModelAcquisitionController (gui_controller, modeler); acq_controller->setPaused(true); IncrementalPoseEstimatorFromImage* pose_estimator = new DummyIncrementalPoseEstimator(); acq_controller->setPoseEstimator(pose_estimator); gui_controller.setModelAcquisitionController(*acq_controller); if (mesh_generator) gui_controller.setMeshGenerator(*mesh_generator); grabber->start(); app.exec(); delete mesh_generator; delete acq_controller; }
int main(int argc, char **argv) { // Parse command line options. arg_base::set_help_option("-h"); arg_parse(argc, argv); // Set debug level to 1. ntk::ntk_debug_level = 1; // Set current directory to application directory. // This is to find Nite config in config/ directory. QApplication app (argc, argv); QDir::setCurrent(QApplication::applicationDirPath()); // Declare the global OpenNI driver. Only one can be instantiated in a program. OpenniDriver ni_driver; // Declare the frame grabber. OpenniGrabber grabber(ni_driver, opt::kinect_id()); // High resolution 1280x1024 RGB Image. if (opt::high_resolution()) grabber.setHighRgbResolution(true); // Start the grabber. grabber.connectToDevice(); grabber.start(); // Holder for the current image. RGBDImage image; // Image post processor. Compute mappings when RGB resolution is 1280x1024. OpenniRGBDProcessor post_processor; // Wait for a new frame, get a local copy and postprocess it. grabber.waitForNextFrame(); grabber.copyImageTo(image); post_processor.processImage(image); ntk_ensure(image.calibration(), "Uncalibrated rgbd image, cannot project to 3D!"); // Initialize the object modeler. PointCloud<PointXYZ> cloud; rgbdImageToPointCloud(cloud, image); TableObjectDetector<PointXYZ> detector; detector.setObjectVoxelSize(0.003); // 3 mm voxels. bool ok = detector.detect(cloud); ntk_throw_exception_if(!ok, "No cluster detected on a table plane!"); for (int cluster_id = 0; cluster_id < detector.objectClusters().size(); ++cluster_id) { TableObjectRGBDModeler modeler; modeler.feedFromTableObjectDetector(detector, cluster_id); Pose3D pose = *image.calibration()->depth_pose; modeler.addNewView(image, pose); modeler.computeMesh(); modeler.currentMesh().saveToPlyFile(cv::format("object%d.ply", cluster_id).c_str()); } grabber.stop(); grabber.disconnectFromDevice(); }