// return not null transform if odometry is correctly computed
// Computes the incremental camera motion for the given RGB-D frame using the
// external DVO (Dense Visual Odometry) dense tracker. Matching is done against
// an internal keyframe (reference_); a new keyframe is taken once the motion
// from the current keyframe exceeds a small threshold. Returns a null
// Transform on failure. Note: `guess` is accepted for interface compatibility
// but is not used by this approach.
Transform OdometryDVO::computeTransform( SensorData & data, const Transform & guess, OdometryInfo * info)
{
	Transform t;
#ifdef RTABMAP_DVO
	UTimer timer;

	// DVO needs a registered RGB-D pair: image and depth must be the same size.
	if(data.imageRaw().empty() ||
		data.imageRaw().rows != data.depthOrRightRaw().rows ||
		data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input!");
		return t;
	}

	// Exactly one calibrated RGB-D camera is supported.
	if(!(data.cameraModels().size() == 1 && data.cameraModels()[0].isValidForReprojection()))
	{
		UERROR("Invalid camera model! Only single RGB-D camera supported by DVO. Try another odometry approach.");
		return t;
	}

	// Lazily create the dense tracker with its default configuration.
	if(dvo_ == 0)
	{
		dvo::DenseTracker::Config cfg = dvo::DenseTracker::getDefaultConfig();
		dvo_ = new dvo::DenseTracker(cfg);
	}

	cv::Mat grey, grey_s16, depth_inpainted, depth_mask, depth_mono, depth_float;

	// Convert the input image to a single-channel CV_32F intensity image
	// as expected by the tracker.
	if(data.imageRaw().type() != CV_32FC1)
	{
		if(data.imageRaw().type() == CV_8UC3)
		{
			cv::cvtColor(data.imageRaw(), grey, CV_BGR2GRAY);
		}
		else
		{
			grey = data.imageRaw();
		}
		grey.convertTo(grey_s16, CV_32F);
	}
	else
	{
		grey_s16 = data.imageRaw();
	}

	// make sure all zeros are NAN
	// (DVO treats NaN as "no depth"; 0 would be interpreted as a valid value)
	if(data.depthRaw().type() == CV_32FC1)
	{
		// NOTE: modifies the depth buffer in place (no clone).
		depth_float = data.depthRaw();
		for(int i=0; i<depth_float.rows; ++i)
		{
			for(int j=0; j<depth_float.cols; ++j)
			{
				float & d = depth_float.at<float>(i,j);
				if(d == 0.0f)
				{
					d = NAN;
				}
			}
		}
	}
	else if(data.depthRaw().type() == CV_16UC1)
	{
		// Convert millimeters (CV_16UC1) to meters (CV_32FC1), zeros -> NaN.
		depth_float = cv::Mat(data.depthRaw().size(), CV_32FC1);
		for(int i=0; i<data.depthRaw().rows; ++i)
		{
			for(int j=0; j<data.depthRaw().cols; ++j)
			{
				float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
				depth_float.at<float>(i, j) = d==0.0f?NAN:d;
			}
		}
	}
	else
	{
		UFATAL("Unknown depth format!");
	}

	// Lazily create the camera pyramid from the (assumed constant) intrinsics
	// of the first frame.
	if(camera_ == 0)
	{
		dvo::core::IntrinsicMatrix intrinsics = dvo::core::IntrinsicMatrix::create(
				data.cameraModels()[0].fx(),
				data.cameraModels()[0].fy(),
				data.cameraModels()[0].cx(),
				data.cameraModels()[0].cy());
		camera_ = new dvo::core::RgbdCameraPyramid(
				data.cameraModels()[0].imageWidth(),
				data.cameraModels()[0].imageHeight(),
				intrinsics);
	}

	// Ownership note: `current` is either adopted as the new keyframe
	// (stored in reference_) or deleted below; it must not leak.
	dvo::core::RgbdImagePyramid * current = new dvo::core::RgbdImagePyramid(*camera_, grey_s16, depth_float);

	cv::Mat covariance;
	if(reference_ == 0)
	{
		// First frame (or restart after being lost): adopt as keyframe.
		reference_ = current;
		if(!lost_)
		{
			t.setIdentity();
		}
		covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
	}
	else
	{
		dvo::DenseTracker::Result result;
		dvo_->match(*reference_, *current, result);
		t = Transform::fromEigen3d(result.Transformation);

		// Information(0,0) == 0 or the identity value signals a failed match.
		if(result.Information(0,0) > 0.0 && result.Information(0,0) != 1.0)
		{
			lost_ = false;
			cv::Mat information = cv::Mat::eye(6,6, CV_64FC1);
			memcpy(information.data, result.Information.data(), 36*sizeof(double));
			covariance = information.inv();
			covariance *= 100.0; // to be in the same scale than loop closure detection

			// `t` is keyframe->current; output the frame-to-frame increment.
			Transform currentMotion = t;
			t = motionFromKeyFrame_.inverse() * t;

			// TODO make parameters?
			if(currentMotion.getNorm() > 0.01 || currentMotion.getAngle() > 0.01)
			{
				if(info)
				{
					info->keyFrameAdded = true;
				}
				// new keyframe
				delete reference_;
				reference_ = current;
				motionFromKeyFrame_.setIdentity();
			}
			else
			{
				// Keep the current keyframe; remember motion from it.
				delete current;
				motionFromKeyFrame_ = currentMotion;
			}
		}
		else
		{
			// Tracking failed: drop both pyramids and report a null transform
			// with a very large covariance.
			lost_ = true;
			delete reference_;
			delete current;
			reference_ = 0; // this will make restart from the next frame
			motionFromKeyFrame_.setIdentity();
			t.setNull();
			covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
			UWARN("dvo failed to estimate motion, tracking will be reinitialized on next frame.");
		}
	}

	const Transform & localTransform = data.cameraModels()[0].localTransform();
	if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
	{
		// from camera frame to base frame
		t = localTransform * t * localTransform.inverse();
	}

	if(info)
	{
		info->type = (int)kTypeDVO;
		info->covariance = covariance;
	}

	UINFO("Odom update time = %fs", timer.elapsed());

#else
	UERROR("RTAB-Map is not built with DVO support! Select another visual odometry approach.");
#endif
	return t;
}
// Camera calibration tool entry point: parses --driver/--device/--stereo
// options, instantiates the selected camera driver, and runs the Qt
// CalibrationDialog fed by a CameraThread.
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	int driver = -1;      // -1 = plain video device (CameraVideo)
	int device = 0;       // device index for CameraVideo
	bool stereo = false;

	// Option parsing; showUsage() is assumed to terminate the process on
	// bad input (it is called without returning afterwards).
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "--driver") == 0)
		{
			++i;
			if(i < argc)
			{
				driver = std::atoi(argv[i]);
				if(driver < -1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--device") == 0)
		{
			++i;
			if(i < argc)
			{
				device = std::atoi(argv[i]);
				if(device < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "--debug") == 0)
		{
			ULogger::setLevel(ULogger::kDebug);
			ULogger::setPrintTime(true);
			ULogger::setPrintWhere(true);
			continue;
		}
		if(strcmp(argv[i], "--stereo") == 0)
		{
			stereo=true;
			continue;
		}
		if(strcmp(argv[i], "--help") == 0)
		{
			showUsage();
		}
		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}
	if(driver < -1 || driver > 7)
	{
		UERROR("driver should be between -1 and 7.");
		showUsage();
	}
	UINFO("Using driver %d", driver);
	UINFO("Using device %d", device);
	UINFO("Stereo: %s", stereo?"true":"false");

	QApplication app(argc, argv);
	rtabmap::CalibrationDialog dialog(stereo, ".");

	// Instantiate the requested camera driver; each optional driver checks
	// its compile-time availability first and exits if unsupported.
	rtabmap::Camera * camera = 0;
	if(driver == -1)
	{
		camera = new rtabmap::CameraVideo(device);
		dialog.setStereoMode(stereo);
	}
	else if(driver == 0)
	{
		camera = new rtabmap::CameraOpenni();
	}
	else if(driver == 1)
	{
		if(!rtabmap::CameraOpenNI2::available())
		{
			UERROR("Not built with OpenNI2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNI2();
	}
	else if(driver == 2)
	{
		if(!rtabmap::CameraFreenect::available())
		{
			UERROR("Not built with Freenect support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect();
	}
	else if(driver == 3)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(false);
	}
	else if(driver == 4)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(true);
	}
	else if(driver == 5)
	{
		if(!rtabmap::CameraFreenect2::available())
		{
			UERROR("Not built with Freenect2 support...");
			exit(-1);
		}
		// Freenect2 publishes color/IR swapped relative to the dialog's
		// expectation, hence setSwitchedImages(true).
		camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeColorIR);
		dialog.setSwitchedImages(true);
		dialog.setStereoMode(stereo, "rgb", "depth");
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
		dialog.setStereoMode(stereo);
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
		dialog.setStereoMode(stereo);
	}
	else
	{
		UFATAL("");
	}

	rtabmap::CameraThread * cameraThread = 0;
	if(camera)
	{
		// init("") : no calibration folder, raw images are expected here.
		if(!camera->init(""))
		{
			printf("Camera init failed!\n");
			delete camera;
			exit(1);
		}
		cameraThread = new rtabmap::CameraThread(camera);
	}

	dialog.registerToEventsManager();
	dialog.show();
	// NOTE(review): cameraThread is dereferenced unconditionally below; this
	// is safe only because every branch above either assigns `camera` or
	// terminates the process — confirm if new driver branches are added.
	cameraThread->start();
	app.exec();
	cameraThread->join(true);
	delete cameraThread;
}
/**
 * Build the full prediction matrix (motion model) of the Bayes filter.
 *
 * Each column of the returned ids.size() x ids.size() CV_32FC1 matrix is a
 * prior vector: for a real location id, a discretized gaussian
 * (_predictionLC) is spread over its neighbors in the memory graph; for the
 * virtual place (negative id), _virtualPlacePrior is used.
 *
 * @param memory memory graph used to fetch neighbors (must not be null)
 * @param ids    signature ids (non-zero) defining the matrix row/col order
 * @return the prediction matrix (CV_32FC1)
 */
cv::Mat BayesFilter::generatePrediction(const Memory * memory, const std::vector<int> & ids) const
{
	// Incremental path: reuse the cached prediction when allowed.
	if(!_fullPredictionUpdate && !_prediction.empty())
	{
		return updatePrediction(_prediction, memory, uKeys(_posterior), ids);
	}
	UDEBUG("");

	UASSERT(memory && _predictionLC.size() >= 2 && ids.size());

	UTimer timer;
	timer.start();
	UTimer timerGlobal;
	timerGlobal.start();

	// Map signature id -> matrix index for fast column lookups.
	std::map<int, int> idToIndexMap;
	for(unsigned int i=0; i<ids.size(); ++i)
	{
		UASSERT_MSG(ids[i] != 0, "Signature id is null ?!?");
		idToIndexMap.insert(idToIndexMap.end(), std::make_pair(ids[i], i));
	}

	//int rows = prediction.rows;
	cv::Mat prediction = cv::Mat::zeros(ids.size(), ids.size(), CV_32FC1);
	int cols = prediction.cols;
	// Each prior is a column vector
	UDEBUG("_predictionLC.size()=%d",_predictionLC.size());
	std::set<int> idsDone;

	for(unsigned int i=0; i<ids.size(); ++i)
	{
		if(idsDone.find(ids[i]) == idsDone.end())
		{
			if(ids[i] > 0)
			{
				// Set high values (gaussians curves) to loop closure neighbors

				// ADD prob for each neighbors
				std::map<int, int> neighbors = memory->getNeighborsId(ids[i], _predictionLC.size()-1, 0);
				std::list<int> idsLoopMargin;
				//filter neighbors in STM
				for(std::map<int, int>::iterator iter=neighbors.begin(); iter!=neighbors.end();)
				{
					if(memory->isInSTM(iter->first))
					{
						neighbors.erase(iter++);
					}
					else
					{
						if(iter->second == 0)
						{
							// BUG FIX: collect the signature id (iter->first),
							// not the margin value (iter->second, always 0 in
							// this branch). Pushing the margin made idsDone
							// accumulate zeros, so already-processed ids were
							// never skipped by the idsDone check above.
							idsLoopMargin.push_back(iter->first);
						}
						++iter;
					}
				}

				// should at least have 1 id in idsMarginLoop
				if(idsLoopMargin.size() == 0)
				{
					UFATAL("No 0 margin neighbor for signature %d !?!?", ids[i]);
				}

				// same neighbor tree for loop signatures (margin = 0)
				for(std::list<int>::iterator iter = idsLoopMargin.begin(); iter!=idsLoopMargin.end(); ++iter)
				{
					float sum = 0.0f; // sum values added
					sum += this->addNeighborProb(prediction, i, neighbors, idToIndexMap);
					idsDone.insert(*iter);
					this->normalize(prediction, i, sum, ids[0]<0);
				}
			}
			else
			{
				// Set the virtual place prior
				if(_virtualPlacePrior > 0)
				{
					if(cols>1) // The first must be the virtual place
					{
						((float*)prediction.data)[i] = _virtualPlacePrior;
						// Spread the remaining probability evenly over the
						// other locations.
						float val = (1.0-_virtualPlacePrior)/(cols-1);
						for(int j=1; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
				else
				{
					// Only for some tests...
					// when _virtualPlacePrior=0, set all priors to the same value
					if(cols>1)
					{
						float val = 1.0/cols;
						for(int j=0; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
			}
		}
	}

	ULOGGER_DEBUG("time = %fs", timerGlobal.ticks());
	return prediction;
}
// RGB-D camera test tool: selects a driver from the last command-line
// argument, grabs frames in a loop, and displays the RGB image, a scaled
// depth image, and the reconstructed point cloud until ESC or the viewer
// is closed.
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);

	int driver = 0;
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		// The driver index is expected as the last argument.
		driver = atoi(argv[argc-1]);
		if(driver < 0 || driver > 4)
		{
			UERROR("driver should be between 0 and 4.");
			showUsage();
		}
	}
	UINFO("Using driver %d", driver);

	rtabmap::CameraRGBD * camera = 0;
	if(driver == 0)
	{
		camera = new rtabmap::CameraOpenni("", 0);
	}
	else if(driver == 1)
	{
		if(!rtabmap::CameraOpenNI2::available())
		{
			UERROR("Not built with OpenNI2 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNI2(0);
	}
	else if(driver == 2)
	{
		if(!rtabmap::CameraFreenect::available())
		{
			UERROR("Not built with Freenect support...");
			exit(-1);
		}
		camera = new rtabmap::CameraFreenect(0, 0);
	}
	else if(driver == 3)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(false, 0);
	}
	else if(driver == 4)
	{
		if(!rtabmap::CameraOpenNICV::available())
		{
			UERROR("Not built with OpenNI from OpenCV support...");
			exit(-1);
		}
		camera = new rtabmap::CameraOpenNICV(true, 0);
	}
	else
	{
		UFATAL("");
	}

	if(!camera->init())
	{
		printf("Camera init failed!\n");
		delete camera;
		exit(1);
	}

	// Grab the first frame along with the camera intrinsics.
	cv::Mat rgb, depth;
	float fx, fy, cx, cy;
	camera->takeImage(rgb, depth, fx, fy, cx, cy);

	cv::namedWindow("Video", CV_WINDOW_AUTOSIZE); // create window
	cv::namedWindow("Depth", CV_WINDOW_AUTOSIZE); // create window
	pcl::visualization::CloudViewer viewer("cloud");
	while(!rgb.empty() && !viewer.wasStopped())
	{
		// Scale depth to 8 bits for display (2048 assumed max raw value).
		cv::Mat tmp;
		depth.convertTo(tmp, CV_8UC1, 255.0/2048.0);
		cv::imshow("Video", rgb); // show frame
		cv::imshow("Depth",tmp);
		viewer.showCloud(rtabmap::util3d::cloudFromDepthRGB(rgb, depth, cx, cy, fx, fy), "cloud");

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		// Clear the buffers before grabbing the next frame so that an empty
		// `rgb` reliably signals the end of the stream.
		rgb = cv::Mat();
		depth = cv::Mat();
		camera->takeImage(rgb, depth, fx, fy, cx, cy);
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
// Camera viewer / stereo recorder tool: selects an RGB-D or stereo driver,
// shows the live images and registered point cloud, and can optionally save
// stereo pairs to an AVI file (side-by-side) or to left/right image
// directories (-save_stereo option).
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	//ULogger::setPrintTime(false);
	//ULogger::setPrintWhere(false);

	int driver = 0;
	std::string stereoSavePath;
	float rate = 0.0f;            // input frame rate, required for AVI output
	std::string fourcc = "MJPG";  // video codec for cv::VideoWriter
	if(argc < 2)
	{
		showUsage();
	}
	else
	{
		// Options first; the driver index is expected as the last argument.
		for(int i=1; i<argc; ++i)
		{
			if(strcmp(argv[i], "-rate") == 0)
			{
				++i;
				if(i < argc)
				{
					rate = uStr2Float(argv[i]);
					if(rate < 0.0f)
					{
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-save_stereo") == 0)
			{
				++i;
				if(i < argc)
				{
					stereoSavePath = argv[i];
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "-fourcc") == 0)
			{
				++i;
				if(i < argc)
				{
					fourcc = argv[i];
					if(fourcc.size() != 4)
					{
						UERROR("fourcc should be 4 characters.");
						showUsage();
					}
				}
				else
				{
					showUsage();
				}
				continue;
			}
			if(strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0)
			{
				showUsage();
			}
			else if(i< argc-1)
			{
				printf("Unrecognized option \"%s\"", argv[i]);
				showUsage();
			}

			// last
			driver = atoi(argv[i]);
			if(driver < 0 || driver > 8)
			{
				UERROR("driver should be between 0 and 8.");
				showUsage();
			}
		}
	}
	UINFO("Using driver %d", driver);

	// Drivers 0-5 are RGB-D, 6-8 are stereo.
	rtabmap::Camera * camera = 0;
	if(driver < 6)
	{
		if(!stereoSavePath.empty())
		{
			UWARN("-save_stereo option cannot be used with RGB-D drivers.");
			stereoSavePath.clear();
		}

		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni();
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2();
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect();
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeColor2DepthSD);
		}
	}
	else if(driver == 6)
	{
		if(!rtabmap::CameraStereoDC1394::available())
		{
			UERROR("Not built with DC1394 support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoDC1394();
	}
	else if(driver == 7)
	{
		if(!rtabmap::CameraStereoFlyCapture2::available())
		{
			UERROR("Not built with FlyCapture2/Triclops support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoFlyCapture2();
	}
	else if(driver == 8)
	{
		if(!rtabmap::CameraStereoZed::available())
		{
			UERROR("Not built with ZED sdk support...");
			exit(-1);
		}
		camera = new rtabmap::CameraStereoZed(0);
	}
	else
	{
		UFATAL("");
	}

	if(!camera->init())
	{
		printf("Camera init failed! Please select another driver (see \"--help\").\n");
		delete camera;
		exit(1);
	}

	rtabmap::SensorData data = camera->takeImage();
	if(data.imageRaw().cols != data.depthOrRightRaw().cols || data.imageRaw().rows != data.depthOrRightRaw().rows)
	{
		UWARN("RGB (%d/%d) and depth (%d/%d) frames are not the same size! The registered cloud cannot be shown.",
				data.imageRaw().cols, data.imageRaw().rows, data.depthOrRightRaw().cols, data.depthOrRightRaw().rows);
	}
	// Only create the cloud viewer if the camera is calibrated for projection.
	pcl::visualization::CloudViewer * viewer = 0;
	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UWARN("Camera not calibrated! The registered cloud cannot be shown.");
	}
	else
	{
		viewer = new pcl::visualization::CloudViewer("cloud");
	}
	// Optical frame -> viewer frame rotation (z forward -> z up).
	rtabmap::Transform t(1, 0, 0, 0,
						 0, -1, 0, 0,
						 0, 0, -1, 0);

	cv::VideoWriter videoWriter;
	UDirectory dir;
	if(!stereoSavePath.empty() &&
	   !data.imageRaw().empty() &&
	   !data.rightRaw().empty())
	{
		// Saving to a single AVI requires a known frame rate and equal-size
		// left/right images (written side-by-side in one frame).
		if(UFile::getExtension(stereoSavePath).compare("avi") == 0)
		{
			if(data.imageRaw().size() == data.rightRaw().size())
			{
				if(rate <= 0)
				{
					UERROR("You should set the input rate when saving stereo images to a video file.");
					showUsage();
				}
				cv::Size targetSize = data.imageRaw().size();
				targetSize.width *= 2;
				UASSERT(fourcc.size() == 4);
				videoWriter.open(
						stereoSavePath,
						CV_FOURCC(fourcc.at(0), fourcc.at(1), fourcc.at(2), fourcc.at(3)),
						rate,
						targetSize,
						data.imageRaw().channels() == 3);
			}
			else
			{
				UERROR("Images not the same size, cannot save stereo images to the video file.");
			}
		}
		else if(UDirectory::exists(stereoSavePath))
		{
			// Save as individual images in left/ and right/ subdirectories.
			UDirectory::makeDir(stereoSavePath+"/"+"left");
			UDirectory::makeDir(stereoSavePath+"/"+"right");
		}
		else
		{
			UERROR("Directory \"%s\" doesn't exist.", stereoSavePath.c_str());
			stereoSavePath.clear();
		}
	}

	// to catch the ctrl-c
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	int id=1;
	while(!data.imageRaw().empty() && (viewer==0 || !viewer->wasStopped()) && running)
	{
		cv::Mat rgb = data.imageRaw();
		if(!data.depthRaw().empty() && (data.depthRaw().type() == CV_16UC1 || data.depthRaw().type() == CV_32FC1))
		{
			// depth
			cv::Mat depth = data.depthRaw();
			if(depth.type() == CV_32FC1)
			{
				depth = rtabmap::util2d::cvtDepthFromFloat(depth);
			}

			if(rgb.cols == depth.cols && rgb.rows == depth.rows &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromDepthRGB(
						rgb, depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
			else if(!depth.empty() &&
					data.cameraModels().size() &&
					data.cameraModels()[0].isValidForProjection())
			{
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::cloudFromDepth(
						depth,
						data.cameraModels()[0]);
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				// NOTE(review): no `if(viewer)` guard here, unlike the other
				// branches; safe only because a valid camera model implies the
				// viewer was created above — confirm before refactoring.
				viewer->showCloud(cloud, "cloud");
			}

			// Scale depth for display using the observed max value.
			cv::Mat tmp;
			unsigned short min=0, max = 2048;
			uMinMax((unsigned short*)depth.data, depth.rows*depth.cols, min, max);
			depth.convertTo(tmp, CV_8UC1, 255.0/max);

			cv::imshow("Video", rgb); // show frame
			cv::imshow("Depth", tmp);
		}
		else if(!data.rightRaw().empty())
		{
			// stereo
			cv::Mat right = data.rightRaw();
			cv::imshow("Left", rgb); // show frame
			cv::imshow("Right", right);

			if(rgb.cols == right.cols && rgb.rows == right.rows && data.stereoCameraModel().isValidForProjection())
			{
				if(right.channels() == 3)
				{
					cv::cvtColor(right, right, CV_BGR2GRAY);
				}
				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromStereoImages(
						rgb, right,
						data.stereoCameraModel());
				cloud = rtabmap::util3d::transformPointCloud(cloud, t);
				if(viewer)
					viewer->showCloud(cloud, "cloud");
			}
		}

		int c = cv::waitKey(10); // wait 10 ms or for key stroke
		if(c == 27)
			break; // if ESC, break and quit

		if(videoWriter.isOpened())
		{
			// Compose the left and right images side-by-side in one frame.
			cv::Mat left = data.imageRaw();
			cv::Mat right = data.rightRaw();
			if(left.size() == right.size())
			{
				cv::Size targetSize = left.size();
				targetSize.width *= 2;
				cv::Mat targetImage(targetSize, left.type());
				if(right.type() != left.type())
				{
					// Match the channel count of the left image.
					cv::Mat tmp;
					cv::cvtColor(right, tmp, left.channels()==3?CV_GRAY2BGR:CV_BGR2GRAY);
					right = tmp;
				}
				UASSERT(left.type() == right.type());

				cv::Mat roiA(targetImage, cv::Rect( 0, 0, left.size().width, left.size().height ));
				left.copyTo(roiA);
				cv::Mat roiB( targetImage, cvRect( left.size().width, 0, left.size().width, left.size().height ) );
				right.copyTo(roiB);

				videoWriter.write(targetImage);
				printf("Saved frame %d to \"%s\"\n", id, stereoSavePath.c_str());
			}
			else
			{
				UERROR("Left and right images are not the same size!?");
			}
		}
		else if(!stereoSavePath.empty())
		{
			cv::imwrite(stereoSavePath+"/"+"left/"+uNumber2Str(id) + ".jpg", data.imageRaw());
			cv::imwrite(stereoSavePath+"/"+"right/"+uNumber2Str(id) + ".jpg", data.rightRaw());
			printf("Saved frames %d to \"%s/left\" and \"%s/right\" directories\n", id, stereoSavePath.c_str(), stereoSavePath.c_str());
		}
		++id;
		data = camera->takeImage();
	}
	printf("Closing...\n");
	if(viewer)
	{
		delete viewer;
	}
	cv::destroyWindow("Video");
	cv::destroyWindow("Depth");
	delete camera;
	return 0;
}
// Standalone visual odometry tool: parses many odometry tuning options,
// builds the selected odometry approach (BOW / optical flow / mono / ICP)
// with the matching rtabmap parameters, then runs it on either a database
// (-db) or a live camera driver, displaying results in an OdometryViewer.
int main (int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	// parse arguments
	float rate = 0.0;
	std::string inputDatabase;
	int driver = 0;
	int odomType = rtabmap::Parameters::defaultOdomFeatureType();
	bool icp = false;
	bool flow = false;
	bool mono = false;
	int nnType = rtabmap::Parameters::defaultOdomBowNNType();
	float nndr = rtabmap::Parameters::defaultOdomBowNNDR();
	float distance = rtabmap::Parameters::defaultOdomInlierDistance();
	int maxWords = rtabmap::Parameters::defaultOdomMaxFeatures();
	int minInliers = rtabmap::Parameters::defaultOdomMinInliers();
	float maxDepth = rtabmap::Parameters::defaultOdomMaxDepth();
	int iterations = rtabmap::Parameters::defaultOdomIterations();
	int resetCountdown = rtabmap::Parameters::defaultOdomResetCountdown();
	int decimation = 4;       // ICP: cloud decimation factor
	float voxel = 0.005;      // ICP: voxel filter size (m)
	int samples = 10000;      // ICP: number of sampled points
	float ratio = 0.7f;       // ICP: correspondence ratio
	int maxClouds = 10;       // viewer: max clouds kept displayed
	int briefBytes = rtabmap::Parameters::defaultBRIEFBytes();
	int fastThr = rtabmap::Parameters::defaultFASTThreshold();
	float sec = 0.0f;         // delay before starting the camera (s)
	bool gpu = false;
	int localHistory = rtabmap::Parameters::defaultOdomBowLocalHistorySize();
	bool p2p = rtabmap::Parameters::defaultOdomPnPEstimation();

	// Option loop: each "-opt value" consumes the next argument;
	// showUsage() is assumed to terminate the process.
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "-driver") == 0)
		{
			++i;
			if(i < argc)
			{
				driver = std::atoi(argv[i]);
				if(driver < 0 || driver > 7)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-o") == 0)
		{
			++i;
			if(i < argc)
			{
				odomType = std::atoi(argv[i]);
				if(odomType < 0 || odomType > 6)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-nn") == 0)
		{
			++i;
			if(i < argc)
			{
				nnType = std::atoi(argv[i]);
				if(nnType < 0 || nnType > 4)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-nndr") == 0)
		{
			++i;
			if(i < argc)
			{
				nndr = uStr2Float(argv[i]);
				if(nndr < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-hz") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = uStr2Float(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-db") == 0)
		{
			++i;
			if(i < argc)
			{
				inputDatabase = argv[i];
				if(UFile::getExtension(inputDatabase).compare("db") != 0)
				{
					printf("Database path (%s) should end with \"db\" \n", inputDatabase.c_str());
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-clouds") == 0)
		{
			++i;
			if(i < argc)
			{
				maxClouds = std::atoi(argv[i]);
				if(maxClouds < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-sec") == 0)
		{
			++i;
			if(i < argc)
			{
				sec = uStr2Float(argv[i]);
				if(sec < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-in") == 0)
		{
			++i;
			if(i < argc)
			{
				distance = uStr2Float(argv[i]);
				if(distance <= 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-max") == 0)
		{
			++i;
			if(i < argc)
			{
				maxWords = std::atoi(argv[i]);
				if(maxWords < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-min") == 0)
		{
			++i;
			if(i < argc)
			{
				minInliers = std::atoi(argv[i]);
				if(minInliers < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-depth") == 0)
		{
			++i;
			if(i < argc)
			{
				maxDepth = uStr2Float(argv[i]);
				if(maxDepth < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-i") == 0)
		{
			++i;
			if(i < argc)
			{
				iterations = std::atoi(argv[i]);
				if(iterations <= 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-reset") == 0)
		{
			++i;
			if(i < argc)
			{
				resetCountdown = std::atoi(argv[i]);
				if(resetCountdown < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-d") == 0)
		{
			++i;
			if(i < argc)
			{
				decimation = std::atoi(argv[i]);
				if(decimation < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-v") == 0)
		{
			++i;
			if(i < argc)
			{
				voxel = uStr2Float(argv[i]);
				if(voxel < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-s") == 0)
		{
			++i;
			if(i < argc)
			{
				samples = std::atoi(argv[i]);
				if(samples < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-cr") == 0)
		{
			++i;
			if(i < argc)
			{
				ratio = uStr2Float(argv[i]);
				if(ratio < 0.0f)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-gpu") == 0)
		{
			gpu = true;
			continue;
		}
		if(strcmp(argv[i], "-lh") == 0)
		{
			++i;
			if(i < argc)
			{
				localHistory = std::atoi(argv[i]);
				if(localHistory < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-brief_bytes") == 0)
		{
			++i;
			if(i < argc)
			{
				briefBytes = std::atoi(argv[i]);
				if(briefBytes < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-fast_thr") == 0)
		{
			++i;
			if(i < argc)
			{
				fastThr = std::atoi(argv[i]);
				if(fastThr < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-icp") == 0)
		{
			icp = true;
			continue;
		}
		if(strcmp(argv[i], "-flow") == 0)
		{
			flow = true;
			continue;
		}
		if(strcmp(argv[i], "-mono") == 0)
		{
			mono = true;
			continue;
		}
		if(strcmp(argv[i], "-p2p") == 0)
		{
			p2p = true;
			continue;
		}
		if(strcmp(argv[i], "-debug") == 0)
		{
			ULogger::setLevel(ULogger::kDebug);
			ULogger::setPrintTime(true);
			ULogger::setPrintWhere(true);
			continue;
		}

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	// Binary descriptors (odomType > 1) are incompatible with the KdTree
	// index, and float descriptors with LSH.
	if(odomType > 1 && nnType == rtabmap::VWDictionary::kNNFlannKdTree)
	{
		UERROR("You set \"-o %d\" (binary descriptor), you must use \"-nn 2\" (any \"-nn\" other than kNNFlannKdTree)", odomType);
		showUsage();
	}
	else if(odomType <= 1 && nnType == rtabmap::VWDictionary::kNNFlannLSH)
	{
		UERROR("You set \"-o %d\" (float descriptor), you must use \"-nn 1\" (any \"-nn\" other than kNNFlannLSH)", odomType);
		showUsage();
	}

	if(inputDatabase.size())
	{
		UINFO("Using database input \"%s\"", inputDatabase.c_str());
	}
	else
	{
		UINFO("Using OpenNI camera");
	}

	// Human-readable names for logging only.
	std::string odomName;
	if(odomType == 0)
	{
		odomName = "SURF";
	}
	else if(odomType == 1)
	{
		odomName = "SIFT";
	}
	else if(odomType == 2)
	{
		odomName = "ORB";
	}
	else if(odomType == 3)
	{
		odomName = "FAST+FREAK";
	}
	else if(odomType == 4)
	{
		odomName = "FAST+BRIEF";
	}
	else if(odomType == 5)
	{
		odomName = "GFTT+FREAK";
	}
	else if(odomType == 6)
	{
		odomName = "GFTT+BRIEF";
	}
	else if(odomType == 7)
	{
		odomName = "BRISK";
	}

	if(icp)
	{
		odomName= "ICP";
	}
	if(flow)
	{
		odomName= "Optical Flow";
	}

	std::string nnName;
	if(nnType == 0)
	{
		nnName = "kNNFlannLinear";
	}
	else if(nnType == 1)
	{
		nnName = "kNNFlannKdTree";
	}
	else if(nnType == 2)
	{
		nnName= "kNNFlannLSH";
	}
	else if(nnType == 3)
	{
		nnName= "kNNBruteForce";
	}
	else if(nnType == 4)
	{
		nnName= "kNNBruteForceGPU";
	}

	UINFO("Odometry used = %s", odomName.c_str());
	UINFO("Camera rate = %f Hz", rate);
	UINFO("Maximum clouds shown = %d", maxClouds);
	UINFO("Delay = %f s", sec);
	UINFO("Max depth = %f", maxDepth);
	UINFO("Reset odometry coutdown = %d", resetCountdown);

	QApplication app(argc, argv);

	// Translate the parsed options into rtabmap parameters and build the
	// requested odometry approach.
	rtabmap::Odometry * odom = 0;
	rtabmap::ParametersMap parameters;
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxDepth(), uNumber2Str(maxDepth)));
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomResetCountdown(), uNumber2Str(resetCountdown)));
	if(!icp)
	{
		UINFO("Min inliers = %d", minInliers);
		UINFO("Inlier maximum correspondences distance = %f", distance);
		UINFO("RANSAC iterations = %d", iterations);
		UINFO("Max features = %d", maxWords);
		UINFO("GPU = %s", gpu?"true":"false");
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomInlierDistance(), uNumber2Str(distance)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMinInliers(), uNumber2Str(minInliers)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), uNumber2Str(iterations)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxFeatures(), uNumber2Str(maxWords)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomFeatureType(), uNumber2Str(odomType)));
		if(odomType == 0)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kSURFGpuVersion(), uBool2Str(gpu)));
		}
		if(odomType == 2)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kORBGpu(), uBool2Str(gpu)));
		}
		if(odomType == 3 || odomType == 4)
		{
			UINFO("FAST threshold = %d", fastThr);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTThreshold(), uNumber2Str(fastThr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTGpu(), uBool2Str(gpu)));
		}
		if(odomType == 4 || odomType == 6)
		{
			UINFO("BRIEF bytes = %d", briefBytes);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kBRIEFBytes(), uNumber2Str(briefBytes)));
		}

		if(flow)
		{
			// Optical Flow
			odom = new rtabmap::OdometryOpticalFlow(parameters);
		}
		else
		{
			//BOW
			UINFO("Nearest neighbor = %s", nnName.c_str());
			UINFO("Nearest neighbor ratio = %f", nndr);
			UINFO("Local history = %d", localHistory);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNType(), uNumber2Str(nnType)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNDR(), uNumber2Str(nndr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowLocalHistorySize(), uNumber2Str(localHistory)));

			if(mono)
			{
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPFlags(), uNumber2Str(0))); //CV_ITERATIVE
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPReprojError(), "4.0"));
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), "100"));
				odom = new rtabmap::OdometryMono(parameters);
			}
			else
			{
				odom = new rtabmap::OdometryBOW(parameters);
			}
		}
	}
	else if(icp) // ICP
	{
		UINFO("ICP maximum correspondences distance = %f", distance);
		UINFO("ICP iterations = %d", iterations);
		UINFO("Cloud decimation = %d", decimation);
		UINFO("Cloud voxel size = %f", voxel);
		UINFO("Cloud samples = %d", samples);
		UINFO("Cloud correspondence ratio = %f", ratio);
		UINFO("Cloud point to plane = %s", p2p?"false":"true");
		odom = new rtabmap::OdometryICP(decimation, voxel, samples, distance, iterations, ratio, !p2p);
	}

	// Viewer and odometry run in separate threads communicating via events.
	rtabmap::OdometryThread odomThread(odom);
	rtabmap::OdometryViewer odomViewer(maxClouds, 2, 0.0, 50);
	UEventsManager::addHandler(&odomThread);
	UEventsManager::addHandler(&odomViewer);

	odomViewer.setWindowTitle("Odometry view");
	odomViewer.resize(1280, 480+QPushButton().minimumHeight());

	if(inputDatabase.size())
	{
		// Replay a recorded database instead of a live camera.
		rtabmap::DBReader camera(inputDatabase, rate, true);
		if(camera.init())
		{
			odomThread.start();

			if(sec > 0)
			{
				uSleep(sec*1000);
			}

			camera.start();

			app.exec();

			camera.join(true);
			odomThread.join(true);
		}
	}
	else
	{
		rtabmap::CameraRGBD * camera = 0;
		// Optical frame -> robot base frame rotation.
		rtabmap::Transform t=rtabmap::Transform(0,0,1,0,
												-1,0,0,0,
												0,-1,0,0);
		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni("", rate, t);
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2("", rate, t);
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect(0, rate, t);
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false, rate, t);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true, rate, t);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD, rate, t);
		}
		else if(driver == 6)
		{
			if(!rtabmap::CameraStereoDC1394::available())
			{
				UERROR("Not built with dc1394 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoDC1394(rate, t);
		}
		else if(driver == 7)
		{
			if(!rtabmap::CameraStereoFlyCapture2::available())
			{
				UERROR("Not built with FlyCapture2/Triclops support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoFlyCapture2(rate, t);
		}
		else
		{
			UFATAL("Camera driver (%d) not found!", driver);
		}

		//pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);

		if(camera->init())
		{
			if(camera->isCalibrated())
			{
				rtabmap::CameraThread cameraThread(camera);

				odomThread.start();
				cameraThread.start();

				odomViewer.exec();

				cameraThread.join(true);
				odomThread.join(true);
			}
			else
			{
				printf("The camera is not calibrated! You should calibrate the camera first.\n");
				delete camera;
			}
		}
		else
		{
			printf("Failed to initialize the camera! Please select another driver (see \"--help\").\n");
			delete camera;
		}
	}

	return 0;
}
int main(int argc, char * argv[]) { ULogger::setType(ULogger::kTypeConsole); ULogger::setLevel(ULogger::kInfo); //ULogger::setPrintTime(false); //ULogger::setPrintWhere(false); int driver = 0; if(argc < 2) { showUsage(); } else { if(strcmp(argv[argc-1], "--help") == 0) { showUsage(); } driver = atoi(argv[argc-1]); if(driver < 0 || driver > 7) { UERROR("driver should be between 0 and 6."); showUsage(); } } UINFO("Using driver %d", driver); rtabmap::CameraRGBD * camera = 0; if(driver == 0) { camera = new rtabmap::CameraOpenni(); } else if(driver == 1) { if(!rtabmap::CameraOpenNI2::available()) { UERROR("Not built with OpenNI2 support..."); exit(-1); } camera = new rtabmap::CameraOpenNI2(); } else if(driver == 2) { if(!rtabmap::CameraFreenect::available()) { UERROR("Not built with Freenect support..."); exit(-1); } camera = new rtabmap::CameraFreenect(); } else if(driver == 3) { if(!rtabmap::CameraOpenNICV::available()) { UERROR("Not built with OpenNI from OpenCV support..."); exit(-1); } camera = new rtabmap::CameraOpenNICV(false); } else if(driver == 4) { if(!rtabmap::CameraOpenNICV::available()) { UERROR("Not built with OpenNI from OpenCV support..."); exit(-1); } camera = new rtabmap::CameraOpenNICV(true); } else if(driver == 5) { if(!rtabmap::CameraFreenect2::available()) { UERROR("Not built with Freenect2 support..."); exit(-1); } camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD); } else if(driver == 6) { if(!rtabmap::CameraStereoDC1394::available()) { UERROR("Not built with DC1394 support..."); exit(-1); } camera = new rtabmap::CameraStereoDC1394(); } else if(driver == 7) { if(!rtabmap::CameraStereoFlyCapture2::available()) { UERROR("Not built with FlyCapture2/Triclops support..."); exit(-1); } camera = new rtabmap::CameraStereoFlyCapture2(); } else { UFATAL(""); } if(!camera->init()) { printf("Camera init failed! 
Please select another driver (see \"--help\").\n"); delete camera; exit(1); } cv::Mat rgb, depth; float fx, fy, cx, cy; camera->takeImage(rgb, depth, fx, fy, cx, cy); if(rgb.cols != depth.cols || rgb.rows != depth.rows) { UWARN("RGB (%d/%d) and depth (%d/%d) frames are not the same size! The registered cloud cannot be shown.", rgb.cols, rgb.rows, depth.cols, depth.rows); } if(!fx || !fy) { UWARN("fx and/or fy are not set! The registered cloud cannot be shown."); } pcl::visualization::CloudViewer viewer("cloud"); rtabmap::Transform t(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0); while(!rgb.empty() && !viewer.wasStopped()) { if(depth.type() == CV_16UC1 || depth.type() == CV_32FC1) { // depth if(depth.type() == CV_32FC1) { depth = rtabmap::util3d::cvtDepthFromFloat(depth); } if(rgb.cols == depth.cols && rgb.rows == depth.rows && fx && fy) { std::cout << "------------------------------------------------- tools/CameraRGBD/main.cpp -------------------------------------------------" << std::endl; pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromDepthRGB(rgb, depth, cx, cy, fx, fy); cloud = rtabmap::util3d::transformPointCloud(cloud, t); viewer.showCloud(cloud, "cloud"); } else if(!depth.empty() && fx && fy) { pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::cloudFromDepth(depth, cx, cy, fx, fy); cloud = rtabmap::util3d::transformPointCloud(cloud, t); viewer.showCloud(cloud, "cloud"); } cv::Mat tmp; unsigned short min=0, max = 2048; uMinMax((unsigned short*)depth.data, depth.rows*depth.cols, min, max); depth.convertTo(tmp, CV_8UC1, 255.0/max); cv::imshow("Video", rgb); // show frame cv::imshow("Depth", tmp); } else { // stereo cv::imshow("Left", rgb); // show frame cv::imshow("Right", depth); if(rgb.cols == depth.cols && rgb.rows == depth.rows && fx && fy) { if(depth.channels() == 3) { cv::cvtColor(depth, depth, CV_BGR2GRAY); } pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::cloudFromStereoImages(rgb, depth, cx, cy, fx, fy); 
cloud = rtabmap::util3d::transformPointCloud(cloud, t); viewer.showCloud(cloud, "cloud"); } } int c = cv::waitKey(10); // wait 10 ms or for key stroke if(c == 27) break; // if ESC, break and quit rgb = cv::Mat(); depth = cv::Mat(); camera->takeImage(rgb, depth, fx, fy, cx, cy); } cv::destroyWindow("Video"); cv::destroyWindow("Depth"); delete camera; return 0; }
// Deprecated stub: this executable only prints a fatal message pointing
// users to the replacement node, then exits.
int main(int argc, char *argv[])
{
	(void)argc; // arguments are intentionally ignored
	(void)argv;
	ULogger::setType(ULogger::kTypeConsole);
	UFATAL("visual_odometry node is deprecated, use rgbd_odometry instead!");
	return 0;
}
void RtabmapThread::mainLoop() { State state = kStateDetecting; ParametersMap parameters; _stateMutex.lock(); { if(!_state.empty() && !_stateParam.empty()) { state = _state.top(); _state.pop(); parameters = _stateParam.top(); _stateParam.pop(); } } _stateMutex.unlock(); switch(state) { case kStateDetecting: this->process(); break; case kStateInit: UASSERT(!parameters.at("RtabmapThread/DatabasePath").empty()); _rtabmap->init(parameters, parameters.at("RtabmapThread/DatabasePath")); break; case kStateChangingParameters: Parameters::parse(parameters, Parameters::kRtabmapImageBufferSize(), _dataBufferMaxSize); Parameters::parse(parameters, Parameters::kRtabmapDetectionRate(), _rate); UASSERT(_dataBufferMaxSize >= 0); UASSERT(_rate >= 0.0f); _rtabmap->parseParameters(parameters); break; case kStateReseting: _rtabmap->resetMemory(); this->clearBufferedData(); break; case kStateClose: if(_dataBuffer.size()) { UWARN("Closing... %d data still buffered! They will be cleared.", (int)_dataBuffer.size()); this->clearBufferedData(); } _rtabmap->close(); break; case kStateDumpingMemory: _rtabmap->dumpData(); break; case kStateDumpingPrediction: _rtabmap->dumpPrediction(); break; case kStateGeneratingDOTGraph: _rtabmap->generateDOTGraph(parameters.at("path")); break; case kStateGeneratingDOTLocalGraph: _rtabmap->generateDOTGraph(parameters.at("path"), atoi(parameters.at("id").c_str()), atoi(parameters.at("margin").c_str())); break; case kStateGeneratingTOROGraphLocal: _rtabmap->generateTOROGraph(parameters.at("path"), atoi(parameters.at("optimized").c_str())!=0, false); break; case kStateGeneratingTOROGraphGlobal: _rtabmap->generateTOROGraph(parameters.at("path"), atoi(parameters.at("optimized").c_str())!=0, true); break; case kStateCleanDataBuffer: this->clearBufferedData(); break; case kStatePublishingMapLocal: this->publishMap(atoi(parameters.at("optimized").c_str())!=0, false); break; case kStatePublishingMapGlobal: this->publishMap(atoi(parameters.at("optimized").c_str())!=0, 
true); break; case kStatePublishingTOROGraphLocal: this->publishTOROGraph(atoi(parameters.at("optimized").c_str())!=0, false); break; case kStatePublishingTOROGraphGlobal: this->publishTOROGraph(atoi(parameters.at("optimized").c_str())!=0, true); break; case kStateTriggeringMap: _rtabmap->triggerNewMap(); break; default: UFATAL("Invalid state !?!?"); break; } }