// Checks whether 'alias' matches an entry of the synchronized list.
// Returns the stored iIsMatching flag (0/1) when an entry exists, 0 when no
// entry is found, and -1 on error (the discovery job has not run and
// canCheckOnServer is false, or posting to the server fails).
int isMatching(const char *alias, int canCheckOnServer)
{
   CTListItemAlias i(alias);
   // Fast path: a locally cached entry already confirmed by the server.
   CTListItemAlias *ret = (CTListItemAlias*)list.findByKey(i.hashhex, i.eHashHexSize);
   if(ret && ret->iSentToServer)
   {
      return ret ? ret->iIsMatching : 0;
   }
   if(iState != ePostOk)
   {
      puts("WARN: Call the doJob() first");
      if(!canCheckOnServer)return -1;
      // Local prototype for the sleep helper used just below.
      void uSleep(int usec );
      // Busy-wait (100 us polls) while discovery is still in progress.
      while(iState == eDiscovering)uSleep(100);
      if(iState != ePostOk)
      {
         // Discovery ended without a successful post; try to post now.
         int r = post();
         if(r<0)return -1;
      }
   }
   // Re-query the list: the post above may have refreshed the entry.
   ret = (CTListItemAlias*)list.findByKey(i.hashhex, i.eHashHexSize);
   return ret ? ret->iIsMatching : 0;
}
void CameraThread::mainLoopKill() { UDEBUG(""); if(dynamic_cast<CameraFreenect2*>(_camera) != 0) { int i=20; while(i-->0) { uSleep(100); if(!this->isKilled()) { break; } } if(this->isKilled()) { //still in killed state, maybe a deadlock UERROR("CameraFreenect2: Failed to kill normally the Freenect2 driver! The thread is locked " "on waitForNewFrame() method of libfreenect2. This maybe caused by not linking on the right libusb. " "Note that rtabmap should link on libusb of libfreenect2. " "Tip before starting rtabmap: \"$ export LD_LIBRARY_PATH=~/libfreenect2/depends/libusb/lib:$LD_LIBRARY_PATH\""); } } }
// Blocks until the given process terminates.
// Returns 0 once the process is gone, -1 if waiting/checking failed.
int uWaitForProcess(UPID pid, UPHANDLE h, sys_call_error_fun fun)
{
#ifdef _WIN32
    /* Block on the process handle until it becomes signalled. */
    if (WaitForSingleObject(h, INFINITE) == WAIT_FAILED)
    {
        sys_call_error("WaitForSingleObject");
        return -1;
    }
    return 0;
#else
    /* Poll once per second until the process disappears. */
    int alive;
    do
    {
        alive = uIsProcessExist(pid, h, __sys_call_error);
        if (alive == -2)
        {
            /* Error happened in uIsProcessExist */
            return -1;
        }
        if (alive == 0)
        {
            /* Process still alive: wait a second and check again. */
            uSleep(1, __sys_call_error);
        }
    } while (alive == 0); /* alive == -1: no such process anymore */
    return 0;
#endif
}
// Thread startup hook: honor the optional startup delay, then start the
// frame-rate timer used by the read loop.
void DBReader::mainLoopBegin()
{
	const bool delayRequested = _delayToStartSec > 0.0f;
	if(delayRequested)
	{
		uSleep(_delayToStartSec * 1000.0f);
	}
	_timer.start();
}
// Property slot: when the "Download map" checkbox is checked, calls the
// rtabmap "get_map" ROS service (global, optimized, with data), rebuilds all
// clouds from the response, then unchecks the property without re-entering
// this slot.
void MapCloudDisplay::downloadMap()
{
	if(download_map_->getBool())
	{
		rtabmap_ros::GetMap getMapSrv;
		getMapSrv.request.global = true;
		getMapSrv.request.optimized = true;
		getMapSrv.request.graphOnly = false;
		ros::NodeHandle nh;
		// Modeless progress dialog; deletes itself when closed.
		QMessageBox * messageBox = new QMessageBox(
				QMessageBox::NoIcon,
				tr("Calling \"%1\" service...").arg(nh.resolveName("rtabmap/get_map").c_str()),
				tr("Downloading the map... please wait (rviz could become gray!)"),
				QMessageBox::NoButton);
		messageBox->setAttribute(Qt::WA_DeleteOnClose, true);
		messageBox->show();
		QApplication::processEvents();
		uSleep(100); // hack make sure the text in the QMessageBox is shown...
		QApplication::processEvents();
		if(!ros::service::call("rtabmap/get_map", getMapSrv))
		{
			ROS_ERROR("MapCloudDisplay: Can't call \"%s\" service. "
					"Tip: if rtabmap node is not in rtabmap namespace, you can remap the service "
					"to \"get_map\" in the launch "
					"file like: <remap from=\"rtabmap/get_map\" to=\"get_map\"/>.",
					nh.resolveName("rtabmap/get_map").c_str());
			messageBox->setText(tr("MapCloudDisplay: Can't call \"%1\" service. "
					"Tip: if rtabmap node is not in rtabmap namespace, you can remap the service "
					"to \"get_map\" in the launch "
					"file like: <remap from=\"rtabmap/get_map\" to=\"get_map\"/>.").
					arg(nh.resolveName("rtabmap/get_map").c_str()));
		}
		else
		{
			messageBox->setText(tr("Creating all clouds (%1 poses and %2 clouds downloaded)...")
					.arg(getMapSrv.response.data.graph.poses.size()).arg(getMapSrv.response.data.nodes.size()));
			QApplication::processEvents();
			// Drop the current display state before loading the new map.
			this->reset();
			processMapData(getMapSrv.response.data);
			messageBox->setText(tr("Creating all clouds (%1 poses and %2 clouds downloaded)... done!")
					.arg(getMapSrv.response.data.graph.poses.size()).arg(getMapSrv.response.data.nodes.size()));
			// Leave the "done" message visible for one second, then close.
			QTimer::singleShot(1000, messageBox, SLOT(close()));
		}
		// Uncheck without emitting the changed signal (would recurse here).
		download_map_->blockSignals(true);
		download_map_->setBool(false);
		download_map_->blockSignals(false);
	}
	else
	{
		// just stay true if double-clicked on DownloadMap property, let the
		// first process above finishes
		download_map_->blockSignals(true);
		download_map_->setBool(true);
		download_map_->blockSignals(false);
	}
}
/* Programs PIT channel 0 to fire periodically at (approximately) pitHz.
 * Command 0x34 selects channel 0, lobyte/hibyte access, mode 2 (rate
 * generator), binary counting; the 16-bit reload value is then written
 * low byte first. */
void setup_pit(u32 pitHz)
{
	const u32 reload = PIT_FREQ / pitHz;

	outb(PIT_REG_MODE, 0x34);
	uSleep(10);
	/* Reload value: low byte, then high byte. */
	outb(PIT_REG_CH0, (u8)(reload & 0xFF));
	outb(PIT_REG_CH0, (u8)((reload >> 8) & 0xFF));
}
// Captures one frame, throttled to _imageRate: a coarse sleep leaving a
// ~2 ms margin followed by a busy-wait for precision.  Warns when capture is
// slower than the target rate.  Fills 'info' (id, stamp, capture time) when
// provided.
SensorData Camera::takeImage(CameraInfo * info)
{
	bool rateExceeded = false;
	float measuredRate = 0;
	if(_imageRate>0)
	{
		const double period = 1.0/double(_imageRate);
		int msToSleep = (1000.0f/_imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
		if(msToSleep > 2)
		{
			// Coarse sleep, keeping ~2 ms for the precision busy-wait below.
			uSleep(msToSleep-2);
		}
		else if(msToSleep < 0)
		{
			// Already past the deadline: the camera cannot keep up.
			rateExceeded = true;
			measuredRate = 1.0/(_frameRateTimer->getElapsedTime());
		}

		// Busy-wait for the remaining fraction of the frame period.
		while(_frameRateTimer->getElapsedTime() < period-0.000001)
		{
		}

		double slept = _frameRateTimer->getElapsedTime();
		_frameRateTimer->start();
		UDEBUG("slept=%fs vs target=%fs", slept, period);
	}

	UTimer timer;
	SensorData data = this->captureImage(info);
	double captureTime = timer.ticks();

	if(rateExceeded)
	{
		UWARN("Camera: Cannot reach target image rate %f Hz, current rate is %f Hz and capture time = %f s.",
				_imageRate, measuredRate, captureTime);
	}
	else
	{
		UDEBUG("Time capturing image = %fs", captureTime);
	}

	if(info)
	{
		info->id = data.id();
		info->stamp = data.stamp();
		info->timeCapture = captureTime;
	}
	return data;
}
/* Main network event loop: repeatedly polls stdin (when enabled through
 * 'flags') plus every open connection until is_end() returns non-zero.
 * flags & 1 selects the readline-style input path; flags & 2 the plain
 * getline path (either bit enables stdin polling). */
void net_loop (int flags, int (*is_end)(void)) {
  while (!is_end ()) {
    uSleep(10000);
    struct pollfd fds[101];
    int cc = 0;
    if (flags & 3) {
      /* slot 0 watches stdin */
      fds[0].fd = 0;
      fds[0].events = POLLIN;
      cc ++;
    }
    write_state_file ();
    /* append the connection fds after the optional stdin slot */
    int x = connections_make_poll_array (fds + cc, 101 - cc) + cc;
    double timer = next_timer_in ();
    /* cap the poll timeout at 1 second */
    if (timer > 1000) { timer = 1000; }
    if (poll (fds, x, timer) < 0) {
      /* poll failed or was interrupted: still run due timers, then retry */
      work_timers ();
      continue;
    }
    work_timers ();
    if ((flags & 3) && (fds[0].revents & POLLIN)) {
      unread_messages = 0;
      if (flags & 1) {
        // rl_callback_read_char ();
        qthreadExec();
      } else {
        /* plain mode: read one full line from stdin and hand it over */
        char *line = 0;
        size_t len = 0;
        assert (getline (&line, &len, stdin) >= 0);
        got_it (line, strlen (line));
      }
    }
    connections_poll_result (fds + cc, x - cc);
#ifdef USE_LUA
    lua_do_all ();
#endif
    /* graceful shutdown once all outstanding queries have completed */
    if (safe_quit && !queries_num) {
      printf ("All done. Exit\n");
      qthreadExitRequest (0);
    }
    /* flush any pending silent user-info requests */
    if (unknown_user_list_pos) {
      do_get_user_list_info_silent (unknown_user_list_pos, unknown_user_list);
      unknown_user_list_pos = 0;
    }
  }
}
// Hands the exception context over to the worker thread and waits (up to
// 60 seconds, 1 s polls) for it to be consumed; the worker signals completion
// by resetting this->exceptPtrs.
void pping_client::WriteStackTraceFile(LPEXCEPTION_POINTERS exceptPtrs)
{
    if (exceptPtrs == NULL)
        return; // nothing to report without exception context

    this->except_thread_id = GetCurrentThreadId();
    this->stacktrace_fh = sedna_soft_fault_log_fh(this->component, "-st");
    this->exceptPtrs = exceptPtrs;

    for (int remaining = 60; this->exceptPtrs != NULL && remaining > 0; --remaining)
    {
        // Nudge the worker; bail out if the semaphore is broken.
        if (UUnnamedSemaphoreUp(&sem, NULL) != 0)
            return;
        uSleep(1, NULL);
    }
}
/* Reads the rest of the floppy disk (the cylinders not already loaded by the
 * bootloader) into the in-memory disk image at ADDR_DISKIMG, track by track
 * through a fixed scratch buffer, then prints a position-weighted checksum of
 * the whole image for debugging.  Always returns 0. */
int disk_readall(void)
{
	char* addr=(char*)0x26d400;//Free space
	//int len=0x2400;//9 Ki Bytes
	/**
	assert(len > 0);
	assert((addr &0xff0000)==((addr+len-1)&0xff0000));//[addr,addr+len) does not exceed a 64 KiB boundary.
	assert((addr+len)<=0x1<<24);//16MiB Limitation
	*/
	io_sti();
	/* Controller/DMA setup before issuing reads.
	 * NOTE(review): ports 0x00d6/0x00c0/0x000a look like ISA DMA channel
	 * programming -- confirm against the hardware documentation. */
	io_out8(0x00d6,0xc0);
	io_out8(0x00c0,0x00);
	io_out8(0x000a,0x06);
	io_out8(0x03f2,0x1c);//Start motor
	uSleep(20);
	/* Copy every remaining cylinder (both heads) into the disk image. */
	for(int i=ADDR_BOOTINFO->cyls;i<CYL_NUM;i++)
	{
		const int TRACK=SECTOR_NUM *SECTOR_SIZE;//0x2400 bytes
		disk_read((int)addr,i,0);
		memcpy((char*)ADDR_DISKIMG + (HEAD_NUM * i + 0) *TRACK, addr,TRACK);
		disk_read((int)addr,i,1);
		memcpy((char*)ADDR_DISKIMG + (HEAD_NUM * i + 1) *TRACK, addr,TRACK);
	}
	io_out8(0x03f2,0x0c);//Stop motor
	//test
	//calculating the checksum (each byte weighted by its offset)
	int sum=0;
	for(int i=0;i<CYL_NUM*HEAD_NUM*SECTOR_NUM* SECTOR_SIZE;i++)
	{
		sum += (((char*)ADDR_DISKIMG)[i] & 0xff)*i;
	}
	char str[256];
	sprintf(str,"fd_checksum=%08x\n",sum);
	DEBUG_PRINT(str);
	return 0;
}
// Blocks the calling thread until this thread finishes: waits out the
// creation phase, rejects a self-join, optionally kills the thread first,
// then synchronizes on runningMutex_ as the actual join.
void UThread::join(bool killFirst)
{
	//make sure the thread is created
	while(this->isCreating())
	{
		uSleep(1);
	}

#if WIN32
#if PRINT_DEBUG
	UDEBUG("Thread %d joining %d", UThreadC<void>::Self(), threadId_);
#endif
	// A thread must never join itself (would deadlock below).
	if(UThreadC<void>::Self() == threadId_)
#else
#if PRINT_DEBUG
	UDEBUG("Thread %d joining %d", UThreadC<void>::Self(), handle_);
#endif
	if(UThreadC<void>::Self() == handle_)
#endif
	{
		UERROR("Thread cannot join itself");
		return;
	}

	if(killFirst)
	{
		this->kill();
	}

	// Lock/unlock pair acts as the join: runningMutex_ is presumably held by
	// the running thread for its whole lifetime, so acquiring it here blocks
	// until the thread exits -- confirm against the thread entry code.
	runningMutex_.lock();
	runningMutex_.unlock();
#if PRINT_DEBUG
	UDEBUG("Join ended for %d", UThreadC<void>::Self());
#endif
}
void UThread::kill() { #if PRINT_DEBUG ULOGGER_DEBUG(""); #endif killSafelyMutex_.lock(); { if(this->isRunning()) { // Thread is creating while(state_ == kSCreating) { uSleep(1); } if(state_ == kSRunning) { state_ = kSKilled; // Call function to do something before wait mainLoopKill(); } else { UERROR("thread (%d) is supposed to be running...", threadId_); } } else { #if PRINT_DEBUG UDEBUG("thread (%d) is not running...", threadId_); #endif } } killSafelyMutex_.unlock(); }
// Reads the next node from the database, throttled to _frameRate, and returns
// it as uncompressed SensorData.  Returns an invalid (default) SensorData
// when the driver is not initialized, the reader was killed, or there is no
// more data.
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			// Coarse sleep first (leaving ~2 ms margin), then busy-wait below.
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float variance = 1.0f;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);

			// When odometry is used, load the pose and take the variance of
			// the first neighbor link.
			if(!_odometryIgnored)
			{
				_dbDriver->getPose(*_currentId, pose, mapId);
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					variance = links.begin()->second.variance();
				}
			}

			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				// BUGFIX: log the id that was actually read (seq).  The
				// iterator was already advanced, so dereferencing it here
				// reported the wrong id and was UB when it reached end().
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			// Uncompress image, depth and laser scan in parallel.
			util3d::CompressionThread ctImage(imageBytes, true);
			util3d::CompressionThread ctDepth(depthBytes, true);
			util3d::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					variance,
					seq);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}
// One iteration of the reader thread: fetch the next frame, re-stamp it with
// the current wall clock, extract an optional "GOAL:<id>" command embedded in
// the user data, post the frame as an odometry or camera event, and when a
// goal was found, post the goal command and replay the original inter-frame
// delay.  Kills the thread when the database is exhausted.
void DBReader::mainLoop()
{
	SensorData data = this->getNextData();
	if(data.isValid())
	{
		int goalId = 0;
		double previousStamp = data.stamp();
		// Replace the recorded stamp by the current time.
		data.setStamp(UTimer::now());

		// "GOAL:<id>" in the user data is a command, not payload: parse the
		// id and strip it from the data before forwarding.
		if(data.userData().size() >= 6 && memcmp(data.userData().data(), "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = uBytes2Str(data.userData());
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					goalId = atoi(strs.rbegin()->c_str());
					data.setUserData(std::vector<unsigned char>());
				}
			}
		}

		if(!_odometryIgnored)
		{
			if(data.pose().isNull())
			{
				UWARN("Reading the database: odometry is null! "
						"Please set \"Ignore odometry = true\" if there is "
						"no odometry in the database.");
			}
			this->post(new OdometryEvent(data));
		}
		else
		{
			this->post(new CameraEvent(data));
		}

		if(goalId > 0)
		{
			this->post(new RtabmapEventCmd(RtabmapEventCmd::kCmdGoal, "", goalId));

			if(_currentId != _ids.end())
			{
				// get stamp for the next signature to compute the delay
				// that was used originally for planning
				int weight;
				std::string label;
				double stamp;
				int mapId;
				Transform localTransform, pose;
				std::vector<unsigned char> userData;
				_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp, userData);
				if(previousStamp && stamp && stamp > previousStamp)
				{
					double delay = stamp - previousStamp;
					UWARN("Goal %d detected, posting it! Waiting %f seconds before sending next data...",
							goalId, delay);
					uSleep(delay*1000);
				}
				else
				{
					UWARN("stamps = %d=%f %d=%f ", data.id(), data.stamp(), *_currentId, stamp);
				}
			}
		}
	}
	else if(!this->isKilled())
	{
		// End of database reached: stop the thread and signal downstream.
		UINFO("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
// data_player node: plays back a RTAB-Map database as live ROS topics
// (mono/RGB-D/stereo images with camera infos, laser scans or scan clouds,
// odometry, TF, GPS, global pose and optionally /clock), pacing playback by
// the recorded database stamps.
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);

	// "--clock" command line flag enables simulated clock publishing.
	bool publishClock = false;
	for(int i=1;i<argc;++i)
	{
		if(strcmp(argv[i], "--clock") == 0)
		{
			publishClock = true;
		}
	}

	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");

	// Parameters and their defaults.
	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = 1.0f;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;
	bool useDbStamps = true; // NOTE(review): currently unused below

	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Ratio of the database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// A general 360 lidar with 0.5 deg increment
	// (defaults used only when the recorded scans carry no angle info).
	double scanAngleMin, scanAngleMax, scanAngleIncrement, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 720.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.0);
	pnh.param<double>("scan_range_max", scanRangeMax, 60);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);
	ROS_INFO("Publish clock (--clock): %s", publishClock?"true":"false");

	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}

	// Normalize the database path ('~' expansion, make absolute).
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	// NOTE(review): the rate is negated here -- presumably DBReader treats a
	// negative rate as "use the database stamps scaled by |rate|"; confirm
	// against DBReader's constructor documentation.
	rtabmap::DBReader reader(databasePath, -rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	// Publishers are created lazily below, depending on database content.
	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	ros::Publisher scanCloudPub;
	ros::Publisher globalPosePub;
	ros::Publisher gpsFixPub;
	ros::Publisher clockPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	if(publishClock)
	{
		clockPub = nh.advertise<rosgraph_msgs::Clock>("/clock", 1);
	}

	// First frame.
	UTimer timer;
	rtabmap::CameraInfo cameraInfo;
	rtabmap::SensorData data = reader.takeImage(&cameraInfo);
	rtabmap::OdometryInfo odomInfo;
	odomInfo.reg.covariance = cameraInfo.odomCovariance;
	rtabmap::OdometryEvent odom(data, cameraInfo.odomPose, odomInfo);
	double acquisitionTime = timer.ticks();
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		ros::Time time(odom.data().stamp());

		if(publishClock)
		{
			rosgraph_msgs::Clock msg;
			msg.clock = time;
			clockPub.publish(msg);
		}

		// Build camera infos with identity K/R as a fallback.
		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;

		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;

		camInfoB = camInfoA;

		// type: -1 = mono image only, 0 = RGB-D, 1 = stereo.
		int type = -1;
		if(!odom.data().depthRaw().empty() &&
		   (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					camInfoA.D.resize(5,0);

					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();

					camInfoB = camInfoA;
				}

				type=0;

				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);

				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();

				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);
		}
		else
		{
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		// Lazily advertise the scan publisher matching the scan type.
		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getTopic().empty() && odom.data().laserScanRaw().is2d())
			{
				scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					ROS_INFO("Scan will be published.");
				}
				else
				{
					ROS_INFO("Scan will be published with those parameters:");
					ROS_INFO("  scan_angle_min=%f", scanAngleMin);
					ROS_INFO("  scan_angle_max=%f", scanAngleMax);
					ROS_INFO("  scan_angle_increment=%f", scanAngleIncrement);
					ROS_INFO("  scan_range_min=%f", scanRangeMin);
					ROS_INFO("  scan_range_max=%f", scanRangeMax);
				}
			}
			else if(scanCloudPub.getTopic().empty())
			{
				scanCloudPub = nh.advertise<sensor_msgs::PointCloud2>("scan_cloud", 1);
				ROS_INFO("Scan cloud will be published.");
			}
		}

		if(!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			if(globalPosePub.getTopic().empty())
			{
				globalPosePub = nh.advertise<geometry_msgs::PoseWithCovarianceStamped>("global_pose", 1);
				ROS_INFO("Global pose will be published.");
			}
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			if(gpsFixPub.getTopic().empty())
			{
				gpsFixPub = nh.advertise<sensor_msgs::NavSatFix>("gps/fix", 1);
				ROS_INFO("GPS will be published.");
			}
		}

		// publish transforms first
		if(publishTf)
		{
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			if(!scanPub.getTopic().empty() || !scanCloudPub.getTopic().empty())
			{
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.data().laserScanCompressed().localTransform(), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}

		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		// Publish async topics first (so that they can catched by rtabmap before the image topics)
		if(globalPosePub.getNumSubscribers() > 0 &&
			!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			geometry_msgs::PoseWithCovarianceStamped msg;
			rtabmap_ros::transformToPoseMsg(odom.data().globalPose(), msg.pose.pose);
			memcpy(msg.pose.covariance.data(), odom.data().globalPoseCovariance().data, 36*sizeof(double));
			msg.header.frame_id = frameId;
			msg.header.stamp = time;
			globalPosePub.publish(msg);
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			sensor_msgs::NavSatFix msg;
			msg.longitude = odom.data().gps().longitude();
			msg.latitude = odom.data().gps().latitude();
			msg.altitude = odom.data().gps().altitude();
			msg.position_covariance_type = sensor_msgs::NavSatFix::COVARIANCE_TYPE_DIAGONAL_KNOWN;
			msg.position_covariance.at(0) =
			msg.position_covariance.at(4) =
			msg.position_covariance.at(8)= odom.data().gps().error()* odom.data().gps().error();
			msg.header.frame_id = frameId;
			msg.header.stamp.fromSec(odom.data().gps().stamp());
			gpsFixPub.publish(msg);
		}

		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		// Mono / RGB / left image.
		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		// Registered depth image.
		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		// Right stereo image.
		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		// Laser scan: re-bin the 2D point cloud into a LaserScan message,
		// keeping the closest point per angular bin.
		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getNumSubscribers() && odom.data().laserScanRaw().is2d())
			{
				//inspired from pointcloud_to_laserscan package
				sensor_msgs::LaserScan msg;
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;

				msg.angle_min = scanAngleMin;
				msg.angle_max = scanAngleMax;
				msg.angle_increment = scanAngleIncrement;
				msg.time_increment = 0.0;
				msg.scan_time = 0;
				msg.range_min = scanRangeMin;
				msg.range_max = scanRangeMax;
				// Use the recorded scan parameters when available.
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					msg.angle_min = odom.data().laserScanRaw().angleMin();
					msg.angle_max = odom.data().laserScanRaw().angleMax();
					msg.angle_increment = odom.data().laserScanRaw().angleIncrement();
					msg.range_min = odom.data().laserScanRaw().rangeMin();
					msg.range_max = odom.data().laserScanRaw().rangeMax();
				}

				uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
				msg.ranges.assign(rangesSize, 0.0);

				const cv::Mat & scan = odom.data().laserScanRaw().data();
				for (int i=0; i<scan.cols; ++i)
				{
					const float * ptr = scan.ptr<float>(0,i);
					double range = hypot(ptr[0], ptr[1]);
					if (range >= msg.range_min && range <=msg.range_max)
					{
						double angle = atan2(ptr[1], ptr[0]);
						if (angle >= msg.angle_min && angle <= msg.angle_max)
						{
							int index = (angle - msg.angle_min) / msg.angle_increment;
							// Keep the closest point falling into each bin.
							if (index>=0 && index<rangesSize && (range < msg.ranges[index] || msg.ranges[index]==0))
							{
								msg.ranges[index] = range;
							}
						}
					}
				}

				scanPub.publish(msg);
			}
			else if(scanCloudPub.getNumSubscribers())
			{
				sensor_msgs::PointCloud2 msg;
				pcl_conversions::moveFromPCL(*rtabmap::util3d::laserScanToPointCloud2(odom.data().laserScanRaw()), msg);
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;
				scanCloudPub.publish(msg);
			}
		}

		// "GOAL:<id>" embedded in the user data is forwarded to rtabmap's
		// set_goal service instead of being replayed as data.
		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());

					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		// Block here while paused (pause/resume services toggle the flag).
		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		// Fetch the next frame.
		timer.restart();
		cameraInfo = rtabmap::CameraInfo();
		data = reader.takeImage(&cameraInfo);
		odomInfo.reg.covariance = cameraInfo.odomCovariance;
		odom = rtabmap::OdometryEvent(data, cameraInfo.odomPose, odomInfo);
		acquisitionTime = timer.ticks();
	}

	return 0;
}
// Captures one frame, throttled to _imageRate (33 Hz when _imageRate is 0),
// converts it to 8-bit if needed, optionally extracts keypoints and
// descriptors (when feature extraction is enabled), and discards
// _framesDropped extra frames.  Outputs descriptors/keypoints through the
// reference parameters; returns the captured image (empty on failure).
cv::Mat Camera::takeImage(cv::Mat & descriptors, std::vector<cv::KeyPoint> & keypoints)
{
	descriptors = cv::Mat();
	keypoints.clear();
	float imageRate = _imageRate==0.0f?33.0f:_imageRate; // limit to 33Hz if infinity
	if(imageRate>0)
	{
		// Coarse sleep first, leaving a ~2 ms margin for the busy-wait below.
		int sleepTime = (1000.0f/imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
		if(sleepTime > 2)
		{
			uSleep(sleepTime-2);
		}

		// Add precision at the cost of a small overhead
		while(_frameRateTimer->getElapsedTime() < 1.0/double(imageRate)-0.000001)
		{
			//
		}

		double slept = _frameRateTimer->getElapsedTime();
		_frameRateTimer->start();
		UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(imageRate));
	}

	cv::Mat img;
	UTimer timer;
	img = this->captureImage();
	UDEBUG("Time capturing image = %fs", timer.ticks());

	if(!img.empty())
	{
		// Normalize to 8-bit depth when the driver returned something else.
		if(img.depth() != CV_8U)
		{
			UWARN("Images should have already 8U depth !?");
			cv::Mat tmp = img;
			img = cv::Mat();
			tmp.convertTo(img, CV_8U);
			UDEBUG("Time converting image to 8U = %fs", timer.ticks());
		}

		if(_featuresExtracted && _keypointDetector && _keypointDescriptor)
		{
			keypoints = _keypointDetector->generateKeypoints(img);
			descriptors = _keypointDescriptor->generateDescriptors(img, keypoints);
			UDEBUG("Post treatment time = %fs", timer.ticks());
		}

		if(_framesDropped)
		{
			// Consume and throw away the requested number of extra frames.
			unsigned int count = 0;
			while(count++ < _framesDropped)
			{
				cv::Mat tmp = this->captureImage();
				if(!tmp.empty())
				{
					UDEBUG("frame dropped (%d/%d)", (int)count, (int)_framesDropped);
				}
				else
				{
					break;
				}
			}
			UDEBUG("Frames dropped time = %fs", timer.ticks());
		}
	}
	return img;
}
// Reads the next node from the database, throttled to _frameRate, and returns
// it as uncompressed SensorData (including node info: stamp and user data).
// Returns an invalid (default) SensorData when the driver is not initialized,
// the reader was killed, or there is no more data.
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			// Coarse sleep first (leaving ~2 ms margin), then busy-wait below.
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float rotVariance = 1.0f;
			float transVariance = 1.0f;
			std::vector<unsigned char> userData;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);

			// info
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp, userData);

			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					rotVariance = links.begin()->second.rotVariance();
					transVariance = links.begin()->second.transVariance();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				// BUGFIX: log the id that was actually read (seq).  The
				// iterator was already advanced, so dereferencing it here
				// reported the wrong id and was UB when it reached end().
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			// Uncompress image, depth and laser scan in parallel.
			rtabmap::CompressionThread ctImage(imageBytes, true);
			rtabmap::CompressionThread ctDepth(depthBytes, true);
			rtabmap::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					rotVariance,
					transVariance,
					seq,
					stamp,
					userData);
			UDEBUG("Laser=%d RGB/Left=%d Depth=%d Right=%d",
					data.laserScan().empty()?0:1,
					data.image().empty()?0:1,
					data.depth().empty()?0:1,
					data.rightImage().empty()?0:1);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}
// One iteration of the IMU playback thread: throttles either to the fixed
// rate_ or to the recorded inter-sample delay, parses one CSV line of the IMU
// file (timestamp in ns, then gyro x/y/z, then accel x/y/z) and posts it as
// an IMUEvent.  Kills the thread when the file is exhausted.
void IMUThread::mainLoop()
{
	UTimer totalTime;
	UDEBUG("");

	if(rate_>0 || captureDelay_)
	{
		// Target period: fixed rate when set, otherwise the recorded delay.
		double delay = rate_>0?1000.0/double(rate_):1000.0f*captureDelay_;
		int sleepTime = delay - 1000.0f*frameRateTimer_.getElapsedTime();
		if(sleepTime > 2)
		{
			// Coarse sleep with a ~2 ms margin for the busy-wait below.
			uSleep(sleepTime-2);
		}

		// Add precision at the cost of a small overhead
		delay/=1000.0;
		while(frameRateTimer_.getElapsedTime() < delay-0.000001)
		{
			//
		}

		frameRateTimer_.start();
	}
	captureDelay_ = 0.0;

	std::string line;
	if (std::getline(imuFile_, line))
	{
		std::stringstream stream(line);
		std::string s;
		std::getline(stream, s, ',');
		// First field is a timestamp in nanoseconds: split it textually into
		// seconds and nanoseconds.
		// NOTE(review): assumes the field has at least 10 digits; a shorter
		// timestamp would make substr() throw -- confirm the input format.
		std::string nanoseconds = s.substr(s.size() - 9, 9);
		std::string seconds = s.substr(0, s.size() - 9);

		cv::Vec3d gyr;
		for (int j = 0; j < 3; ++j)
		{
			std::getline(stream, s, ',');
			gyr[j] = uStr2Double(s);
		}
		cv::Vec3d acc;
		for (int j = 0; j < 3; ++j)
		{
			std::getline(stream, s, ',');
			acc[j] = uStr2Double(s);
		}

		double stamp = double(uStr2Int(seconds)) + double(uStr2Int(nanoseconds))*1e-9;
		// Remember the recorded inter-sample delay so the next iteration can
		// reproduce the original timing when no fixed rate is set.
		if(previousStamp_>0 && stamp > previousStamp_)
		{
			captureDelay_ = stamp - previousStamp_;
		}
		previousStamp_ = stamp;

		IMU imu(gyr, cv::Mat(), acc, cv::Mat(), localTransform_);
		this->post(new IMUEvent(imu, stamp));
	}
	else if(!this->isKilled())
	{
		// End of file: stop the thread and signal downstream with an empty event.
		UWARN("no more imu data...");
		this->kill();
		this->post(new IMUEvent());
	}
}
// Runs intrinsic calibration for each camera (and extrinsic stereo calibration
// when stereo_) from the previously accumulated chessboard corner samples,
// stores the resulting CameraModel/StereoCameraModel members and fills the UI
// fields with K, D, R, P and the average re-projection errors.
void CalibrationDialog::calibrate()
{
	processingData_ = true;
	savedCalibration_ = false;

	QMessageBox mb(QMessageBox::Information,
			tr("Calibrating..."),
			tr("Operation in progress..."));
	mb.show();
	QApplication::processEvents();
	uSleep(100); // hack make sure the text in the QMessageBox is shown...
	QApplication::processEvents();

	std::vector<std::vector<cv::Point3f> > objectPoints(1);
	cv::Size boardSize(ui_->spinBox_boardWidth->value(), ui_->spinBox_boardHeight->value());
	float squareSize = ui_->doubleSpinBox_squareSize->value();
	// compute board corner positions
	for( int i = 0; i < boardSize.height; ++i )
		for( int j = 0; j < boardSize.width; ++j )
			objectPoints[0].push_back(cv::Point3f(float( j*squareSize ), float( i*squareSize ), 0));

	// Intrinsic calibration, one camera at a time (two cameras when stereo_).
	for(int id=0; id<(stereo_?2:1); ++id)
	{
		UINFO("Calibrating camera %d (samples=%d)", id, (int)imagePoints_[id].size());

		// Replicate the board model once per accumulated image sample.
		objectPoints.resize(imagePoints_[id].size(), objectPoints[0]);

		//calibrate
		std::vector<cv::Mat> rvecs, tvecs;
		std::vector<float> reprojErrs;
		cv::Mat K, D;
		K = cv::Mat::eye(3,3,CV_64FC1);
		UINFO("calibrate!");
		//Find intrinsic and extrinsic camera parameters
		double rms = cv::calibrateCamera(objectPoints,
				imagePoints_[id],
				imageSize_[id],
				K,
				D,
				rvecs,
				tvecs);

		UINFO("Re-projection error reported by calibrateCamera: %f", rms);

		// compute reprojection errors
		std::vector<cv::Point2f> imagePoints2;
		int i, totalPoints = 0;
		double totalErr = 0, err;
		reprojErrs.resize(objectPoints.size());

		for( i = 0; i < (int)objectPoints.size(); ++i )
		{
			cv::projectPoints( cv::Mat(objectPoints[i]), rvecs[i], tvecs[i], K, D, imagePoints2);
			err = cv::norm(cv::Mat(imagePoints_[id][i]), cv::Mat(imagePoints2), CV_L2);
			int n = (int)objectPoints[i].size();
			reprojErrs[i] = (float) std::sqrt(err*err/n);
			totalErr += err*err;
			totalPoints += n;
		}

		double totalAvgErr = std::sqrt(totalErr/totalPoints);

		UINFO("avg re projection error = %f", totalAvgErr);

		// Build a 3x4 projection matrix from the intrinsics.
		// NOTE(review): P is not zero-initialized; only element (2,3) and the
		// left 3x3 block (K) are written -- confirm the remaining 4th-column
		// entries are expected to be unspecified.
		cv::Mat P(3,4,CV_64FC1);
		P.at<double>(2,3) = 1;
		K.copyTo(P.colRange(0,3).rowRange(0,3));

		std::cout << "cameraMatrix = " << K << std::endl;
		std::cout << "distCoeffs = " << D << std::endl;
		std::cout << "width = " << imageSize_[id].width << std::endl;
		std::cout << "height = " << imageSize_[id].height << std::endl;

		models_[id] = CameraModel(cameraName_.toStdString(), imageSize_[id], K, D, cv::Mat::eye(3,3,CV_64FC1), P);

		// Mirror the result into the UI (plain fields for camera 0,
		// "_2" fields for camera 1).
		if(id == 0)
		{
			ui_->label_fx->setNum(models_[id].fx());
			ui_->label_fy->setNum(models_[id].fy());
			ui_->label_cx->setNum(models_[id].cx());
			ui_->label_cy->setNum(models_[id].cy());
			ui_->label_error->setNum(totalAvgErr);
			std::stringstream strK, strD, strR, strP;
			strK << models_[id].K();
			strD << models_[id].D();
			strR << models_[id].R();
			strP << models_[id].P();
			ui_->lineEdit_K->setText(strK.str().c_str());
			ui_->lineEdit_D->setText(strD.str().c_str());
			ui_->lineEdit_R->setText(strR.str().c_str());
			ui_->lineEdit_P->setText(strP.str().c_str());
		}
		else
		{
			ui_->label_fx_2->setNum(models_[id].fx());
			ui_->label_fy_2->setNum(models_[id].fy());
			ui_->label_cx_2->setNum(models_[id].cx());
			ui_->label_cy_2->setNum(models_[id].cy());
			ui_->label_error_2->setNum(totalAvgErr);
			std::stringstream strK, strD, strR, strP;
			strK << models_[id].K();
			strD << models_[id].D();
			strR << models_[id].R();
			strP << models_[id].P();
			ui_->lineEdit_K_2->setText(strK.str().c_str());
			ui_->lineEdit_D_2->setText(strD.str().c_str());
			ui_->lineEdit_R_2->setText(strR.str().c_str());
			ui_->lineEdit_P_2->setText(strP.str().c_str());
		}
	}

	// Extrinsic stereo calibration, only when both intrinsics succeeded.
	if(stereo_ && models_[0].isValid() && models_[1].isValid())
	{
		UINFO("stereo calibration (samples=%d)...", (int)stereoImagePoints_[0].size());
		cv::Size imageSize = imageSize_[0].width > imageSize_[1].width?imageSize_[0]:imageSize_[1];
		cv::Mat R, T, E, F;

		std::vector<std::vector<cv::Point3f> > objectPoints(1);
		cv::Size boardSize(ui_->spinBox_boardWidth->value(), ui_->spinBox_boardHeight->value());
		float squareSize = ui_->doubleSpinBox_squareSize->value();
		// compute board corner positions
		for( int i = 0; i < boardSize.height; ++i )
			for( int j = 0; j < boardSize.width; ++j )
				objectPoints[0].push_back(cv::Point3f(float( j*squareSize ), float( i*squareSize ), 0));
		objectPoints.resize(stereoImagePoints_[0].size(), objectPoints[0]);

		// calibrate extrinsic
		// (OpenCV 2 and 3 take the flags/termination-criteria arguments in a
		// different order.)
#if CV_MAJOR_VERSION < 3
		double rms = cv::stereoCalibrate(
				objectPoints,
				stereoImagePoints_[0],
				stereoImagePoints_[1],
				models_[0].K(), models_[0].D(),
				models_[1].K(), models_[1].D(),
				imageSize, R, T, E, F,
				cv::TermCriteria(cv::TermCriteria::COUNT+cv::TermCriteria::EPS, 100, 1e-5),
				cv::CALIB_FIX_INTRINSIC);
#else
		double rms = cv::stereoCalibrate(
				objectPoints,
				stereoImagePoints_[0],
				stereoImagePoints_[1],
				models_[0].K(), models_[0].D(),
				models_[1].K(), models_[1].D(),
				imageSize, R, T, E, F,
				cv::CALIB_FIX_INTRINSIC,
				cv::TermCriteria(cv::TermCriteria::COUNT+cv::TermCriteria::EPS, 100, 1e-5));
#endif
		UINFO("stereo calibration... done with RMS error=%f", rms);

		// Average epipolar error over all stereo samples.
		double err = 0;
		int npoints = 0;
		std::vector<cv::Vec3f> lines[2];
		UINFO("Computing avg re-projection error...");
		for(unsigned int i = 0; i < stereoImagePoints_[0].size(); i++ )
		{
			int npt = (int)stereoImagePoints_[0][i].size();
			cv::Mat imgpt[2];
			for(int k = 0; k < 2; k++ )
			{
				imgpt[k] = cv::Mat(stereoImagePoints_[k][i]);
				cv::undistortPoints(imgpt[k], imgpt[k], models_[k].K(), models_[k].D(), cv::Mat(), models_[k].K());
				computeCorrespondEpilines(imgpt[k], k+1, F, lines[k]);
			}
			for(int j = 0; j < npt; j++ )
			{
				double errij = fabs(stereoImagePoints_[0][i][j].x*lines[1][j][0] +
									stereoImagePoints_[0][i][j].y*lines[1][j][1] + lines[1][j][2]) +
							   fabs(stereoImagePoints_[1][i][j].x*lines[0][j][0] +
									stereoImagePoints_[1][i][j].y*lines[0][j][1] + lines[0][j][2]);
				err += errij;
			}
			npoints += npt;
		}
		double totalAvgErr = err/(double)npoints;
		UINFO("stereo avg re projection error = %f", totalAvgErr);

		// Compute rectification transforms for both cameras.
		cv::Mat R1, R2, P1, P2, Q;
		cv::Rect validRoi[2];

		cv::stereoRectify(models_[0].K(), models_[0].D(),
						models_[1].K(), models_[1].D(),
						imageSize, R, T, R1, R2, P1, P2, Q,
						cv::CALIB_ZERO_DISPARITY, 0, imageSize, &validRoi[0], &validRoi[1]);

		UINFO("Valid ROI1 = %d,%d,%d,%d  ROI2 = %d,%d,%d,%d newImageSize=%d/%d",
				validRoi[0].x, validRoi[0].y, validRoi[0].width, validRoi[0].height,
				validRoi[1].x, validRoi[1].y, validRoi[1].width, validRoi[1].height,
				imageSize.width, imageSize.height);

		if(imageSize_[0].width == imageSize_[1].width)
		{
			//Stereo, keep new extrinsic projection matrix
			stereoModel_ = StereoCameraModel(
					cameraName_.toStdString(),
					imageSize_[0], models_[0].K(), models_[0].D(), R1, P1,
					imageSize_[1], models_[1].K(), models_[1].D(), R2, P2,
					R, T, E, F);
		}
		else
		{
			//Kinect
			stereoModel_ = StereoCameraModel(
					cameraName_.toStdString(),
					imageSize_[0], models_[0].K(), models_[0].D(), cv::Mat::eye(3,3,CV_64FC1), models_[0].P(),
					imageSize_[1], models_[1].K(), models_[1].D(), cv::Mat::eye(3,3,CV_64FC1), models_[1].P(),
					R, T, E, F);
		}

		std::stringstream strR1, strP1, strR2, strP2;
		strR1 << stereoModel_.left().R();
		strP1 << stereoModel_.left().P();
		strR2 << stereoModel_.right().R();
		strP2 << stereoModel_.right().P();
		ui_->lineEdit_R->setText(strR1.str().c_str());
		ui_->lineEdit_P->setText(strP1.str().c_str());
		ui_->lineEdit_R_2->setText(strR2.str().c_str());
		ui_->lineEdit_P_2->setText(strP2.str().c_str());

		ui_->label_baseline->setNum(stereoModel_.baseline());
		//ui_->label_error_stereo->setNum(totalAvgErr);
	}

	// Enable the UI actions matching what was successfully calibrated.
	if(stereo_ && stereoModel_.left().isValid() && stereoModel_.right().isValid()&&
	   (!ui_->label_baseline->isVisible() || stereoModel_.baseline() > 0.0))
	{
		ui_->radioButton_rectified->setEnabled(true);
		ui_->radioButton_stereoRectified->setEnabled(true);
		ui_->radioButton_stereoRectified->setChecked(true);
		ui_->pushButton_save->setEnabled(true);
	}
	else if(models_[0].isValid())
	{
		ui_->radioButton_rectified->setEnabled(true);
		ui_->radioButton_rectified->setChecked(true);
		ui_->pushButton_save->setEnabled(!stereo_);
	}
	UINFO("End calibration");
	processingData_ = false;
}
int main(void) { int ret; ShmSBuf_t buf_in; /* type of element for slide buffer */ struct my_struct_s { uint64_t a; uint32_t b, c; } my_struct; char i = 0; (void)signal(SIGINT, signal_handler); (void)signal(SIGTERM, signal_handler); /* Buffer initialization: * buf_in - simulates transmitting process */ if ((ret = shm_sbuf_init(&buf_in, 0x05, NMEMB, sizeof(struct my_struct_s))) >= 0) { printf("buf_in init succeded with %d\t%s\n", ret, shm_sbuf_error(ret)); } else { printf("shm_sbuf_init failed with %d\t%s\n", ret, shm_sbuf_error(ret)); printf("shmsbuf_err: %s\n", buf_in.err_msg); return -1; } for (; !g_stop; ++i) { /* Generate new data */ my_struct.a = i*100; my_struct.b = i*10; my_struct.c = i+1; /* Write data to shm buffer */ if ((ret = buf_in.add(&buf_in, &my_struct, 1)) < 0) { printf("buf.add failed with %d\t%s\n", ret, shm_sbuf_error(ret)); printf("shmsbuf_err: %s\n", buf_in.err_msg); break; } uSleep(500000); } /* Deinitialize shm buffer */ if ((ret = buf_in.deinit(&buf_in)) >= 0) { printf("buf_in.deinit succeded with %d\t%s\n", ret, shm_sbuf_error(ret)); } else { printf("buf_in.deinit failed with %d\t%s\n", ret, shm_sbuf_error(ret)); printf("shmsbuf_err: %s\n", buf_in.err_msg); } return 0; }
// Posts the next database node as an OdometryEvent (or CameraEvent when
// odometry is ignored). User data matching "GOAL:<id>" is stripped and
// re-posted as a goal command, optionally reproducing the original planning
// delay. When the database is exhausted, switches to the next database in
// _paths or kills the thread.
void DBReader::mainLoop()
{
	OdometryEvent odom = this->getNextData();
	if(odom.data().id())
	{
		std::string goalId;
		double previousStamp = odom.data().stamp();
		if(previousStamp == 0)
		{
			// No stamp saved in the database: use the current time.
			odom.data().setStamp(UTimer::now());
		}

		if(!_goalsIgnored &&
		   odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					goalId = *strs.rbegin();
					odom.data().setUserData(cv::Mat());
				}
			}
		}

		if(!_odometryIgnored)
		{
			if(odom.pose().isNull())
			{
				UWARN("Reading the database: odometry is null! "
					  "Please set \"Ignore odometry = true\" if there is "
					  "no odometry in the database.");
			}
			this->post(new OdometryEvent(odom));
		}
		else
		{
			this->post(new CameraEvent(odom.data()));
		}

		if(!goalId.empty())
		{
			double delay = 0.0;
			if(!_ignoreGoalDelay && _currentId != _ids.end())
			{
				// get stamp for the next signature to compute the delay
				// that was used originally for planning
				int weight;
				std::string label;
				double stamp;
				int mapId;
				Transform localTransform, pose;
				_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp);
				if(previousStamp && stamp && stamp > previousStamp)
				{
					delay = stamp - previousStamp;
				}
			}

			if(delay > 0.0)
			{
				UWARN("Goal \"%s\" detected, posting it! Waiting %f seconds before sending next data...",
						goalId.c_str(), delay);
			}
			else
			{
				UWARN("Goal \"%s\" detected, posting it!", goalId.c_str());
			}

			// Numeric gospecifier -> goal by node id, otherwise goal by label.
			if(uIsInteger(goalId))
			{
				this->post(new RtabmapEventCmd(RtabmapEventCmd::kCmdGoal, atoi(goalId.c_str())));
			}
			else
			{
				this->post(new RtabmapEventCmd(RtabmapEventCmd::kCmdGoal, goalId));
			}

			if(delay > 0.0)
			{
				// Reproduce the planning pause recorded in the database.
				uSleep(delay*1000);
			}
		}
	}
	else if(!this->isKilled())
	{
		UINFO("no more images...");
		if(_paths.size() > 1)
		{
			// More than one database queued: drop the finished one and open
			// the next.
			_paths.pop_front();
			UWARN("Loading next database \"%s\"...", _paths.front().c_str());
			if(!this->init())
			{
				UERROR("Failed to initialize the next database \"%s\"", _paths.front().c_str());
				this->kill();
				this->post(new CameraEvent());
			}
		}
		else
		{
			this->kill();
			this->post(new CameraEvent());
		}
	}
}
int main(int argc, char** argv) { ros::init(argc, argv, "data_player"); //ULogger::setType(ULogger::kTypeConsole); //ULogger::setLevel(ULogger::kDebug); //ULogger::setEventLevel(ULogger::kWarning); ros::NodeHandle nh; ros::NodeHandle pnh("~"); std::string frameId = "base_link"; std::string odomFrameId = "odom"; std::string cameraFrameId = "camera_optical_link"; double rate = 1.0f; std::string databasePath = ""; bool publishTf = true; int startId = 0; pnh.param("frame_id", frameId, frameId); pnh.param("odom_frame_id", odomFrameId, odomFrameId); pnh.param("camera_frame_id", cameraFrameId, cameraFrameId); pnh.param("rate", rate, rate); pnh.param("database", databasePath, databasePath); pnh.param("publish_tf", publishTf, publishTf); pnh.param("start_id", startId, startId); ROS_INFO("frame_id = %s", frameId.c_str()); ROS_INFO("odom_frame_id = %s", odomFrameId.c_str()); ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str()); ROS_INFO("database = %s", databasePath.c_str()); ROS_INFO("rate = %f", rate); ROS_INFO("publish_tf = %s", publishTf?"true":"false"); rtabmap::DBReader reader(databasePath, rate); if(databasePath.empty()) { ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database)."); return -1; } if(!reader.init(startId)) { ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str()); return -1; } ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback); ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback); image_transport::ImageTransport it(nh); image_transport::Publisher imagePub; image_transport::Publisher rgbPub; image_transport::Publisher depthPub; image_transport::Publisher leftPub; image_transport::Publisher rightPub; ros::Publisher rgbCamInfoPub; ros::Publisher depthCamInfoPub; ros::Publisher leftCamInfoPub; ros::Publisher rightCamInfoPub; ros::Publisher odometryPub; tf::TransformBroadcaster tfBroadcaster; rtabmap::SensorData data = reader.getNextData(); while(ros::ok() && !data.isValid()) { 
ROS_INFO("Reading sensor data %d...", data.id()); ros::Time time = ros::Time::now(); sensor_msgs::CameraInfo camInfoA; //rgb or left sensor_msgs::CameraInfo camInfoB; //depth or right camInfoA.K.assign(0); camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1; camInfoA.R.assign(0); camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1; camInfoA.P.assign(0); camInfoA.P[10] = 1; camInfoA.header.frame_id = cameraFrameId; camInfoA.header.stamp = time; camInfoB = camInfoA; int type = -1; if(!data.depth().empty() && (data.depth().type() == CV_32FC1 || data.depth().type() == CV_16UC1)) { //depth camInfoA.D.resize(5,0); camInfoA.P[0] = data.fx(); camInfoA.K[0] = data.fx(); camInfoA.P[5] = data.fy(); camInfoA.K[4] = data.fy(); camInfoA.P[2] = data.cx(); camInfoA.K[2] = data.cx(); camInfoA.P[6] = data.cy(); camInfoA.K[5] = data.cy(); camInfoB = camInfoA; type=0; if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1); if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1); if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1); if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1); } else if(!data.rightImage().empty() && data.rightImage().type() == CV_8U) { //stereo camInfoA.D.resize(8,0); camInfoA.P[0] = data.fx(); camInfoA.K[0] = data.fx(); camInfoA.P[5] = data.fx(); // fx = fy camInfoA.K[4] = data.fx(); // fx = fy camInfoA.P[2] = data.cx(); camInfoA.K[2] = data.cx(); camInfoA.P[6] = data.cy(); camInfoA.K[5] = data.cy(); camInfoB = camInfoA; camInfoB.P[3] = data.baseline()*-data.fx(); // Right_Tx = -baseline*fx type=1; if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1); if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1); if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1); 
if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1); } else { if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1); } camInfoA.height = data.image().rows; camInfoA.width = data.image().cols; camInfoB.height = data.depthOrRightImage().rows; camInfoB.width = data.depthOrRightImage().cols; // publish transforms first if(publishTf) { ros::Time tfExpiration = time + ros::Duration(1.0/rate); if(!data.localTransform().isNull()) { tf::Transform baseToCamera; rtabmap_ros::transformToTF(data.localTransform(), baseToCamera); tfBroadcaster.sendTransform( tf::StampedTransform (baseToCamera, tfExpiration, frameId, cameraFrameId)); } if(!data.pose().isNull()) { tf::Transform odomToBase; rtabmap_ros::transformToTF(data.pose(), odomToBase); tfBroadcaster.sendTransform( tf::StampedTransform (odomToBase, tfExpiration, odomFrameId, frameId)); } } if(!data.pose().isNull()) { if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1); if(odometryPub.getNumSubscribers()) { nav_msgs::Odometry odom; odom.child_frame_id = frameId; odom.header.frame_id = odomFrameId; odom.header.stamp = time; rtabmap_ros::transformToPoseMsg(data.pose(), odom.pose.pose); odom.pose.covariance[0] = data.poseVariance(); odom.pose.covariance[7] = data.poseVariance(); odom.pose.covariance[14] = data.poseVariance(); odom.pose.covariance[21] = data.poseVariance(); odom.pose.covariance[28] = data.poseVariance(); odom.pose.covariance[35] = data.poseVariance(); odometryPub.publish(odom); } } if(type >= 0) { if(rgbCamInfoPub.getNumSubscribers() && type == 0) { rgbCamInfoPub.publish(camInfoA); } if(leftCamInfoPub.getNumSubscribers() && type == 1) { leftCamInfoPub.publish(camInfoA); } if(depthCamInfoPub.getNumSubscribers() && type == 0) { depthCamInfoPub.publish(camInfoB); } if(rightCamInfoPub.getNumSubscribers() && type == 1) { rightCamInfoPub.publish(camInfoB); } } if(imagePub.getNumSubscribers() || 
rgbPub.getNumSubscribers() || leftPub.getNumSubscribers()) { cv_bridge::CvImage img; if(data.image().channels() == 1) { img.encoding = sensor_msgs::image_encodings::MONO8; } else { img.encoding = sensor_msgs::image_encodings::BGR8; } img.image = data.image(); sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg(); imageRosMsg->header.frame_id = cameraFrameId; imageRosMsg->header.stamp = time; if(imagePub.getNumSubscribers()) { imagePub.publish(imageRosMsg); } if(rgbPub.getNumSubscribers() && type == 0) { rgbPub.publish(imageRosMsg); } if(leftPub.getNumSubscribers() && type == 1) { leftPub.publish(imageRosMsg); leftCamInfoPub.publish(camInfoA); } } if(depthPub.getNumSubscribers() && !data.depth().empty() && type==0) { cv_bridge::CvImage img; if(data.depth().type() == CV_32FC1) { img.encoding = sensor_msgs::image_encodings::TYPE_32FC1; } else { img.encoding = sensor_msgs::image_encodings::TYPE_16UC1; } img.image = data.depth(); sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg(); imageRosMsg->header.frame_id = cameraFrameId; imageRosMsg->header.stamp = time; depthPub.publish(imageRosMsg); depthCamInfoPub.publish(camInfoB); } if(rightPub.getNumSubscribers() && !data.rightImage().empty() && type==1) { cv_bridge::CvImage img; img.encoding = sensor_msgs::image_encodings::MONO8; img.image = data.rightImage(); sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg(); imageRosMsg->header.frame_id = cameraFrameId; imageRosMsg->header.stamp = time; rightPub.publish(imageRosMsg); rightCamInfoPub.publish(camInfoB); } ros::spinOnce(); while(ros::ok() && paused) { uSleep(100); ros::spinOnce(); } data = reader.getNextData(); } return 0; }
// Reads the next node from the database and returns it as an OdometryEvent.
// Throttling: _frameRate < 0 replays the database stamps (capped to 10 s
// between frames), _frameRate > 0 replays at a fixed rate. Returns an empty
// event when not initialized, killed, or out of ids.
OdometryEvent DBReader::getNextData()
{
	OdometryEvent odom;
	if(_dbDriver)
	{
		if(!this->isKilled() && _currentId != _ids.end())
		{
			int mapId;
			SensorData data;
			_dbDriver->getNodeData(*_currentId, data);

			// info
			Transform pose;
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp);

			cv::Mat infMatrix = cv::Mat::eye(6,6,CV_64FC1);
			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					infMatrix = links.begin()->second.infMatrix();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(data.imageCompressed().empty())
			{
				// BUGFIX: log the id that was just read (seq). _currentId has
				// already been incremented and may be _ids.end() here, so
				// dereferencing it logged the wrong id (or invalid memory).
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			// Frame rate
			if(_frameRate < 0.0f)
			{
				// Negative framerate: reproduce the delays recorded in the
				// database stamps (same map only).
				if(stamp == 0)
				{
					UERROR("The option to use database stamps is set (framerate<0), but there are no stamps saved in the database! Aborting...");
					this->kill();
				}
				else if(_previousMapID == mapId && _previousStamp > 0)
				{
					int sleepTime = 1000.0*(stamp-_previousStamp) - 1000.0*_timer.getElapsedTime();
					if(sleepTime > 10000)
					{
						UWARN("Detected long delay (%d sec, stamps = %f vs %f). Waiting a maximum of 10 seconds.",
								sleepTime/1000, _previousStamp, stamp);
						sleepTime = 10000;
					}
					if(sleepTime > 2)
					{
						uSleep(sleepTime-2);
					}

					// Add precision at the cost of a small overhead
					while(_timer.getElapsedTime() < (stamp-_previousStamp)-0.000001)
					{
						//
					}

					double slept = _timer.getElapsedTime();
					_timer.start();
					UDEBUG("slept=%fs vs target=%fs", slept, stamp-_previousStamp);
				}
				_previousStamp = stamp;
				_previousMapID = mapId;
			}
			else if(_frameRate>0.0f)
			{
				// Fixed-rate replay: coarse sleep then busy-wait for precision.
				int sleepTime = (1000.0f/_frameRate - 1000.0f*_timer.getElapsedTime());
				if(sleepTime > 2)
				{
					uSleep(sleepTime-2);
				}

				// Add precision at the cost of a small overhead
				while(_timer.getElapsedTime() < 1.0/double(_frameRate)-0.000001)
				{
					//
				}

				double slept = _timer.getElapsedTime();
				_timer.start();
				UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_frameRate));
			}

			if(!this->isKilled())
			{
				data.uncompressData();
				data.setId(seq);
				data.setStamp(stamp);
				UDEBUG("Laser=%d RGB/Left=%d Depth/Right=%d, UserData=%d",
						data.laserScanRaw().empty()?0:1,
						data.imageRaw().empty()?0:1,
						data.depthOrRightRaw().empty()?0:1,
						data.userDataRaw().empty()?0:1);
				odom = OdometryEvent(data, pose, infMatrix.inv());
			}
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return odom;
}
// Standalone visual/ICP odometry tool: parses the command-line options,
// builds the requested Odometry estimator (BOW / optical flow / mono / ICP),
// then runs it either on a database replay (-db) or on a live RGB-D/stereo
// camera driver (-driver 0..7), displaying the result in an OdometryViewer.
int main (int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	// parse arguments
	float rate = 0.0;
	std::string inputDatabase;
	int driver = 0;
	int odomType = rtabmap::Parameters::defaultOdomFeatureType();
	bool icp = false;
	bool flow = false;
	bool mono = false;
	int nnType = rtabmap::Parameters::defaultOdomBowNNType();
	float nndr = rtabmap::Parameters::defaultOdomBowNNDR();
	float distance = rtabmap::Parameters::defaultOdomInlierDistance();
	int maxWords = rtabmap::Parameters::defaultOdomMaxFeatures();
	int minInliers = rtabmap::Parameters::defaultOdomMinInliers();
	float maxDepth = rtabmap::Parameters::defaultOdomMaxDepth();
	int iterations = rtabmap::Parameters::defaultOdomIterations();
	int resetCountdown = rtabmap::Parameters::defaultOdomResetCountdown();
	int decimation = 4;
	float voxel = 0.005;
	int samples = 10000;
	float ratio = 0.7f;
	int maxClouds = 10;
	int briefBytes = rtabmap::Parameters::defaultBRIEFBytes();
	int fastThr = rtabmap::Parameters::defaultFASTThreshold();
	float sec = 0.0f;
	bool gpu = false;
	int localHistory = rtabmap::Parameters::defaultOdomBowLocalHistorySize();
	bool p2p = rtabmap::Parameters::defaultOdomPnPEstimation();

	// Each option consumes its value argument and validates the range;
	// any malformed input falls through to showUsage().
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "-driver") == 0) { ++i; if(i < argc) { driver = std::atoi(argv[i]); if(driver < 0 || driver > 7) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-o") == 0) { ++i; if(i < argc) { odomType = std::atoi(argv[i]); if(odomType < 0 || odomType > 6) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-nn") == 0) { ++i; if(i < argc) { nnType = std::atoi(argv[i]); if(nnType < 0 || nnType > 4) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-nndr") == 0) { ++i; if(i < argc) { nndr = uStr2Float(argv[i]); if(nndr < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-hz") == 0) { ++i; if(i < argc) { rate = uStr2Float(argv[i]); if(rate < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-db") == 0) { ++i; if(i < argc) { inputDatabase = argv[i]; if(UFile::getExtension(inputDatabase).compare("db") != 0) { printf("Database path (%s) should end with \"db\" \n", inputDatabase.c_str()); showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-clouds") == 0) { ++i; if(i < argc) { maxClouds = std::atoi(argv[i]); if(maxClouds < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-sec") == 0) { ++i; if(i < argc) { sec = uStr2Float(argv[i]); if(sec < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-in") == 0) { ++i; if(i < argc) { distance = uStr2Float(argv[i]); if(distance <= 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-max") == 0) { ++i; if(i < argc) { maxWords = std::atoi(argv[i]); if(maxWords < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-min") == 0) { ++i; if(i < argc) { minInliers = std::atoi(argv[i]); if(minInliers < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-depth") == 0) { ++i; if(i < argc) { maxDepth = uStr2Float(argv[i]); if(maxDepth < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-i") == 0) { ++i; if(i < argc) { iterations = std::atoi(argv[i]); if(iterations <= 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-reset") == 0) { ++i; if(i < argc) { resetCountdown = std::atoi(argv[i]); if(resetCountdown < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-d") == 0) { ++i; if(i < argc) { decimation = std::atoi(argv[i]); if(decimation < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-v") == 0) { ++i; if(i < argc) { voxel = uStr2Float(argv[i]); if(voxel < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-s") == 0) { ++i; if(i < argc) { samples = std::atoi(argv[i]); if(samples < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-cr") == 0) { ++i; if(i < argc) { ratio = uStr2Float(argv[i]); if(ratio < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-gpu") == 0) { gpu = true; continue; }
		if(strcmp(argv[i], "-lh") == 0) { ++i; if(i < argc) { localHistory = std::atoi(argv[i]); if(localHistory < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-brief_bytes") == 0) { ++i; if(i < argc) { briefBytes = std::atoi(argv[i]); if(briefBytes < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-fast_thr") == 0) { ++i; if(i < argc) { fastThr = std::atoi(argv[i]); if(fastThr < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-icp") == 0) { icp = true; continue; }
		if(strcmp(argv[i], "-flow") == 0) { flow = true; continue; }
		if(strcmp(argv[i], "-mono") == 0) { mono = true; continue; }
		if(strcmp(argv[i], "-p2p") == 0) { p2p = true; continue; }
		if(strcmp(argv[i], "-debug") == 0) { ULogger::setLevel(ULogger::kDebug); ULogger::setPrintTime(true); ULogger::setPrintWhere(true); continue; }
		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	// Binary descriptors cannot be matched with a KdTree index, and float
	// descriptors cannot be matched with LSH.
	if(odomType > 1 && nnType == rtabmap::VWDictionary::kNNFlannKdTree)
	{
		UERROR("You set \"-o %d\" (binary descriptor), you must use \"-nn 2\" (any \"-nn\" other than kNNFlannKdTree)", odomType);
		showUsage();
	}
	else if(odomType <= 1 && nnType == rtabmap::VWDictionary::kNNFlannLSH)
	{
		UERROR("You set \"-o %d\" (float descriptor), you must use \"-nn 1\" (any \"-nn\" other than kNNFlannLSH)", odomType);
		showUsage();
	}

	if(inputDatabase.size())
	{
		UINFO("Using database input \"%s\"", inputDatabase.c_str());
	}
	else
	{
		UINFO("Using OpenNI camera");
	}

	// Human-readable names for logging.
	std::string odomName;
	if(odomType == 0) { odomName = "SURF"; }
	else if(odomType == 1) { odomName = "SIFT"; }
	else if(odomType == 2) { odomName = "ORB"; }
	else if(odomType == 3) { odomName = "FAST+FREAK"; }
	else if(odomType == 4) { odomName = "FAST+BRIEF"; }
	else if(odomType == 5) { odomName = "GFTT+FREAK"; }
	else if(odomType == 6) { odomName = "GFTT+BRIEF"; }
	else if(odomType == 7) { odomName = "BRISK"; }
	if(icp) { odomName= "ICP"; }
	if(flow) { odomName= "Optical Flow"; }

	std::string nnName;
	if(nnType == 0) { nnName = "kNNFlannLinear"; }
	else if(nnType == 1) { nnName = "kNNFlannKdTree"; }
	else if(nnType == 2) { nnName= "kNNFlannLSH"; }
	else if(nnType == 3) { nnName= "kNNBruteForce"; }
	else if(nnType == 4) { nnName= "kNNBruteForceGPU"; }

	UINFO("Odometry used =           %s", odomName.c_str());
	UINFO("Camera rate =             %f Hz", rate);
	UINFO("Maximum clouds shown =    %d", maxClouds);
	UINFO("Delay =                   %f s", sec);
	UINFO("Max depth =               %f", maxDepth);
	UINFO("Reset odometry coutdown = %d", resetCountdown);

	QApplication app(argc, argv);

	rtabmap::Odometry * odom = 0;

	rtabmap::ParametersMap parameters;

	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxDepth(), uNumber2Str(maxDepth)));
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomResetCountdown(), uNumber2Str(resetCountdown)));

	// Build the odometry estimator matching the selected mode.
	if(!icp)
	{
		UINFO("Min inliers =             %d", minInliers);
		UINFO("Inlier maximum correspondences distance = %f", distance);
		UINFO("RANSAC iterations =       %d", iterations);
		UINFO("Max features =            %d", maxWords);
		UINFO("GPU =                     %s", gpu?"true":"false");
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomInlierDistance(), uNumber2Str(distance)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMinInliers(), uNumber2Str(minInliers)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), uNumber2Str(iterations)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxFeatures(), uNumber2Str(maxWords)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomFeatureType(), uNumber2Str(odomType)));
		// Feature-type-specific tuning.
		if(odomType == 0)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kSURFGpuVersion(), uBool2Str(gpu)));
		}
		if(odomType == 2)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kORBGpu(), uBool2Str(gpu)));
		}
		if(odomType == 3 || odomType == 4)
		{
			UINFO("FAST threshold =          %d", fastThr);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTThreshold(), uNumber2Str(fastThr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTGpu(), uBool2Str(gpu)));
		}
		if(odomType == 4 || odomType == 6)
		{
			UINFO("BRIEF bytes =             %d", briefBytes);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kBRIEFBytes(), uNumber2Str(briefBytes)));
		}

		if(flow)
		{
			// Optical Flow
			odom = new rtabmap::OdometryOpticalFlow(parameters);
		}
		else
		{
			//BOW
			UINFO("Nearest neighbor =         %s", nnName.c_str());
			UINFO("Nearest neighbor ratio =  %f", nndr);
			UINFO("Local history =           %d", localHistory);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNType(), uNumber2Str(nnType)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNDR(), uNumber2Str(nndr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowLocalHistorySize(), uNumber2Str(localHistory)));

			if(mono)
			{
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPFlags(), uNumber2Str(0))); //CV_ITERATIVE
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPReprojError(), "4.0"));
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), "100"));
				odom = new rtabmap::OdometryMono(parameters);
			}
			else
			{
				odom = new rtabmap::OdometryBOW(parameters);
			}
		}
	}
	else if(icp) // ICP
	{
		UINFO("ICP maximum correspondences distance = %f", distance);
		UINFO("ICP iterations =          %d", iterations);
		UINFO("Cloud decimation =        %d", decimation);
		UINFO("Cloud voxel size =        %f", voxel);
		UINFO("Cloud samples =           %d", samples);
		UINFO("Cloud correspondence ratio = %f", ratio);
		UINFO("Cloud point to plane =    %s", p2p?"false":"true");
		odom = new rtabmap::OdometryICP(decimation, voxel, samples, distance, iterations, ratio, !p2p);
	}

	rtabmap::OdometryThread odomThread(odom);
	rtabmap::OdometryViewer odomViewer(maxClouds, 2, 0.0, 50);
	UEventsManager::addHandler(&odomThread);
	UEventsManager::addHandler(&odomViewer);

	odomViewer.setWindowTitle("Odometry view");
	odomViewer.resize(1280, 480+QPushButton().minimumHeight());

	if(inputDatabase.size())
	{
		// Database replay mode.
		rtabmap::DBReader camera(inputDatabase, rate, true);
		if(camera.init())
		{
			odomThread.start();

			if(sec > 0)
			{
				uSleep(sec*1000);
			}

			camera.start();

			app.exec();

			camera.join(true);
			odomThread.join(true);
		}
	}
	else
	{
		// Live camera mode: instantiate the selected driver (each one checks
		// that rtabmap was built with the corresponding support).
		rtabmap::CameraRGBD * camera = 0;
		rtabmap::Transform t=rtabmap::Transform(0,0,1,0, -1,0,0,0, 0,-1,0,0);
		if(driver == 0) { camera = new rtabmap::CameraOpenni("", rate, t); }
		else if(driver == 1) { if(!rtabmap::CameraOpenNI2::available()) { UERROR("Not built with OpenNI2 support..."); exit(-1); } camera = new rtabmap::CameraOpenNI2("", rate, t); }
		else if(driver == 2) { if(!rtabmap::CameraFreenect::available()) { UERROR("Not built with Freenect support..."); exit(-1); } camera = new rtabmap::CameraFreenect(0, rate, t); }
		else if(driver == 3) { if(!rtabmap::CameraOpenNICV::available()) { UERROR("Not built with OpenNI from OpenCV support..."); exit(-1); } camera = new rtabmap::CameraOpenNICV(false, rate, t); }
		else if(driver == 4) { if(!rtabmap::CameraOpenNICV::available()) { UERROR("Not built with OpenNI from OpenCV support..."); exit(-1); } camera = new rtabmap::CameraOpenNICV(true, rate, t); }
		else if(driver == 5) { if(!rtabmap::CameraFreenect2::available()) { UERROR("Not built with Freenect2 support..."); exit(-1); } camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD, rate, t); }
		else if(driver == 6) { if(!rtabmap::CameraStereoDC1394::available()) { UERROR("Not built with dc1394 support..."); exit(-1); } camera = new rtabmap::CameraStereoDC1394(rate, t); }
		else if(driver == 7) { if(!rtabmap::CameraStereoFlyCapture2::available()) { UERROR("Not built with FlyCapture2/Triclops support..."); exit(-1); } camera = new rtabmap::CameraStereoFlyCapture2(rate, t); }
		else { UFATAL("Camera driver (%d) not found!", driver); }

		//pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);

		if(camera->init())
		{
			if(camera->isCalibrated())
			{
				rtabmap::CameraThread cameraThread(camera);

				odomThread.start();
				cameraThread.start();

				odomViewer.exec();

				cameraThread.join(true);
				odomThread.join(true);
			}
			else
			{
				printf("The camera is not calibrated! You should calibrate the camera first.\n");
				delete camera;
			}
		}
		else
		{
			printf("Failed to initialize the camera! Please select another driver (see \"--help\").\n");
			delete camera;
		}
	}

	return 0;
}
/********************************* Fonction RecordFichier Permet d'enregistrer un fichier WAV *********************************/ void UAudioCaptureMic::mainLoop() { if(!_sound) { UERROR("Recorder is not initialized."); this->kill(); return; } FMOD_RESULT result; void *ptr1 = 0, *ptr2 = 0; int blockLength; unsigned int len1 = 0, len2 = 0; unsigned int recordPos = 0; result = UAudioSystem::getRecordPosition(_driver, &recordPos); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result)); if (recordPos != _lastRecordPos) { blockLength = (int)recordPos - (int)_lastRecordPos; if (blockLength < 0) { blockLength += _soundLength; } // * exinfo.numchannels * 2 = stereo 16bit. 1 sample = 4 bytes. // Lock the sound to get access to the raw data. FMOD_Sound_Lock(_sound, _lastRecordPos * channels() * bytesPerSample(), blockLength * channels() * bytesPerSample(), &ptr1, &ptr2, &len1, &len2); { if (ptr1 && len1) // Write it to disk. { if(_fp) { //write to file _dataLength += fwrite(ptr1, 1, len1, _fp); } // push back in the frames buffer pushBackSamples(ptr1, len1); } if (ptr2 && len2) // Write it to disk. { if(_fp) { //write to file _dataLength += fwrite(ptr2, 1, len2, _fp); } // push back in the frames buffer pushBackSamples(ptr2, len2); } } //Unlock the sound to allow FMOD to use it again. FMOD_Sound_Unlock(_sound, ptr1, ptr2, len1, len2); _lastRecordPos = recordPos; } UAudioSystem::update(); uSleep(10); // If we are recording to a file, make sure to stop // when the maximum file size is reached if (_fp && _maxFileSize != 0 && int(_dataLength) + frameLength()*bytesPerSample() > _maxFileSize) { UWARN("Recording max memory reached (%d Mb)... stopped", _maxFileSize/1000000); this->kill(); } }
// Throttle emulation speed to real time, optionally in fractional steps:
// blocks until the wall-clock target for `part` of `totalparts` sub-frame
// intervals is reached. With totalparts == 1 this is a whole-frame throttle;
// larger values spread the wait across the frame (used for fractional vsync).
// Relies on globals: this_frame_base, frameskip_counter, video_fps, and the
// VPinMAME sync/sound-adjust state.
void throttle_speed_part(int part, int totalparts)
{
	// Estimated timer ticks per millisecond of sleep; refined at runtime
	// in the non-VPINMAME branch below.
	static double ticks_per_sleep_msec = 0;
	cycles_t target, curr, cps;

#ifdef VPINMAME
	// Signal the external throttle event. g_iSyncFactor accumulates in
	// 1/1024 steps so the event fires at the configured fraction of frames.
	if ((g_hEnterThrottle != INVALID_HANDLE_VALUE) && g_iSyncFactor) {
		if (g_iSyncFactor >= 1024)
			SetEvent(g_hEnterThrottle);
		else {
			iCurrentSyncValue += g_iSyncFactor;
			if (iCurrentSyncValue >= 1024) {
				SetEvent(g_hEnterThrottle);
				iCurrentSyncValue -= 1024;
			}
		}
	}
#endif

	//// if we're only syncing to the refresh, bail now
	//if (win_sync_refresh)
	//	return;

	// this counts as idle time
	profiler_mark(PROFILER_IDLE);

	// get the current time and the target time
	curr = osd_cycles();
	cps = osd_cycles_per_second();
	target = this_frame_base + (int)((double)frameskip_counter * (double)cps / video_fps);

	// If we are throttling to a fractional vsync, adjust target to the partial target.
	if (totalparts != 1)
	{
		// Meh.  The points in the code where frameskip counter gets updated is different from where the frame base is
		// reset.  Makes this delay computation complicated.
		if (frameskip_counter == 0)
			target += (int)((double)(FRAMESKIP_LEVELS) * (double)cps / video_fps);
		// MAGIC: Experimentation with actual results show the most even distribution if I throttle to 1/7th increments at each 25% timestep.
		target -= ((cycles_t)((double)cps / (video_fps * (totalparts + 3)))) * (totalparts - part + 3);
	}

	// initialize the ticks per sleep
	if (ticks_per_sleep_msec == 0)
		ticks_per_sleep_msec = (double)cps / 1000.;

	// Adjust target for sound catchup
	if (g_iThrottleAdj)
	{
		target -= (cycles_t)(g_iThrottleAdj*ticks_per_sleep_msec);
	}

	// sync: we are ahead of schedule, so wait until the target time.
	// NOTE(review): "curr - target < 0" assumes cycles_t is a signed type —
	// with an unsigned cycles_t this comparison would never be true; confirm
	// against the osdepend typedef.
	if (curr - target < 0)
	{
#ifdef DEBUG_THROTTLE
		{
			char tmp[91];
			sprintf(tmp, "Throt: part %d of %d FS: %d Delta: %lld\n", part, totalparts, frameskip_counter, curr - target);
			OutputDebugString(tmp);
		}
#endif

		// loop until we reach the target time
		while (curr - target < 0)
		{
#if 1 // VPINMAME
			//if((INT64)((target - curr)/(ticks_per_sleep_msec*1.1))-1 > 0) // pessimistic estimate of stuff below, but still stutters then
			//	uSleep((UINT64)((target - curr)*1000/(ticks_per_sleep_msec*1.1))-1);
			// For partial throttles undersleeping is preferred (the next part
			// will absorb the remainder); for whole frames oversleep instead.
			if (totalparts > 1)
				uUnderSleep((UINT64)((target - curr) * 1000 / ticks_per_sleep_msec)); // will sleep too short
			else
				uOverSleep((UINT64)((target - curr) * 1000 / ticks_per_sleep_msec)); // will sleep too long
#else
			// if we have enough time to sleep, do it
			// ...but not if we're autoframeskipping and we're behind
			if (allow_sleep && (!autoframeskip || frameskip == 0) &&
				(target - curr) > (cycles_t)(ticks_per_sleep_msec * 1.1))
			{
				cycles_t next;

				// keep track of how long we actually slept
				uSleep(100); //1000?
				next = osd_cycles();
				// exponential moving average of the measured tick cost of a sleep
				ticks_per_sleep_msec = (ticks_per_sleep_msec * 0.90) + ((double)(next - curr) * 0.10);
				curr = next;
			}
			else
#endif
			{
				// update the current time
				curr = osd_cycles();
			}
		}
	}
	else if (curr - target >= (int)(cps / video_fps) && totalparts == 1)
	{
		// We're behind schedule by a frame or more.  Something must
		// have taken longer than it should have (e.g., a CPU emulator
		// time slice must have gone on too long).  We don't have a
		// time machine, so we can't go back and sync this frame to a
		// time in the past, but we can at least sync up the current
		// frame with the current real time.
		//
		// Note that the 12-frame "skip" cycle would eventually get
		// things back in sync even without this adjustment, but it
		// can cause audio glitching if we wait until then.  The skip
		// cycle will try to make up for the lost time by giving shorter
		// time slices to the next batch of 12 frames, but the way it
		// does its calculation, the time taken out of those short
		// frames will pile up in the *next next* skip cycle, causing
		// a long (order of 100ms) pause that can manifest as an audio
		// glitch and/or video hiccup.
		//
		// The adjustment here is simply the amount of real time by
		// which we're behind schedule.  Add this to the base time,
		// since the real time for this frame is later than we expected.
		this_frame_base += curr - target;
	}

	// idle time done
	profiler_mark(PROFILER_END);
}
int cDriverSED1520::Init() { int x; int i; struct timeval tv1, tv2; if (!(config->width % 8) == 0) { width = config->width + (8 - (config->width % 8)); } else { width = config->width; } if (!(config->height % 8) == 0) { height = config->height + (8 - (config->height % 8)); } else { height = config->height; } if (width < 0) width = 120; if (height < 0) height = 32; SEAD = kSEAD; SEPA = kSEPA; SEDS = kSEDS; DION = kDION; DIOF = kDIOF; LED = kLEDHI; CDHI = kCDHI; CDLO = kCDLO; CS1HI = kCS1HI; CS1LO = kCS1LO; CS2HI = kCS2HI; CS2LO = kCS2LO; for (unsigned int i = 0; i < config->options.size(); i++) { if (config->options[i].name == "") { } } // setup linear lcd array LCD = new unsigned char *[(width + 7) / 8]; if (LCD) { for (x = 0; x < (width + 7) / 8; x++) { LCD[x] = new unsigned char[height]; memset(LCD[x], 0, height); } } // setup the lcd array for the paged sed1520 LCD_page = new unsigned char *[width]; if (LCD_page) { for (x = 0; x < width; x++) { LCD_page[x] = new unsigned char[(height + 7) / 8]; memset(LCD_page[x], 0, (height + 7) / 8); } } if (config->device == "") { // use DirectIO if (port->Open(config->port) != 0) return -1; uSleep(10); } else { // use ppdev if (port->Open(config->device.c_str()) != 0) return -1; } if (nSleepInit() != 0) { syslog(LOG_DEBUG, "%s: INFO: cannot change wait parameters (cDriver::Init)\n", config->name.c_str()); useSleepInit = false; } else { useSleepInit = true; } syslog(LOG_DEBUG, "%s: benchmark started.\n", config->name.c_str()); gettimeofday(&tv1, 0); for (i = 0; i < 1000; i++) { port->WriteData(i % 0x100); } gettimeofday(&tv2, 0); if (useSleepInit) nSleepDeInit(); timeForPortCmdInNs = (tv2.tv_sec - tv1.tv_sec) * 1000000 + (tv2.tv_usec - tv1.tv_usec); syslog(LOG_DEBUG, "%s: benchmark stopped. 
Time for Command: %ldns\n", config->name.c_str(), timeForPortCmdInNs); // initialize graphic mode InitGraphic(); port->Release(); *oldConfig = *config; // clear display Clear(); syslog(LOG_INFO, "%s: SED1520 initialized.\n", config->name.c_str()); return 0; }
// data_player node: replays an RTAB-Map database as live ROS topics —
// RGB-D or stereo images with camera_info, laser scans, odometry and TF —
// and forwards "GOAL:<id>" user data to rtabmap's set_goal service.
// Supports pause/resume services and a configurable replay rate.
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);

	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");

	// Default frames and parameters, overridable via private params below.
	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = -1.0f;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;

	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Set -1 to use database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// Laser-scan reprojection defaults, based on URG-04LX specs.
	double scanHeight, scanAngleMin, scanAngleMax, scanAngleIncrement, scanTime, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_height", scanHeight, 0.3);
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI / 2.0);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI / 2.0);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 360.0);
	pnh.param<double>("scan_time", scanTime, 1.0 / 10.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.02);
	pnh.param<double>("scan_range_max", scanRangeMax, 6.0);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);

	// The database path is mandatory.
	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}

	// Expand '~' and make relative paths absolute against the current dir.
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	rtabmap::DBReader reader(databasePath, rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	// pause/resume toggle the file-scope 'paused' flag via their callbacks.
	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	// Publishers are created lazily (advertised only once the corresponding
	// data type is first seen in the database).
	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	UTimer timer;
	rtabmap::CameraInfo info;
	rtabmap::SensorData data = reader.takeImage(&info);
	rtabmap::OdometryEvent odom(data, info.odomPose, info.odomCovariance);
	// NOTE(review): acquisitionTime is measured each cycle but never read
	// afterwards in this function — possibly leftover instrumentation.
	double acquisitionTime = timer.ticks();
	// Main replay loop: runs until the database has no more frames
	// (takeImage() returns data with id 0) or ROS shuts down.
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		ros::Time time = ros::Time::now();

		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		// Start from identity K/R and an identity-like P.
		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;

		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;

		camInfoB = camInfoA;

		// type: -1 = plain image, 0 = RGB-D, 1 = stereo.
		int type = -1;
		if(!odom.data().depthRaw().empty() && (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					// Copy intrinsics into both K and P (no distortion).
					camInfoA.D.resize(5,0);
					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();

					camInfoB = camInfoA;
				}

				type=0;

				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);
				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();

				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);
		}
		else
		{
			// No depth and no stereo: publish as a plain image.
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		if(!odom.data().laserScanRaw().empty())
		{
			if(scanPub.getTopic().empty()) scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
		}

		// publish transforms first (base->camera, odom->base, base->laser)
		// so subscribers can resolve frames before the data arrives.
		if(publishTf)
		{
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			if(!scanPub.getTopic().empty())
			{
				// The laser is assumed mounted scanHeight above the base frame.
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(rtabmap::Transform(0,0,scanHeight,0,0,0), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}

		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				// Covariance is copied raw: both sides must be 6x6 doubles.
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		if(scanPub.getNumSubscribers() && !odom.data().laserScanRaw().empty())
		{
			//inspired from pointcloud_to_laserscan package
			sensor_msgs::LaserScan msg;
			msg.header.frame_id = scanFrameId;
			msg.header.stamp = time;

			msg.angle_min = scanAngleMin;
			msg.angle_max = scanAngleMax;
			msg.angle_increment = scanAngleIncrement;
			msg.time_increment = 0.0;
			msg.scan_time = scanTime;
			msg.range_min = scanRangeMin;
			msg.range_max = scanRangeMax;

			// 0.0 is used as the "no return" sentinel in ranges[] below.
			uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
			msg.ranges.assign(rangesSize, 0.0);

			const cv::Mat & scan = odom.data().laserScanRaw();
			UASSERT(scan.type() == CV_32FC2 || scan.type() == CV_32FC3);
			UASSERT(scan.rows == 1);
			// NOTE(review): at<cv::Vec2f>() on a CV_32FC3 scan looks
			// channel-mismatched — verify 3-channel scans are handled upstream.
			for (int i=0; i<scan.cols; ++i)
			{
				cv::Vec2f pos = scan.at<cv::Vec2f>(i);
				double range = hypot(pos[0], pos[1]);
				if (range >= scanRangeMin && range <=scanRangeMax)
				{
					double angle = atan2(pos[1], pos[0]);
					if (angle >= msg.angle_min && angle <= msg.angle_max)
					{
						int index = (angle - msg.angle_min) / msg.angle_increment;
						// Keep the closest return per angular bin.
						if (index>=0 && index<rangesSize && (range < msg.ranges[index] || msg.ranges[index]==0))
						{
							msg.ranges[index] = range;
						}
					}
				}
			}

			scanPub.publish(msg);
		}

		// Detect "GOAL:<id>" embedded in the node's user data and forward it
		// to rtabmap's set_goal service.
		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());

					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		// Busy-wait (with 100 ms naps) while paused, still servicing callbacks.
		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		// Fetch the next frame; DBReader paces itself according to 'rate'
		// or the database timestamps.
		timer.restart();
		info = rtabmap::CameraInfo();
		data = reader.takeImage(&info);
		odom = rtabmap::OdometryEvent(data, info.odomPose, info.odomCovariance);
		acquisitionTime = timer.ticks();
	}

	return 0;
}