int main(int ac, char *argv[]) { if (ac<2) { printf("\n UDP Receiver Illustration - by Kristof Van Laerhoven."); printf("\n syntax:"); printf("\n %s <portnr>", argv[0]); printf("\n"); printf("\n <portnr> is the port number to listen to"); printf("\n try for instance '%s 1501'.", argv[0]); printf("\n\n"); exit(0); } SensorData *s; // this is the only place where we treat the sensordata // as a specific UDP parser object, elsewhere, we don't // care where the sensordata came from. Neat, isn't it? int port = atoi(argv[1]); s = new UDPParser(port, 100); char buffer[1024]; while(1) { if (s->read(buffer)) printf("msg: \"%s\"\n\r", buffer); } delete s; return 0; }
void DBReader::mainLoop()
{
	SensorData data = this->getNextData();

	if(!data.isValid())
	{
		// End of database: stop the thread once and announce the end of stream.
		if(!this->isKilled())
		{
			UINFO("no more images...");
			this->kill();
			this->post(new CameraEvent());
		}
		return;
	}

	if(_odometryIgnored)
	{
		// Odometry explicitly ignored: publish the data as a plain camera frame.
		this->post(new CameraEvent(data));
	}
	else
	{
		if(data.pose().isNull())
		{
			UWARN("Reading the database: odometry is null! "
				  "Please set \"Ignore odometry = true\" if there is "
				  "no odometry in the database.");
		}
		this->post(new OdometryEvent(data));
	}
}
int main(int ac, char *argv[]) { if (ac<2) { printf("\n Sensor Sim Illustration - by Kristof Van Laerhoven."); printf("\n syntax:"); printf("\n %s <nr>", argv[0]); printf("\n"); printf("\n <nr> is the number of sensors to simulate"); printf("\n try for instance '%s 5'.", argv[0]); printf("\n\n"); exit(0); } SensorData *s; // this is the only place where we treat the sensordata // as a specific sim parser object, elsewhere, we don't // care where the sensordata came from. Neat, isn't it? int num = atoi(argv[1]); s = new SimParser(num); char buffer[1024]; while(1) { if (s->read(buffer)) printf("msg: \"%s\"\n\r", buffer); } delete s; return 0; }
int main(void) { CHR_6dm device("/dev/ttyUSB0"); if (device.open(20) != CHR_OK) { fprintf(stderr, "Error Open"); return -1; } printf("Open complete\n"); SensorData sensor; sensor.enableChannel(az); sensor.enableChannel(ax); printf("Now set active channel\n"); if (device.setActiveChannels(&sensor) != CHR_OK) { fprintf(stderr, "Error Open"); return -1; } printf("Now goto measurement mode\n"); if (device.gotoMeasurementMode(&sensor, 300) != CHR_OK) { fprintf(stderr, "Error goto measurement mode\n"); return -1; } for (int i=0; ; i++) { device.getData(&sensor); /* printf("Yaw:%f Pitch:%f Roll:%f\n", sensor.datafield.yaw(), sensor.datafield.pitch(), sensor.datafield.roll()); */ printf("Acc_x:%3.4f Acc_y:%3.4f Acc_z:%3.4f\n", sensor.datafield.accel_x(), sensor.datafield.accel_y(), sensor.datafield.accel_z()); printf("Mag_x:%3.4f Mag_y:%3.4f Mag_z:%3.4f\n", sensor.datafield.mag_x(), sensor.datafield.mag_y(), sensor.datafield.mag_z()); printf("Gyro_x:%3.4f Gyro_y:%3.4f Gyro_z:%3.4f\n", sensor.datafield.gyro_x(), sensor.datafield.gyro_y(), sensor.datafield.gyro_z()); usleep(100000); } printf("Go to config mode again!\n"); if (device.gotoConfigMode() != CHR_OK) { fprintf(stderr, "Error goto config mode\n"); return -1; } return 0; }
void Compass::OnDataReceived(SensorType sensorType, SensorData& sensorData, result r)
{
    // Pull the magnetic-field sample (three axes plus timestamp) into members.
    sensorData.GetValue((SensorDataKey)MAGNETIC_DATA_KEY_TIMESTAMP, timestamp);
    sensorData.GetValue((SensorDataKey)MAGNETIC_DATA_KEY_X, x);
    sensorData.GetValue((SensorDataKey)MAGNETIC_DATA_KEY_Y, y);
    sensorData.GetValue((SensorDataKey)MAGNETIC_DATA_KEY_Z, z);
    AppLogDebug("x: %f, y: %f, z: %f timestamp: %d", x, y, z, timestamp);

    // Forward the reading to the JavaScript side through the PhoneGap bridge.
    String script;
    script.Format(256, L"PhoneGap.callbacks['%S'].success({x:%f,y:%f,z:%f,timestamp:%d});", callbackId.GetPointer(), x, y, z, timestamp);
    pWeb->EvaluateJavascriptN(script);
}
// Capture one frame, throttled to _imageRate frames per second. When 'info'
// is provided it is filled with the frame id, stamp and capture time.
SensorData Camera::takeImage(CameraInfo * info)
{
	bool warnFrameRateTooHigh = false;
	float actualFrameRate = 0;
	if(_imageRate>0)
	{
		// Remaining time of the frame period, in milliseconds; sleep most of
		// it away, keeping ~2 ms in reserve for the busy-wait below.
		int sleepTime = (1000.0f/_imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
		if(sleepTime > 2)
		{
			uSleep(sleepTime-2);
		}
		else if(sleepTime < 0)
		{
			// Already past the deadline: remember the achieved rate so the
			// warning after capture can report it.
			warnFrameRateTooHigh = true;
			actualFrameRate = 1.0/(_frameRateTimer->getElapsedTime());
		}

		// Add precision at the cost of a small overhead
		while(_frameRateTimer->getElapsedTime() < 1.0/double(_imageRate)-0.000001)
		{
			//
		}

		double slept = _frameRateTimer->getElapsedTime();
		_frameRateTimer->start();
		UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_imageRate));
	}

	// Time the actual capture separately from the rate-limiting above.
	UTimer timer;
	SensorData data = this->captureImage(info);
	double captureTime = timer.ticks();
	if(warnFrameRateTooHigh)
	{
		UWARN("Camera: Cannot reach target image rate %f Hz, current rate is %f Hz and capture time = %f s.", _imageRate, actualFrameRate, captureTime);
	}
	else
	{
		UDEBUG("Time capturing image = %fs", captureTime);
	}
	if(info)
	{
		info->id = data.id();
		info->stamp = data.stamp();
		info->timeCapture = captureTime;
	}
	return data;
}
// Build a signature (graph node) directly from raw sensor data: it takes the
// data's id, stamp and ground-truth pose, starts with an identity pose, no
// map assignment (-1), zero weight and no label, and is flagged as modified
// (not yet saved) so it will be persisted later.
Signature::Signature(const SensorData & data) :
	_id(data.id()),
	_mapId(-1),
	_stamp(data.stamp()),
	_weight(0),
	_label(""),
	_saved(false),      // not yet persisted
	_modified(true),
	_linksModified(true),
	_enabled(false),
	_pose(Transform::getIdentity()),
	_groundTruthPose(data.groundTruth()),
	_sensorData(data)
{
}
// Compute the incremental motion for the new frame and accumulate it into the
// internal pose. Returns the updated pose, or a null Transform when odometry
// could not be computed. 'info' (optional) receives timing and lost state.
Transform Odometry::process(const SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.image().empty());
	// All approaches except mono odometry also need a depth or right image.
	if(dynamic_cast<OdometryMono*>(this) == 0)
	{
		UASSERT(!data.depthOrRightImage().empty());
	}

	if(data.fx() <= 0 || data.fyOrBaseline() <= 0)
	{
		UERROR("Rectified images required! Calibrate your camera. (fx=%f, fy/baseline=%f, cx=%f, cy=%f)",
				data.fx(), data.fyOrBaseline(), data.cx(), data.cy());
		return Transform();
	}

	UTimer time;
	Transform t = this->computeTransform(data, info);
	if(info)
	{
		info->time = time.elapsed();
		info->lost = t.isNull();
	}

	if(!t.isNull())
	{
		// Successful update: rearm the lost-odometry reset countdown.
		_resetCurrentCount = _resetCountdown;

		// Optionally constrain the motion to the XY plane (keep x, y, yaw).
		if(_force2D)
		{
			float x,y,z, roll,pitch,yaw;
			t.getTranslationAndEulerAngles(x, y, z, roll, pitch, yaw);
			t = Transform(x,y,0, 0,0,yaw);
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}
	return Transform();
}
void NotifySensorChange(const SensorData &aSensorData) { SensorObserverList &observers = GetSensorObservers(aSensorData.sensor()); AssertMainThread(); observers.Broadcast(aSensorData); }
void Accelerometer::OnDataReceived(SensorType sensorType, SensorData& sensorData, result r)
{
    // Read the acceleration sample (three axes plus timestamp) into members.
    sensorData.GetValue((SensorDataKey)ACCELERATION_DATA_KEY_TIMESTAMP, timestamp);
    sensorData.GetValue((SensorDataKey)ACCELERATION_DATA_KEY_X, x);
    sensorData.GetValue((SensorDataKey)ACCELERATION_DATA_KEY_Y, y);
    sensorData.GetValue((SensorDataKey)ACCELERATION_DATA_KEY_Z, z);
    AppLogDebug("x: %f, y: %f, z: %f timestamp: %d", x, y, z, timestamp);

    // Fire the PhoneGap success callback on the JavaScript side.
    String res;
    res.Format(256, L"PhoneGap.callbacks['%S'].success({x:%f,y:%f,z:%f,timestamp:%d});", callbackId.GetPointer(), x, y, z, timestamp);
    pWeb->EvaluateJavascriptN(res);

    // Cache the reading for navigator.accelerometer.lastAcceleration.
    // Bug fix: the generated JS used to end with "%d});" — the stray '}' made
    // the emitted statement a JavaScript syntax error — it now ends with "%d);".
    res.Clear();
    res.Format(256, L"navigator.accelerometer.lastAcceleration = new Acceleration(%f,%f,%f,%d);", x, y, z, timestamp);
    pWeb->EvaluateJavascriptN(res);
}
// ROS callback: merge an incoming rtabmap MapData message into the local node
// cache, then rebuild and publish the assembled maps.
void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
{
	UTimer timer;

	std::map<int, Transform> poses;
	std::multimap<int, Link> constraints;
	Transform mapOdom;
	rtabmap_ros::mapGraphFromROS(msg->graph, poses, constraints, mapOdom);

	// Cache only the nodes that actually carry sensory payload.
	for(unsigned int i=0; i<msg->nodes.size(); ++i)
	{
		if(msg->nodes[i].image.size() || msg->nodes[i].depth.size() || msg->nodes[i].laserScan.size())
		{
			uInsert(nodes_, std::make_pair(msg->nodes[i].id, rtabmap_ros::nodeDataFromROS(msg->nodes[i])));
		}
	}

	// create a tmp signature with latest sensory data
	// (duplicated under id -1 at the most recent pose so the published maps
	// always include the newest frame)
	if(poses.size() && nodes_.find(poses.rbegin()->first) != nodes_.end())
	{
		Signature tmpS = nodes_.at(poses.rbegin()->first);
		SensorData tmpData = tmpS.sensorData();
		tmpData.setId(-1);
		uInsert(nodes_, std::make_pair(-1, Signature(-1, -1, 0, tmpS.getStamp(), "", tmpS.getPose(), Transform(), tmpData)));
		poses.insert(std::make_pair(-1, poses.rbegin()->second));
	}

	// Update maps
	poses = mapsManager_.updateMapCaches(
			poses,
			0,
			false,
			false,
			false,
			false,
			false,
			nodes_);

	mapsManager_.publishMaps(poses, msg->header.stamp, msg->header.frame_id);

	ROS_INFO("map_assembler: Publishing data = %fs", timer.ticks());
}
// Simple wall-following behaviour driven by two distance sensors (A and B).
// NOTE(review): 'data' is passed by value; if getDistanceB() only returns the
// snapshot taken at call time, the inner while loop below can never observe a
// new reading and would spin forever — confirm the accessor polls hardware.
void PlatformSearch::wallFollow(SensorData data)
{
	if (data.getDistanceB() < 15)
	{
		// Too close on the B side: stop both drives, then rotate drive B
		// (opposite wheel speeds) until clearance reaches 30.
		driveA->stop();
		driveB->stop();
		while (data.getDistanceB() < 30)
		{
			driveB->A->setSpeed(driveB->shield, 0.2);
			driveB->B->setSpeed(driveB->shield, -0.2);
			std::cout << "B" << std::endl;
		}
	}
	else if (data.getDistanceA() > 80)
	{
		// Drifted away from the wall on the A side: rotate for 300 ms.
		// NOTE(review): these calls pass driveB->shield although they act on
		// driveA's motors — looks like a copy-paste; confirm intended shield.
		driveA->A->setSpeed(driveB->shield, 0.2);
		driveA->B->setSpeed(driveB->shield, -0.2);
		usleep(300000);
	}
	else
	{
		// Nominal case: keep driving along the wall.
		driveA->drive(15, data.getDistanceA(), 0.2);
		std::cout << "A" << std::endl;
		usleep(100000);
	}
}
//============================================================ // MAIN LOOP //============================================================ void RtabmapThread::process() { SensorData data; getData(data); if(data.isValid()) { if(_rtabmap->getMemory()) { if(_rtabmap->process(data)) { Statistics stats = _rtabmap->getStatistics(); stats.addStatistic(Statistics::kMemoryImages_buffered(), (float)_dataBuffer.size()); ULOGGER_DEBUG("posting statistics_ event..."); this->post(new RtabmapEvent(stats)); } } else { UERROR("RTAB-Map is not initialized! Ignoring received data..."); } } }
void CameraThread::mainLoop() { UTimer totalTime; UDEBUG(""); CameraInfo info; SensorData data = _camera->takeImage(&info); if(!data.imageRaw().empty() || (dynamic_cast<DBReader*>(_camera) != 0 && data.id()>0)) // intermediate nodes could not have image set { postUpdate(&data, &info); info.cameraName = _camera->getSerial(); info.timeTotal = totalTime.ticks(); this->post(new CameraEvent(data, info)); } else if(!this->isKilled()) { UWARN("no more images..."); this->kill(); this->post(new CameraEvent()); } }
NS_IMETHOD Run()
{
    // Build a synthetic reading (four channels, all 0.5, unreliable accuracy)
    // for the configured sensor type and deliver it to the target.
    SensorData sensorData;
    sensorData.sensor() = static_cast<SensorType>(mType);
    sensorData.timestamp() = PR_Now();
    for (int i = 0; i < 4; ++i) {
      sensorData.values().AppendElement(0.5f);
    }
    sensorData.accuracy() = SENSOR_ACCURACY_UNRELIABLE;
    mTarget->Notify(sensorData);
    return NS_OK;
}
void RtabmapThread::addData(const SensorData & sensorData) { if(!_paused) { if(!sensorData.isValid()) { ULOGGER_ERROR("data not valid !?"); return; } if(_rate>0.0f) { if(_frameRateTimer->getElapsedTime() < 1.0f/_rate) { return; } } _frameRateTimer->start(); bool notify = true; _dataMutex.lock(); { _dataBuffer.push_back(sensorData); while(_dataBufferMaxSize > 0 && _dataBuffer.size() > (unsigned int)_dataBufferMaxSize) { ULOGGER_WARN("Data buffer is full, the oldest data is removed to add the new one."); _dataBuffer.pop_front(); notify = false; } } _dataMutex.unlock(); if(notify) { _dataAdded.release(); } } }
//-------------------------------------------------------------- void testApp::onNewSensorData(SensorData & s){ cout << s.toString() << endl; if (bUseSensors){ float speed = 1.0*s.vitesse; float filterCoefSpeed = 0.00; wind_speed = speed * (1- filterCoefSpeed) + filterCoefSpeed*wind_speed; float direction; direction = 270 + s.direction; if (direction > 360) direction -= 360; if (wind.y <90 && direction > 270){ direction -=360; } if (wind.y >270 && direction < 90){ direction +=360; } float filterCoef = 0.90; wind.y = direction * (1- filterCoef) + filterCoef*wind.y; if (wind.y > 360) wind.y -= 360; temperature = s.temperature; pyranometre = s.pyranometre; } }
void OdometryThread::addData(const SensorData & data) { if(dynamic_cast<OdometryMono*>(_odometry) == 0 && dynamic_cast<OdometryF2M*>(_odometry) == 0) { if(data.imageRaw().empty() || data.depthOrRightRaw().empty() || (data.cameraModels().size()==0 && !data.stereoCameraModel().isValidForProjection())) { ULOGGER_ERROR("Missing some information (images empty or missing calibration)!?"); return; } } else { // Mono and BOW can accept RGB only if(data.imageRaw().empty() || (data.cameraModels().size()==0 && !data.stereoCameraModel().isValidForProjection())) { ULOGGER_ERROR("Missing some information (image empty or missing calibration)!?"); return; } } bool notify = true; _dataMutex.lock(); { _dataBuffer.push_back(data); while(_dataBufferMaxSize > 0 && _dataBuffer.size() > _dataBufferMaxSize) { UDEBUG("Data buffer is full, the oldest data is removed to add the new one."); _dataBuffer.pop_front(); notify = false; } } _dataMutex.unlock(); if(notify) { _dataAdded.release(); } }
// Convert one CZML packet (JSON object) into a SensorData record: numeric id,
// geographic position and the sensor value.
SensorData CzmlReader::writeToSensorData(QJsonObject& data)
{
	QJsonArray positionArray;
	QJsonObject positionObject;
	SensorData sensordata;

	// The numeric id is embedded in the CZML id string.
	QString id = data["id"].toString();
	sensordata.setId(getIdFromCzmlString(id));

	positionObject = data["position"].toObject();
	positionArray = positionObject["cartographicDegrees"].toArray();

	// cartographicDegrees order is [longitude, latitude, height].
	QGeoCoordinate parsedPosition;
	parsedPosition.setLatitude(positionArray[1].toDouble());
	parsedPosition.setLongitude(positionArray[0].toDouble());
	sensordata.setPosition(parsedPosition);
	sensordata.setHeight(positionArray[2].toDouble());

	// A string-valued "sensorvalue" marks a missing reading; store a sentinel.
	// NOTE(review): numeric_limits<double>::min() is the smallest POSITIVE
	// double, not the most negative value — confirm lowest() wasn't intended.
	if(data["sensorvalue"].isString())
		sensordata.setSensorValue(std::numeric_limits<double>::min());
	else
		sensordata.setSensorValue(data["sensorvalue"].toDouble());

	return sensordata;
}
// return not null transform if odometry is correctly computed
// Dense visual odometry (DVO) front-end: tracks the current RGB-D frame
// against an internally-managed keyframe. 'guess' is unused by DVO itself.
Transform OdometryDVO::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	Transform t;
#ifdef RTABMAP_DVO
	UTimer timer;

	// DVO needs a registered RGB-D pair: image and depth of identical size.
	if(data.imageRaw().empty() ||
	   data.imageRaw().rows != data.depthOrRightRaw().rows ||
	   data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input!");
		return t;
	}

	if(!(data.cameraModels().size() == 1 && data.cameraModels()[0].isValidForReprojection()))
	{
		UERROR("Invalid camera model! Only single RGB-D camera supported by DVO. Try another odometry approach.");
		return t;
	}

	// Lazily create the dense tracker on the first frame.
	if(dvo_ == 0)
	{
		dvo::DenseTracker::Config cfg = dvo::DenseTracker::getDefaultConfig();
		dvo_ = new dvo::DenseTracker(cfg);
	}

	cv::Mat grey, grey_s16, depth_inpainted, depth_mask, depth_mono, depth_float;

	// DVO expects a float intensity image: convert BGR to grey, then to CV_32F.
	if(data.imageRaw().type() != CV_32FC1)
	{
		if(data.imageRaw().type() == CV_8UC3)
		{
			cv::cvtColor(data.imageRaw(), grey, CV_BGR2GRAY);
		}
		else
		{
			grey = data.imageRaw();
		}
		grey.convertTo(grey_s16, CV_32F);
	}
	else
	{
		grey_s16 = data.imageRaw();
	}

	// make sure all zeros are NAN
	if(data.depthRaw().type() == CV_32FC1)
	{
		depth_float = data.depthRaw();
		for(int i=0; i<depth_float.rows; ++i)
		{
			for(int j=0; j<depth_float.cols; ++j)
			{
				float & d = depth_float.at<float>(i,j);
				if(d == 0.0f)
				{
					d = NAN;
				}
			}
		}
	}
	else if(data.depthRaw().type() == CV_16UC1)
	{
		// 16-bit depth is in millimetres: convert to metres; zeros become NAN.
		depth_float = cv::Mat(data.depthRaw().size(), CV_32FC1);
		for(int i=0; i<data.depthRaw().rows; ++i)
		{
			for(int j=0; j<data.depthRaw().cols; ++j)
			{
				float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
				depth_float.at<float>(i, j) = d==0.0f?NAN:d;
			}
		}
	}
	else
	{
		UFATAL("Unknown depth format!");
	}

	// Lazily create the camera pyramid from the calibration of the first frame.
	if(camera_ == 0)
	{
		dvo::core::IntrinsicMatrix intrinsics = dvo::core::IntrinsicMatrix::create(
				data.cameraModels()[0].fx(),
				data.cameraModels()[0].fy(),
				data.cameraModels()[0].cx(),
				data.cameraModels()[0].cy());
		camera_ = new dvo::core::RgbdCameraPyramid(
				data.cameraModels()[0].imageWidth(),
				data.cameraModels()[0].imageHeight(),
				intrinsics);
	}

	dvo::core::RgbdImagePyramid * current = new dvo::core::RgbdImagePyramid(*camera_, grey_s16, depth_float);

	cv::Mat covariance;
	if(reference_ == 0)
	{
		// First frame: it becomes the keyframe; identity motion unless we were lost.
		reference_ = current;
		if(!lost_)
		{
			t.setIdentity();
		}
		covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
	}
	else
	{
		dvo::DenseTracker::Result result;
		dvo_->match(*reference_, *current, result);
		t = Transform::fromEigen3d(result.Transformation);

		if(result.Information(0,0) > 0.0 && result.Information(0,0) != 1.0)
		{
			// Tracking succeeded: derive the covariance from the information matrix.
			lost_ = false;
			cv::Mat information = cv::Mat::eye(6,6, CV_64FC1);
			memcpy(information.data, result.Information.data(), 36*sizeof(double));
			covariance = information.inv();
			covariance *= 100.0; // to be in the same scale than loop closure detection

			Transform currentMotion = t;
			t = motionFromKeyFrame_.inverse() * t;

			// TODO make parameters?
			if(currentMotion.getNorm() > 0.01 || currentMotion.getAngle() > 0.01)
			{
				if(info)
				{
					info->keyFrameAdded = true;
				}
				// new keyframe
				delete reference_;
				reference_ = current;
				motionFromKeyFrame_.setIdentity();
			}
			else
			{
				// Keep the keyframe; only remember the accumulated motion.
				delete current;
				motionFromKeyFrame_ = currentMotion;
			}
		}
		else
		{
			// Tracking failed: drop both frames and restart on the next one.
			lost_ = true;
			delete reference_;
			delete current;
			reference_ = 0; // this will make restart from the next frame
			motionFromKeyFrame_.setIdentity();
			t.setNull();
			covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
			UWARN("dvo failed to estimate motion, tracking will be reinitialized on next frame.");
		}
	}

	const Transform & localTransform = data.cameraModels()[0].localTransform();
	if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
	{
		// from camera frame to base frame
		t = localTransform * t * localTransform.inverse();
	}

	if(info)
	{
		info->type = (int)kTypeDVO;
		info->covariance = covariance;
	}

	UINFO("Odom update time = %fs", timer.elapsed());

#else
	UERROR("RTAB-Map is not built with DVO support! Select another visual odometry approach.");
#endif
	return t;
}
// Grab a frame, apply the optional post-processing steps (drop depth, mirror,
// stereo-to-depth conversion) and publish it as a CameraEvent. When the
// source is exhausted, the thread kills itself and posts an empty event.
void CameraThread::mainLoop()
{
	UTimer timer;
	UDEBUG("");
	SensorData data = _camera->takeImage();
	if(!data.imageRaw().empty())
	{
		// Color-only mode: discard the depth channel.
		if(_colorOnly && !data.depthRaw().empty())
		{
			data.setDepthOrRightRaw(cv::Mat());
		}
		// Horizontal mirroring (single camera only): flip image and depth,
		// and mirror the principal point cx accordingly.
		if(_mirroring && data.cameraModels().size() == 1)
		{
			cv::Mat tmpRgb;
			cv::flip(data.imageRaw(), tmpRgb, 1);
			data.setImageRaw(tmpRgb);
			if(data.cameraModels()[0].cx())
			{
				CameraModel tmpModel(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy(),
						data.cameraModels()[0].localTransform());
				data.setCameraModel(tmpModel);
			}
			if(!data.depthRaw().empty())
			{
				cv::Mat tmpDepth;
				cv::flip(data.depthRaw(), tmpDepth, 1);
				data.setDepthOrRightRaw(tmpDepth);
			}
		}
		// Convert a stereo pair to an RGB-D frame: compute disparity, turn it
		// into depth, then drop the stereo model in favour of the left camera.
		if(_stereoToDepth && data.stereoCameraModel().isValid() && !data.rightRaw().empty())
		{
			cv::Mat depth = util2d::depthFromDisparity(
					util2d::disparityFromStereoImages(data.imageRaw(), data.rightRaw()),
					data.stereoCameraModel().left().fx(),
					data.stereoCameraModel().baseline());
			data.setCameraModel(data.stereoCameraModel().left());
			data.setDepthOrRightRaw(depth);
			data.setStereoCameraModel(StereoCameraModel());
		}
		this->post(new CameraEvent(data, _camera->getSerial()));
	}
	else if(!this->isKilled())
	{
		UWARN("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
// Compute the incremental motion of the new frame, optionally filter it
// (Kalman or particle filters, 3DoF / non-holonomic constraints) and
// accumulate it into the internal pose. Returns the updated pose, or a null
// Transform when odometry is lost. 'data' may receive decimated features
// scaled back to full resolution; 'info' (optional) receives timings, ground
// truth and the filtered motion.
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	// Time elapsed since the previous processed frame (0 on the first frame).
	double dt = previousStamp_>0.0f?data.stamp() - previousStamp_:0.0;
	Transform guess = dt && guessFromMotion_?Transform::getIdentity():Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());

	// Predict a motion guess from the previous velocity: either the Kalman
	// prediction or a constant-velocity extrapolation.
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			// Scale the debug corners and words back to full resolution too.
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		// Ground-truth delta between consecutive frames, when available.
		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		// Successful update: rearm the lost-odometry reset countdown.
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		// Apply filtering/constraints when any of them is enabled.
		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					// First sample: seed each per-axis filter.
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			// Back from (possibly filtered) velocity to a displacement.
			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}
	// Lost: clear the velocity model so the next frame starts fresh.
	previousVelocityTransform_.setNull();
	previousStamp_ = 0;
	return Transform();
}
std::vector<CorrelatedData> MeanShiftCorrelation::correlate(std::vector<SensorData> planes) { std::vector<CorrelatedData> correlatedPlanes; std::vector<double> data; bool correlationFailed = false; if(planes.size() == 0) { return correlatedPlanes; } //Otherwise try and cluster for(int i = 0 ; i < planes.size(); i++){ SensorData plane = planes[i]; if(this->currFlags & COR_POS) { data.push_back(plane.getPurePosition().x); data.push_back(plane.getPurePosition().y); data.push_back(plane.getPurePosition().z / FEET_TO_NAUT_MILES); } if(this->currFlags & COR_VEL) { data.push_back(plane.getVelocity().x / FEET_TO_NAUT_MILES); data.push_back(plane.getVelocity().y / FEET_TO_NAUT_MILES); data.push_back(plane.getVelocity().z / FEET_TO_NAUT_MILES); } } //Data is constructed, set dimensionality. int dimensionality = 0; if(this->currFlags & COR_POS) { dimensionality += 3; } if(this->currFlags & COR_VEL) { dimensionality += 3; } arma::Col<size_t> assignments; arma::mat centroids; meanshift->setWindow(10); auto dataMtx = arma::Mat<double>(&data[0],dimensionality,planes.size()); //dataMtx = dataMtx.; //Transpose data matrix //Radius test //try //{ meanshift->Cluster(dataMtx, assignments, centroids); // } /* * MeanShift has trouble processing data sets. For now, we'll return a blank correlation.. */ /** * If correlation fails, use the assignments from the previous run and vector summations to create new planes. 
*/ if(correlationFailed && prevAssignments.size() > 0) { correlatedPlanes.resize(planes.size()); /* Sum correlations */ for(int i = 0; i < planes.size(); i++) { correlatedPlanes[prevAssignments.at(i)].addPosition(planes[i].getPurePosition()); correlatedPlanes[prevAssignments.at(i)].addSensor(planes[i].getSensor(),planes[i].getPlaneTag(), planes[i] .getTailNumber(), planes[i].getVelocity(), planes[i].getPurePosition()); } /*Trim planes that didn't actually correlate*/ correlatedPlanes.erase(std::remove_if(correlatedPlanes.begin(), correlatedPlanes.end(), [](CorrelatedData & data){return data.getSensor().size() == 0;}), correlatedPlanes.end()); //For the remainder of planes, average for(auto cPlane : correlatedPlanes) { Vector3d velocity = cPlane.getVelocity(); Vector3d position = cPlane.getPosition(); position.x /= cPlane.getSensor().size(); position.y /= cPlane.getSensor().size(); position.z /= cPlane.getSensor().size(); cPlane.setPosition(position); } return correlatedPlanes; } else if(correlationFailed) { return correlatedPlanes; } else { correlatedPlanes.reserve(centroids.n_cols); for (int i = 0; i < centroids.n_cols; i++) { correlatedPlanes.push_back(CorrelatedData(centroids.at(0, i), centroids.at(1, i), centroids.at(2, i), 0, 0, 0)); } for (int i = 0; i < planes.size(); i++) { correlatedPlanes[assignments.at(i)].addSensor(planes[i].getSensor(), planes[i].getPlaneTag(), planes[i] .getTailNumber(), planes[i].getVelocity(), planes[i].getPurePosition()); } prevAssignments = assignments; } return correlatedPlanes; }
// Exercise the plain getters/setters of SensorData: each property is written
// twice to prove the setter overwrites the previous value.
TEST(TestLibAnchialeSensors, TestSensorData)
{
    SensorData sd;

    // string ident
    sd.SetIdent( "abcde" );
    STRCMP_EQUAL( "abcde", sd.GetIdent().c_str() );
    sd.SetIdent( "fghij" );
    STRCMP_EQUAL( "fghij", sd.GetIdent().c_str() );

    // device type
    sd.SetDeviceType( 9 );
    CHECK( 9 == sd.GetDeviceType() );
    sd.SetDeviceType( 5 );
    CHECK( 5 == sd.GetDeviceType() );

    // data type
    sd.SetDataType( 8 );
    CHECK( 8 == sd.GetDataType() );
    sd.SetDataType( 2 );
    CHECK( 2 == sd.GetDataType() );

    // temperature
    sd.SetTemperature( 20.5 );
    CHECK( 20.5 == sd.GetTemperature() );
    sd.SetTemperature( 19.5 );
    CHECK( 19.5 == sd.GetTemperature() );
}
// Stereo-image RTAB-Map demo: reads rectified left/right image sequences,
// runs frame-to-map odometry and mapping at divided rates and shows the map.
// argv: camera_rate odom_update map_update calib_dir calib_name left_path right_path
int main(int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kError);

	if(argc < 8)
	{
		showUsage();
	}

	int argIndex = 1;
	int cameraRate = atoi(argv[argIndex++]);
	if(cameraRate <= 0)
	{
		printf("camera_rate should be > 0\n");
		showUsage();
	}
	// Odometry runs on every odomUpdate-th frame; the map on every
	// mapUpdate-th odometry update.
	int odomUpdate = atoi(argv[argIndex++]);
	if(odomUpdate <= 0)
	{
		printf("odom_update should be > 0\n");
		showUsage();
	}
	int mapUpdate = atoi(argv[argIndex++]);
	if(mapUpdate <= 0)
	{
		printf("map_update should be > 0\n");
		showUsage();
	}

	printf("Camera rate = %d Hz\n", cameraRate);
	printf("Odometry update rate = %d Hz\n", cameraRate/odomUpdate);
	printf("Map update rate = %d Hz\n", (cameraRate/odomUpdate)/mapUpdate);

	std::string calibrationDir = argv[argIndex++];
	std::string calibrationName = argv[argIndex++];
	std::string pathLeftImages = argv[argIndex++];
	std::string pathRightImages = argv[argIndex++];

	// Rotation from the optical (image) frame to the robot/base convention.
	Transform opticalRotation(0,0,1,0, -1,0,0,0, 0,-1,0,0);
	CameraStereoImages camera(
			pathLeftImages,
			pathRightImages,
			false, // assume that images are already rectified
			(float)cameraRate,
			opticalRotation);

	if(camera.init(calibrationDir, calibrationName))
	{
		OdometryF2M odom;
		Rtabmap rtabmap;
		rtabmap.init();

		QApplication app(argc, argv);
		MapBuilder mapBuilder;
		mapBuilder.show();
		QApplication::processEvents();

		SensorData data = camera.takeImage();
		int cameraIteration = 0;
		int odometryIteration = 0;
		printf("Press \"Space\" in the window to pause\n");
		while(data.isValid() && mapBuilder.isVisible())
		{
			// Odometry on a subset of frames; mapping on a subset of those.
			if(cameraIteration++ % odomUpdate == 0)
			{
				OdometryInfo info;
				Transform pose = odom.process(data, &info);

				if(odometryIteration++ % mapUpdate == 0)
				{
					if(rtabmap.process(data, pose))
					{
						mapBuilder.processStatistics(rtabmap.getStatistics());
						if(rtabmap.getLoopClosureId() > 0)
						{
							printf("Loop closure detected!\n");
						}
					}
				}

				mapBuilder.processOdometry(data, pose, info);
			}

			QApplication::processEvents();

			// Spin here while the user has paused the viewer.
			while(mapBuilder.isPaused() && mapBuilder.isVisible())
			{
				uSleep(100);
				QApplication::processEvents();
			}

			data = camera.takeImage();
		}

		if(mapBuilder.isVisible())
		{
			printf("Processed all frames\n");
			app.exec();
		}
	}
	else
	{
		UERROR("Camera init failed!");
	}

	return 0;
}
// return not null transform if odometry is correctly computed Transform OdometryF2F::computeTransform( SensorData & data, const Transform & guess, OdometryInfo * info) { UTimer timer; Transform output; if(!data.rightRaw().empty() && !data.stereoCameraModel().isValidForProjection()) { UERROR("Calibrated stereo camera required"); return output; } if(!data.depthRaw().empty() && (data.cameraModels().size() != 1 || !data.cameraModels()[0].isValidForProjection())) { UERROR("Calibrated camera required (multi-cameras not supported)."); return output; } RegistrationInfo regInfo; UASSERT(!this->getPose().isNull()); if(lastKeyFramePose_.isNull()) { lastKeyFramePose_ = this->getPose(); // reset to current pose } Transform motionSinceLastKeyFrame = lastKeyFramePose_.inverse()*this->getPose(); Signature newFrame(data); if(refFrame_.sensorData().isValid()) { Signature tmpRefFrame = refFrame_; output = registrationPipeline_->computeTransformationMod( tmpRefFrame, newFrame, !guess.isNull()?motionSinceLastKeyFrame*guess:Transform(), ®Info); if(info && this->isInfoDataFilled()) { std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > > pairs; EpipolarGeometry::findPairsUnique(tmpRefFrame.getWords(), newFrame.getWords(), pairs); info->refCorners.resize(pairs.size()); info->newCorners.resize(pairs.size()); std::map<int, int> idToIndex; int i=0; for(std::list<std::pair<int, std::pair<cv::KeyPoint, cv::KeyPoint> > >::iterator iter=pairs.begin(); iter!=pairs.end(); ++iter) { info->refCorners[i] = iter->second.first.pt; info->newCorners[i] = iter->second.second.pt; idToIndex.insert(std::make_pair(iter->first, i)); ++i; } info->cornerInliers.resize(regInfo.inliersIDs.size(), 1); i=0; for(; i<(int)regInfo.inliersIDs.size(); ++i) { info->cornerInliers[i] = idToIndex.at(regInfo.inliersIDs[i]); } Transform t = this->getPose()*motionSinceLastKeyFrame.inverse(); for(std::multimap<int, cv::Point3f>::const_iterator iter=tmpRefFrame.getWords3().begin(); 
iter!=tmpRefFrame.getWords3().end(); ++iter) { info->localMap.insert(std::make_pair(iter->first, util3d::transformPoint(iter->second, t))); } info->words = newFrame.getWords(); } } else { //return Identity output = Transform::getIdentity(); // a very high variance tells that the new pose is not linked with the previous one regInfo.variance = 9999; } if(!output.isNull()) { output = motionSinceLastKeyFrame.inverse() * output; // new key-frame? if( (registrationPipeline_->isImageRequired() && (keyFrameThr_ == 0 || float(regInfo.inliers) <= keyFrameThr_*float(refFrame_.sensorData().keypoints().size()))) || (registrationPipeline_->isScanRequired() && (scanKeyFrameThr_ == 0 || regInfo.icpInliersRatio <= scanKeyFrameThr_))) { UDEBUG("Update key frame"); int features = newFrame.getWordsDescriptors().size(); if(features == 0) { newFrame = Signature(data); // this will generate features only for the first frame or if optical flow was used (no 3d words) Signature dummy; registrationPipeline_->computeTransformationMod( newFrame, dummy); features = (int)newFrame.sensorData().keypoints().size(); } if((features >= registrationPipeline_->getMinVisualCorrespondences()) && (registrationPipeline_->getMinGeometryCorrespondencesRatio()==0.0f || (newFrame.sensorData().laserScanRaw().cols && (newFrame.sensorData().laserScanMaxPts() == 0 || float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())>=registrationPipeline_->getMinGeometryCorrespondencesRatio())))) { refFrame_ = newFrame; refFrame_.setWords(std::multimap<int, cv::KeyPoint>()); refFrame_.setWords3(std::multimap<int, cv::Point3f>()); refFrame_.setWordsDescriptors(std::multimap<int, cv::Mat>()); //reset motion lastKeyFramePose_.setNull(); } else { if (!refFrame_.sensorData().isValid()) { // Don't send odometry if we don't have a keyframe yet output.setNull(); } if(features < registrationPipeline_->getMinVisualCorrespondences()) { UWARN("Too low 2D features (%d), keeping last key frame...", 
features); } if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanRaw().cols==0) { UWARN("Too low scan points (%d), keeping last key frame...", newFrame.sensorData().laserScanRaw().cols); } else if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanMaxPts() != 0 && float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())<registrationPipeline_->getMinGeometryCorrespondencesRatio()) { UWARN("Too low scan points ratio (%d < %d), keeping last key frame...", float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts()), registrationPipeline_->getMinGeometryCorrespondencesRatio()); } } } } else if(!regInfo.rejectedMsg.empty()) { UWARN("Registration failed: \"%s\"", regInfo.rejectedMsg.c_str()); } data.setFeatures(newFrame.sensorData().keypoints(), newFrame.sensorData().descriptors()); if(info) { info->type = 1; info->variance = regInfo.variance; info->inliers = regInfo.inliers; info->icpInliersRatio = regInfo.icpInliersRatio; info->matches = regInfo.matches; info->features = newFrame.sensorData().keypoints().size(); } UINFO("Odom update time = %fs lost=%s inliers=%d, ref frame corners=%d, transform accepted=%s", timer.elapsed(), output.isNull()?"true":"false", (int)regInfo.inliers, (int)newFrame.sensorData().keypoints().size(), !output.isNull()?"true":"false"); return output; }
int main(int argc, char * argv[]) { ULogger::setType(ULogger::kTypeConsole); ULogger::setLevel(ULogger::kWarning); ParametersMap pm = Parameters::parseArguments(argc, argv); pm.insert(ParametersPair(Parameters::kRtabmapWorkingDirectory(), ".")); int argIndex = 1; int cameraRate = atoi(argv[argIndex++]); if(cameraRate <= 0) { printf("camera_rate should be > 0\n"); showUsage(); } int odomUpdate = atoi(argv[argIndex++]); if(odomUpdate <= 0) { printf("odom_update should be > 0\n"); showUsage(); } int mapUpdate = atoi(argv[argIndex++]); if(mapUpdate <= 0) { printf("map_update should be > 0\n"); showUsage(); } printf("Camera rate = %d Hz\n", cameraRate); printf("Odometry update rate = %d Hz\n", cameraRate/odomUpdate); printf("Map update rate = %d Hz\n", (cameraRate/odomUpdate)/mapUpdate); std::string calibrationDir = argv[argIndex++]; std::string calibrationName = argv[argIndex++]; std::string pathRGBImages = argv[argIndex++]; std::string pathDepthImages = argv[argIndex++]; Transform opticalRotation(0,0,1,0, -1,0,0,0, 0,-1,0,0); //CameraStereoImages camera( // pathLeftImages, // pathRightImages, // false, // assume that images are already rectified // (float)cameraRate, // opticalRotation); // CameraFreenect2 camera(0, CameraFreenect2::Type::kTypeColor2DepthSD, 0.0, opticalRotation); CameraRGBDImages camera(pathRGBImages, pathDepthImages, 1, float(cameraRate), opticalRotation); std::ofstream stats_file; stats_file.open("statistics.txt"); stats_file << "[" << std::endl; if(camera.init(calibrationDir, calibrationName)) { OdometryF2M odom; Rtabmap rtabmap; rtabmap.init(pm); QApplication app(argc, argv); MapBuilder mapBuilder; mapBuilder.show(); QApplication::processEvents(); SensorData data = camera.takeImage(); int cameraIteration = 0; int odometryIteration = 0; printf("Press \"Space\" in the window to pause\n"); while(data.isValid() && mapBuilder.isVisible()) { if(cameraIteration++ % odomUpdate == 0) { OdometryInfo info; Transform pose = odom.process(data, &info); 
if(odometryIteration++ % mapUpdate == 0) { if(rtabmap.process(data, pose)) { mapBuilder.processStatistics(rtabmap.getStatistics()); if(rtabmap.getLoopClosureId() > 0) { printf("Loop closure detected!\n"); } std::map<std::string, float> statz = rtabmap.getStatistics().data(); stats_file << " {" << std::endl; for(std::map<std::string,float>::iterator it = statz.begin(); it != statz.end(); ++it) { std::string name = it->first; std::replace(name.begin(), name.end(), '/', '.'); stats_file << " \"" << name << "\": " << it->second << "," << std::endl; } stats_file << " }," << std::endl; } } mapBuilder.processOdometry(data, pose, info); } QApplication::processEvents(); while(mapBuilder.isPaused() && mapBuilder.isVisible()) { uSleep(100); QApplication::processEvents(); } data = camera.takeImage(); } if(mapBuilder.isVisible()) { printf("Processed all frames\n"); app.exec(); } } else { UERROR("Camera init failed!"); } stats_file << "]" << std::endl; stats_file.close(); return 0; }
// Console RTAB-Map runner: feeds an image directory or a video file through
// RTAB-Map loop-closure detection only (odometry/3D mapping disabled below),
// with options for replay rate, dataset repetition, start index, custom
// RTAB-Map parameters, and optional ground-truth matrix generation.
int main(int argc, char * argv[])
{
	// Install signal handlers; sighandler presumably clears g_forever so the
	// processing loops below exit cleanly — confirm against its definition.
	signal(SIGABRT, &sighandler);
	signal(SIGTERM, &sighandler);
	signal(SIGINT, &sighandler);

	/*for(int i=0; i<argc; i++)
	{
		printf("argv[%d] = %s\n", i, argv[i]);
	}*/
	const ParametersMap & defaultParameters = Parameters::getDefaultParameters();
	if(argc < 2)
	{
		showUsage();
	}
	else if(argc == 2 && strcmp(argv[1], "-v") == 0)
	{
		// Print version and quit.
		printf("%s\n", Rtabmap::getVersion().c_str());
		exit(0);
	}
	else if(argc == 2 && strcmp(argv[1], "-default_params") == 0)
	{
		// Print all default parameters (key=value) and quit.
		for(ParametersMap::const_iterator iter = defaultParameters.begin(); iter!=defaultParameters.end(); ++iter)
		{
			printf("%s=%s\n", iter->first.c_str(), iter->second.c_str());
		}
		exit(0);
	}
	printf("\n");

	std::string path;
	float rate = 0.0; // period between images in seconds (0 = as fast as possible)
	int loopDataset = 0;
	int repeat = 0;
	bool createGT = false;
	std::string inputDbPath;
	int startAt = 1;
	ParametersMap pm;
	ULogger::Level logLevel = ULogger::kError;
	ULogger::Level exitLevel = ULogger::kFatal;
	bool logConsole = false;

	// Option parsing; the last argument must be the dataset path.
	for(int i=1; i<argc; ++i)
	{
		if(i == argc-1)
		{
			// The last must be the path
			path = argv[i];
			if(!UDirectory::exists(path.c_str()) && !UFile::exists(path.c_str()))
			{
				printf("Path not valid : %s\n", path.c_str());
				showUsage();
				exit(1);
			}
			break;
		}
		if(strcmp(argv[i], "-rate") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = uStr2Float(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-rateHz") == 0)
		{
			++i;
			if(i < argc)
			{
				rate = uStr2Float(argv[i]);
				if(rate < 0)
				{
					showUsage();
				}
				else if(rate)
				{
					rate = 1/rate; // convert Hz to a period in seconds
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-repeat") == 0)
		{
			++i;
			if(i < argc)
			{
				repeat = std::atoi(argv[i]);
				if(repeat < 1)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-start_at") == 0)
		{
			++i;
			if(i < argc)
			{
				startAt = std::atoi(argv[i]);
				if(startAt < 0)
				{
					showUsage();
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-createGT") == 0)
		{
			createGT = true;
			continue;
		}
		if(strcmp(argv[i], "-input") == 0)
		{
			++i;
			if(i < argc)
			{
				inputDbPath = argv[i];
			}
			else
			{
				showUsage();
			}
			continue;
		}
		if(strcmp(argv[i], "-debug") == 0)
		{
			logLevel = ULogger::kDebug;
			continue;
		}
		if(strcmp(argv[i], "-info") == 0)
		{
			logLevel = ULogger::kInfo;
			continue;
		}
		if(strcmp(argv[i], "-warn") == 0)
		{
			logLevel = ULogger::kWarning;
			continue;
		}
		if(strcmp(argv[i], "-exit_warn") == 0)
		{
			exitLevel = ULogger::kWarning;
			continue;
		}
		if(strcmp(argv[i], "-exit_error") == 0)
		{
			exitLevel = ULogger::kError;
			continue;
		}
		if(strcmp(argv[i], "-log_console") == 0)
		{
			logConsole = true;
			continue;
		}

		// Check for RTAB-Map's parameters
		std::string key = argv[i];
		key = uSplit(key, '-').back(); // strip the leading '-' from the option name
		if(defaultParameters.find(key) != defaultParameters.end())
		{
			++i;
			if(i < argc)
			{
				std::string value = argv[i];
				if(value.empty())
				{
					showUsage();
				}
				else
				{
					value = uReplaceChar(value, ',', ' ');
				}
				std::pair<ParametersMap::iterator, bool> inserted = pm.insert(ParametersPair(key, value));
				if(inserted.second == false)
				{
					inserted.first->second = value; // overwrite a previously-set value
				}
			}
			else
			{
				showUsage();
			}
			continue;
		}

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	if(repeat && createGT)
	{
		printf("Cannot create a Ground truth if repeat is on.\n");
		showUsage();
	}

	if(logConsole)
	{
		ULogger::setType(ULogger::kTypeConsole);
	}
	//ULogger::setType(ULogger::kTypeFile, rtabmap.getWorkingDir()+"/LogConsole.txt", false);
	//ULogger::setBuffered(true);
	ULogger::setLevel(logLevel);
	ULogger::setExitLevel(exitLevel);

	UTimer timer;
	timer.start();
	std::queue<double> iterationMeanTime; // NOTE(review): declared but never used below

	// A directory path means an image sequence; anything else is treated as a video file.
	Camera * camera = 0;
	if(UDirectory::exists(path))
	{
		camera = new CameraImages(path, startAt, false, false, false, rate>0.0f?1.0f/rate:0.0f);
	}
	else
	{
		camera = new CameraVideo(path, false, rate>0.0f?1.0f/rate:0.0f);
	}

	if(!camera || !camera->init())
	{
		printf("Camera init failed, using path \"%s\"\n", path.c_str());
		exit(1);
	}

	// image index -> (loop closure id - 1), filled when createGT is set.
	std::map<int, int> groundTruth;

	// Create tasks
	Rtabmap rtabmap;
	if(inputDbPath.empty())
	{
		// Fresh run: remove any stale database from a previous run.
		inputDbPath = "rtabmapconsole.db";
		if(UFile::erase(inputDbPath) == 0)
		{
			printf("Deleted database \"%s\".\n", inputDbPath.c_str());
		}
	}
	else
	{
		printf("Loading database \"%s\".\n", inputDbPath.c_str());
	}

	// Disable statistics (we don't need them)
	pm.insert(ParametersPair(Parameters::kRtabmapPublishStats(), "false"));
	pm.insert(ParametersPair(Parameters::kRGBDEnabled(), "false"));

	rtabmap.init(pm, inputDbPath);

	printf("Avpd init time = %fs\n", timer.ticks());

	// Start thread's task
	int loopClosureId;
	int count = 0;
	int countLoopDetected=0;

	printf("\nParameters : \n");
	printf(" Data set : %s\n", path.c_str());
	printf(" Time threshold = %1.2f ms\n", rtabmap.getTimeThreshold());
	// NOTE(review): with rate==0 this prints "inf" Hz (float division by zero).
	printf(" Image rate = %1.2f s (%1.2f Hz)\n", rate, 1/rate);
	printf(" Repeating data set = %s\n", repeat?"true":"false");
	printf(" Camera starts at image %d (default 1)\n", startAt);
	if(createGT)
	{
		printf(" Creating the ground truth matrix.\n");
	}
	printf(" INFO: All other parameters are set to defaults\n");
	if(pm.size()>1)
	{
		printf(" Overwritten parameters :\n");
		for(ParametersMap::iterator iter = pm.begin(); iter!=pm.end(); ++iter)
		{
			printf(" %s=%s\n",iter->first.c_str(), iter->second.c_str());
		}
	}
	if(rtabmap.getWM().size() || rtabmap.getSTM().size())
	{
		printf("[Warning] RTAB-Map database is not empty (%s)\n", inputDbPath.c_str());
	}
	printf("\nProcessing images...\n");

	UTimer iterationTimer;
	UTimer rtabmapTimer;
	int imagesProcessed = 0;
	std::list<std::vector<float> > teleopActions; // NOTE(review): declared but never used below
	// Outer loop replays the whole dataset (repeat option); inner loop runs
	// until the camera returns an empty image or a signal clears g_forever.
	while(loopDataset <= repeat && g_forever)
	{
		SensorData data = camera->takeImage();
		int i=0;
		double maxIterationTime = 0.0;
		int maxIterationTimeId = 0;
		while(!data.imageRaw().empty() && g_forever)
		{
			++imagesProcessed;
			iterationTimer.start();
			rtabmapTimer.start();
			rtabmap.process(data.imageRaw());
			double rtabmapTime = rtabmapTimer.elapsed();
			loopClosureId = rtabmap.getLoopClosureId();
			if(rtabmap.getLoopClosureId())
			{
				++countLoopDetected;
			}
			// Grab the next image while this iteration's stats are reported.
			data = camera->takeImage();
			if(++count % 100 == 0)
			{
				// Periodic summary + working-memory weights dump.
				printf(" count = %d, loop closures = %d, max time (at %d) = %fs\n",
						count, countLoopDetected, maxIterationTimeId, maxIterationTime);
				maxIterationTime = 0.0;
				maxIterationTimeId = 0;
				std::map<int, int> wm = rtabmap.getWeights();
				printf(" WM(%d)=[", (int)wm.size());
				for(std::map<int, int>::iterator iter=wm.begin(); iter!=wm.end();++iter)
				{
					if(iter != wm.begin())
					{
						printf(";");
					}
					printf("%d,%d", iter->first, iter->second);
				}
				printf("]\n");
			}

			// Update generated ground truth matrix
			if(createGT)
			{
				if(loopClosureId > 0)
				{
					groundTruth.insert(std::make_pair(i, loopClosureId-1));
				}
			}

			++i;

			double iterationTime = iterationTimer.ticks();

			if(rtabmapTime > maxIterationTime)
			{
				maxIterationTime = rtabmapTime;
				maxIterationTimeId = count;
			}

			ULogger::flush();

			// Per-iteration report: accepted loop closure, highest hypothesis,
			// or plain timing.
			if(rtabmap.getLoopClosureId())
			{
				printf(" iteration(%d) loop(%d) hyp(%.2f) time=%fs/%fs *\n",
						count, rtabmap.getLoopClosureId(), rtabmap.getLoopClosureValue(), rtabmapTime, iterationTime);
			}
			else if(rtabmap.getHighestHypothesisId())
			{
				printf(" iteration(%d) high(%d) hyp(%.2f) time=%fs/%fs\n",
						count, rtabmap.getHighestHypothesisId(), rtabmap.getHighestHypothesisValue(), rtabmapTime, iterationTime);
			}
			else
			{
				printf(" iteration(%d) time=%fs/%fs\n", count, rtabmapTime, iterationTime);
			}

			if(rtabmap.getTimeThreshold() && rtabmapTime > rtabmap.getTimeThreshold()*100.0f)
			{
				printf(" ERROR, there is problem, too much time taken... %fs", rtabmapTime);
				break; // there is problem, don't continue
			}
		}
		++loopDataset;
		if(loopDataset <= repeat)
		{
			// Restart the camera from the beginning of the dataset.
			camera->init();
			printf(" Beginning loop %d...\n", loopDataset);
		}
	}
	printf("Processing images completed. Loop closures found = %d\n", countLoopDetected);
	printf(" Total time = %fs\n", timer.ticks());

	if(imagesProcessed && createGT)
	{
		// Square binary matrix: cell (i, j) = 255 when image i closed a loop with j.
		cv::Mat groundTruthMat = cv::Mat::zeros(imagesProcessed, imagesProcessed, CV_8U);
		for(std::map<int, int>::iterator iter = groundTruth.begin(); iter!=groundTruth.end(); ++iter)
		{
			groundTruthMat.at<unsigned char>(iter->first, iter->second) = 255;
		}

		// Generate the ground truth file
		printf("Generate ground truth to file %s, size of %d\n", GENERATED_GT_NAME, groundTruthMat.rows);
		cv::imwrite(GENERATED_GT_NAME, groundTruthMat);
		printf(" Creating ground truth file = %fs\n", timer.ticks());
	}

	if(camera)
	{
		delete camera;
		camera = 0 ;
	}

	rtabmap.close();

	printf(" Cleanup time = %fs\n", timer.ticks());
	printf("Database (\"%s\") and log files saved to current directory.\n", inputDbPath.c_str());

	return 0;
}
// Returns the next odometry event read from the database. The returned event
// is default-constructed (invalid) when the driver is not initialized, the
// reader was killed, or all node ids have been consumed. Also throttles the
// read rate according to _frameRate: <0 replays using the stamps stored in
// the database, >0 enforces a fixed rate, 0 does not wait.
OdometryEvent DBReader::getNextData()
{
	OdometryEvent odom;
	if(_dbDriver)
	{
		if(!this->isKilled() && _currentId != _ids.end())
		{
			int mapId;
			SensorData data;
			_dbDriver->getNodeData(*_currentId, data);

			// info
			Transform pose;
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp);

			cv::Mat infMatrix = cv::Mat::eye(6,6,CV_64FC1);
			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					infMatrix = links.begin()->second.infMatrix();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(data.imageCompressed().empty())
			{
				// FIX: log the id that was actually read (seq). The previous
				// code dereferenced _currentId after the increment above, which
				// reported the wrong id and was undefined behavior when the
				// incremented iterator reached _ids.end().
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			// Frame rate
			if(_frameRate < 0.0f)
			{
				// Replay using database stamps: sleep for the inter-frame
				// interval recorded in the database.
				if(stamp == 0)
				{
					UERROR("The option to use database stamps is set (framerate<0), but there are no stamps saved in the database! Aborting...");
					this->kill();
				}
				else if(_previousMapID == mapId && _previousStamp > 0)
				{
					int sleepTime = 1000.0*(stamp-_previousStamp) - 1000.0*_timer.getElapsedTime();
					if(sleepTime > 10000)
					{
						UWARN("Detected long delay (%d sec, stamps = %f vs %f). Waiting a maximum of 10 seconds.", sleepTime/1000, _previousStamp, stamp);
						sleepTime = 10000;
					}
					if(sleepTime > 2)
					{
						uSleep(sleepTime-2); // coarse sleep, finished by the busy-wait below
					}

					// Add precision at the cost of a small overhead
					while(_timer.getElapsedTime() < (stamp-_previousStamp)-0.000001)
					{
						//
					}

					double slept = _timer.getElapsedTime();
					_timer.start();
					UDEBUG("slept=%fs vs target=%fs", slept, stamp-_previousStamp);
				}
				_previousStamp = stamp;
				_previousMapID = mapId;
			}
			else if(_frameRate>0.0f)
			{
				// Fixed-rate replay.
				int sleepTime = (1000.0f/_frameRate - 1000.0f*_timer.getElapsedTime());
				if(sleepTime > 2)
				{
					uSleep(sleepTime-2); // coarse sleep, finished by the busy-wait below
				}

				// Add precision at the cost of a small overhead
				while(_timer.getElapsedTime() < 1.0/double(_frameRate)-0.000001)
				{
					//
				}

				double slept = _timer.getElapsedTime();
				_timer.start();
				UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_frameRate));
			}

			if(!this->isKilled())
			{
				data.uncompressData();
				data.setId(seq);
				data.setStamp(stamp);
				UDEBUG("Laser=%d RGB/Left=%d Depth/Right=%d, UserData=%d",
						data.laserScanRaw().empty()?0:1,
						data.imageRaw().empty()?0:1,
						data.depthOrRightRaw().empty()?0:1,
						data.userDataRaw().empty()?0:1);
				odom = OdometryEvent(data, pose, infMatrix.inv());
			}
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return odom;
}
void writeQueue() { SensorData* data; std::ofstream ofG2O(&filename[0]); geometry_msgs::TransformStamped msg; int num = 0; // this is the vertex where we are packing the data g2o::VertexSE3* activeVertex = 0; // this is the timestamp of the first measurement added to the vertex double activeVertexTime=0; // this is the previous vertex g2o::VertexSE3* previousVertex = 0; // this is the timestamp of the first measurement added to the previous vertex double previousVertexTime=0; // the last position of the robot (not of the vertex, needed to measure the distances) Eigen::Isometry3d lastRobotPose; // set of sensors we packed in the current data. // We do not want to put 10 camera images of the same camera in the same vertex. std::set<Sensor*> addedSensors; Eigen::Vector2d distances(0.,0); while (true) { if(! _queue.empty()) { data = (SensorData*)_queue.front(); double timeNow = _queue.lastElementTime(); conditionalPrint(annoyingLevel) << "size=" << _queue.size() << " lastTime=" << FIXED(timeNow) << endl; if (timeNow - data->timeStamp()> initialDelay) { // we have enough stuff in the queue _queue.pop_front(); if (! nptr->ok()) continue; tf::StampedTransform transform; bool we_got_transf = false; try { ros::Time timeStamp; // Get transformation (*tfListener).lookupTransform("/odom", "/base_link", timeStamp.fromSec(data->timeStamp()), transform); we_got_transf = true; } catch (tf::TransformException & ex) { ROS_ERROR("%s", ex.what()); } if (! 
we_got_transf) continue; Eigen::Isometry3d currentRobotPose = fromStampedTransform(transform); double currentDataTime = data->timeStamp(); distances += isometry2distance(lastRobotPose.inverse()*currentRobotPose); double passedTime = currentDataTime-previousVertexTime; lastRobotPose = currentRobotPose; conditionalPrint(annoyingLevel) << "distances: " << distances[0] << " " << distances[1] << " " << passedTime << endl; if (distances[0] < minDistances[0] && distances[1] < minDistances[1] && passedTime < minTime){ conditionalPrint(annoyingLevel) << "reject: (time/distance)" << endl; // SKIP THE FRAME delete data; data = 0; continue; } if (!activeVertex) { activeVertex = new g2o::VertexSE3(); activeVertex->setId(num); activeVertex->setEstimate(fromStampedTransform(transform)); activeVertexTime = currentDataTime; } Sensor* sensor = data->sensor(); assert (sensor && "!"); // check if we already packed the data for this kind of sensor if (addedSensors.count(sensor)){ conditionalPrint(annoyingLevel) << "reject: (sensor) "<< endl; delete data; } else { addedSensors.insert(sensor); Parameter* parameter = sensor->parameter(); assert (parameter && "!@#"); //data->writeOut(filename); if (! graph->parameters().getParameter(parameter->id())){ graph->parameters().addParameter(parameter); graph->saveParameter(ofG2O, parameter); } activeVertex->addUserData(data); data->setDataContainer(activeVertex); } // detach the data from the thing data = 0; if (currentDataTime - activeVertexTime > vertexTimeWindow) { conditionalPrint(annoyingLevel) << "flush" << endl; graph->addVertex(activeVertex); graph->saveVertex(ofG2O, activeVertex); if (previousVertex) { EdgeSE3* e = new EdgeSE3(); e->setVertex(0, previousVertex); e->setVertex(1, activeVertex); e->setMeasurementFromState(); Eigen::Matrix<double, 6,6> m; m.setIdentity(); e->setInformation(m); graph->addEdge(e); graph->saveEdge(ofG2O, e); // JACP: do not do the remove, scan the data list and do a release() of the images which are big. 
The rest can stay in memory g2o::HyperGraph::Data* d = previousVertex->userData(); while (d) { RGBDData* rgbd = dynamic_cast<RGBDData*> (d); if (rgbd) rgbd->release(); d = d->next(); } vertecesQueue.push_back(previousVertex); } previousVertex = activeVertex; previousVertexTime = activeVertexTime; addedSensors.clear(); activeVertex = 0; distances.setZero(); num++; conditionalPrint(defaultLevel) << "."; } } } usleep(20e3); // queue is emp-ty } }