void ViewerState::processCommands() {
  _meHasNewFrame = false;
  refreshFlags();
  updateDrawableParameters();
  if(!wasInitialGuess && !newCloudAdded && drawableFrameVector.size() > 1 &&
     *initialGuessViewer) {
    initialGuessSelected();
    continuousMode = false;
  }
  else if(newCloudAdded && drawableFrameVector.size() > 1 &&
          *optimizeViewer && !(*stepByStepViewer)) {
    optimizeSelected();
    continuousMode = false;
  }
  // Add cloud was pressed.
  else if(*addCloud) {
    addCloudSelected();
    continuousMode = false;
  }
  // Clear buttons were pressed.
  else if(*clearAll) {
    clear();
    *clearAll = 0;
    continuousMode = false;
  }
  else if(*clearLast) {
    clearLastSelected();
    continuousMode = false;
  }
  else if(continuousMode) {
    addNextAndOptimizeSelected();
  }

  // Reset the button flags so stale commands are not processed again.
  *initialGuessViewer = 0;
  *optimizeViewer = 0;
  *addCloud = 0;
  *clearAll = 0;
  *clearLast = 0;

  // Disabled: reposition the camera relative to the first frame.
  if (0 && drawableFrameVector.size()) {
    Eigen::Isometry3f globalT = drawableFrameVector.front()->transformation();
    qglviewer::Vec robotPose(globalT.translation().x(), globalT.translation().y(), globalT.translation().z());
    qglviewer::Vec robotAxisX(globalT.linear()(0,0), globalT.linear()(1,0), globalT.linear()(2,0));
    qglviewer::Vec robotAxisZ(globalT.linear()(0,2), globalT.linear()(1,2), globalT.linear()(2,2));
    pwnGMW->viewer_3d->camera()->setPosition(robotPose + .5*robotAxisZ + .5*robotAxisX);
    pwnGMW->viewer_3d->camera()->setUpVector(robotAxisX + robotAxisZ);
    pwnGMW->viewer_3d->camera()->setViewDirection(robotPose + .5*robotAxisZ + .5*robotAxisX);
  }

  pwnGMW->viewer_3d->updateGL();
}
void DrawableCloud::draw() {
  if(_parameter->show() && _cloud) {
    glPushMatrix();
    glMultMatrixf(_transformation.data());
    if(_drawablePoints)
      _drawablePoints->draw();
    if(_drawableNormals)
      _drawableNormals->draw();
    if(_drawableCovariances)
      _drawableCovariances->draw();
    if(_drawableCorrespondences)
      _drawableCorrespondences->draw();

    glPushMatrix();
    Eigen::Isometry3f sensorOffset;
    sensorOffset.translation() = Eigen::Vector3f(0.0f, 0.0f, 0.0f);
    Eigen::Quaternionf quaternion = Eigen::Quaternionf(-.5f, -0.5f, 0.5f, 0.5f);
    sensorOffset.linear() = quaternion.toRotationMatrix();
    sensorOffset.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;
    glMultMatrixf(sensorOffset.data());
    glColor4f(1, 0, 0, 0.5);
    glPopMatrix();

    glPopMatrix();
  }
}
bool extractAbsolutePrior(Eigen::Isometry3f& priorMean, Matrix6f& priorInfo,
                          const DrawableFrame* current) {
  VertexSE3* currentVertex = current->_vertex;
  ImuData* imuData = 0;
  // Scan the user data attached to the vertex for an IMU measurement.
  OptimizableGraph::Data* d = currentVertex->userData();
  while(d) {
    ImuData* imuData_ = dynamic_cast<ImuData*>(d);
    if (imuData_) {
      imuData = imuData_;
    }
    d = d->next();
  }
  if (imuData) {
    // The IMU provides an absolute orientation prior: copy its rotation into
    // the mean and its inverse covariance into the rotational block of the
    // information matrix.
    Eigen::Matrix3d R = imuData->getOrientation().matrix();
    Eigen::Matrix3d Omega = imuData->getOrientationCovariance().inverse();
    priorMean.setIdentity();
    priorInfo.setZero();
    for (int c = 0; c < 3; c++)
      for (int r = 0; r < 3; r++)
        priorMean.linear()(r, c) = R(r, c);
    for (int c = 0; c < 3; c++)
      for (int r = 0; r < 3; r++)
        priorInfo(r + 3, c + 3) = Omega(r, c);
    return true;
  }
  return false;
}
void DrawableTransformCovariance::updateCovarianceDrawList() {
  GLParameterTransformCovariance *covarianceParameter = dynamic_cast<GLParameterTransformCovariance*>(_parameter);
  glNewList(_covarianceDrawList, GL_COMPILE);
  if(_covariance != Eigen::Matrix3f::Zero() &&
     covarianceParameter &&
     covarianceParameter->show() &&
     covarianceParameter->scale() > 0.0f) {
    float scale = covarianceParameter->scale();
    Eigen::Vector4f color = covarianceParameter->color();
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigenSolver;
    eigenSolver.computeDirect(_covariance, Eigen::ComputeEigenvectors);
    Eigen::Vector3f lambda = eigenSolver.eigenvalues();
    Eigen::Isometry3f I = Eigen::Isometry3f::Identity();
    I.linear() = eigenSolver.eigenvectors();
    I.translation() = Eigen::Vector3f(_mean.x(), _mean.y(), _mean.z());
    float sx = sqrt(lambda[0]) * scale;
    float sy = sqrt(lambda[1]) * scale;
    float sz = sqrt(lambda[2]) * scale;
    glPushMatrix();
    glMultMatrixf(I.data());
    glColor4f(color[0], color[1], color[2], color[3]);
    glScalef(sx, sy, sz);
    glCallList(_sphereDrawList);
    glPopMatrix();
  }
  glEndList();
}
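// The function above (and the other covariance renderers below) all compose the
// same transform: a unit-sphere display list is rotated into the eigenvector
// frame, stretched by sqrt(eigenvalue) along each axis, and moved to the mean,
// which draws a scaled 1-sigma ellipsoid of the covariance. A minimal standalone
// sketch of that composition; ellipsoidModelMatrix is a hypothetical helper, not
// part of these classes, and assumes <Eigen/Dense> is available.
Eigen::Affine3f ellipsoidModelMatrix(const Eigen::Matrix3f &covariance,
                                     const Eigen::Vector3f &mean,
                                     float scale) {
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> solver;
  solver.computeDirect(covariance, Eigen::ComputeEigenvectors);
  Eigen::Affine3f model = Eigen::Affine3f::Identity();
  // Rotate into the eigenvector frame and stretch each axis by sqrt(lambda)*scale.
  model.linear() = solver.eigenvectors() *
                   (solver.eigenvalues().cwiseSqrt() * scale).asDiagonal();
  model.translation() = mean;
  return model;  // equivalent to the glMultMatrixf/glScalef pair used above
}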
void DrawableUncertainty::updateCovarianceDrawList() {
  GLParameterUncertainty *uncertaintyParameter = dynamic_cast<GLParameterUncertainty*>(_parameter);
  glNewList(_covarianceDrawList, GL_COMPILE);
  if(_covarianceDrawList &&
     _covariances &&
     uncertaintyParameter &&
     uncertaintyParameter->ellipsoidScale() > 0.0f) {
    uncertaintyParameter->applyGLParameter();
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigenSolver;
    float ellipsoidScale = uncertaintyParameter->ellipsoidScale();
    for(size_t i = 0; i < _covariances->size(); i += uncertaintyParameter->step()) {
      Gaussian3f &gaussian3f = _covariances->at(i);
      Eigen::Matrix3f covariance = gaussian3f.covarianceMatrix();
      Eigen::Vector3f mean = gaussian3f.mean();
      eigenSolver.computeDirect(covariance, Eigen::ComputeEigenvectors);
      Eigen::Vector3f eigenValues = eigenSolver.eigenvalues();
      Eigen::Isometry3f I = Eigen::Isometry3f::Identity();
      I.linear() = eigenSolver.eigenvectors();
      I.translation() = mean;
      float sx = sqrt(eigenValues[0]) * ellipsoidScale;
      float sy = sqrt(eigenValues[1]) * ellipsoidScale;
      float sz = sqrt(eigenValues[2]) * ellipsoidScale;
      glPushMatrix();
      glMultMatrixf(I.data());
      glScalef(sx, sy, sz);
      glCallList(_sphereDrawList);
      glPopMatrix();
    }
  }
  glEndList();
}
bool LcmTranslator::fromLcm(const drc::map_scan_t& iMessage, LidarScan& oScan) {
  const auto& msg = iMessage;

  // basic info
  oScan.setTimestamp(msg.utime);
  oScan.setAngles(msg.theta_min, msg.theta_step);

  // poses
  Eigen::Isometry3f startPose = Eigen::Isometry3f::Identity();
  Eigen::Isometry3f endPose = Eigen::Isometry3f::Identity();
  for (int k = 0; k < 3; ++k) startPose(k,3) = msg.translation_start[k];
  for (int k = 0; k < 3; ++k) endPose(k,3) = msg.translation_end[k];
  Eigen::Quaternionf startQuat, endQuat;
  // The (k+1)%4 shuffle reorders the message quaternion (stored w-first) into
  // Eigen's (x,y,z,w) coeffs order.
  for (int k = 0; k < 4; ++k) startQuat.coeffs()[k] = msg.quaternion_start[(k+1)%4];
  for (int k = 0; k < 4; ++k) endQuat.coeffs()[k] = msg.quaternion_end[(k+1)%4];
  startPose.linear() = startQuat.matrix();
  endPose.linear() = endQuat.matrix();
  oScan.setPoses(startPose, endPose);

  // ranges
  DataBlob blob;
  if (!fromLcm(msg.range_blob, blob)) return false;
  blob.convertTo(DataBlob::CompressionTypeNone, DataBlob::DataTypeFloat32);
  float* raw = (float*)(&blob.getBytes()[0]);
  std::vector<float> ranges(raw, raw + blob.getBytes().size()/sizeof(float));
  for (auto& r : ranges) r *= msg.range_scale;
  oScan.setRanges(ranges);

  // intensities
  if (fromLcm(msg.intensity_blob, blob)) {
    blob.convertTo(DataBlob::CompressionTypeNone, DataBlob::DataTypeFloat32);
    raw = (float*)blob.getBytes().data();
    std::vector<float> intensities(raw, raw + blob.getBytes().size()/sizeof(float));
    if (intensities.size() == ranges.size()) {
      for (auto& val : intensities) val *= msg.intensity_scale;
      oScan.setIntensities(intensities);
    }
  }

  return true;
}
void draw(const std::shared_ptr<maps::ViewBase>& iView,
          const std::shared_ptr<MeshRenderer>& iMeshRenderer) {
  if (!mVisible) return;

  // set mesh properties
  iMeshRenderer->setRangeOrigin(mLatestTransform.translation());
  iMeshRenderer->setScaleRange(mAttributes.mMinZ, mAttributes.mMaxZ);
  iMeshRenderer->setPointSize(mAttributes.mPointSize);
  Eigen::Projective3f worldToMap = mUseTransform ?
    iView->getTransform() : Eigen::Projective3f::Identity();
  Eigen::Projective3f mapToWorld = worldToMap.inverse();
  Eigen::Matrix3f calib;
  Eigen::Isometry3f pose;
  bool ortho;
  maps::Utils::factorViewMatrix(worldToMap, calib, pose, ortho);
  iMeshRenderer->setNormalZero(-pose.linear().col(2));

  // see whether we need to (and can) get a mesh representation
  bool usePoints = false;
  maps::TriangleMesh::Ptr mesh;
  if (mAttributes.mMeshMode == MeshRenderer::MeshModePoints) {
    usePoints = true;
  }
  else {
    mesh = iView->getAsMesh(!mUseTransform);
    if (mesh == nullptr) usePoints = true;
  }

  // just a point cloud
  if (usePoints) {
    mesh.reset(new maps::TriangleMesh());
    maps::PointCloud::Ptr cloud = iView->getAsPointCloud(!mUseTransform);
    mesh->mVertices.reserve(cloud->size());
    for (size_t i = 0; i < cloud->size(); ++i) {
      mesh->mVertices.push_back((*cloud)[i].getVector3fMap());
    }
  }

  // set up mesh renderer
  iMeshRenderer->setColor(mAttributes.mColor[0], mAttributes.mColor[1],
                          mAttributes.mColor[2]);
  iMeshRenderer->setColorMode((MeshRenderer::ColorMode)mAttributes.mColorMode);
  iMeshRenderer->setMeshMode((MeshRenderer::MeshMode)mAttributes.mMeshMode);
  if (usePoints) iMeshRenderer->setMeshMode(MeshRenderer::MeshModePoints);
  iMeshRenderer->setData(mesh->mVertices, mesh->mNormals, mesh->mFaces, mapToWorld);

  // draw this view's data
  iMeshRenderer->draw();

  drawBounds();
}
void PinholePointProjector::_updateMatrices() {
  Eigen::Isometry3f t = _transform.inverse();
  t.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;
  _iK = _cameraMatrix.inverse();
  // World-to-image: p_image = _KR * p_world + _Kt (up to the perspective divide).
  _KR = _cameraMatrix * t.linear();
  _Kt = _cameraMatrix * t.translation();
  // Image-to-world: p_world = _iKR * (u*d, v*d, d) + _iKt.
  _iKR = _transform.linear() * _iK;
  _iKt = _transform.translation();
  _KRt.setIdentity();
  _iKRt.setIdentity();
  _KRt.block<3, 3>(0, 0) = _KR;
  _KRt.block<3, 1>(0, 3) = _Kt;
  _iKRt.block<3, 3>(0, 0) = _iKR;
  _iKRt.block<3, 1>(0, 3) = _iKt;
}
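// A minimal sketch of how the composed matrices above are meant to be used;
// projectPoint and unprojectPoint are hypothetical free functions, not part of
// PinholePointProjector, and assume <Eigen/Dense>.
inline Eigen::Vector3f projectPoint(const Eigen::Matrix3f &KR,
                                    const Eigen::Vector3f &Kt,
                                    const Eigen::Vector3f &worldPoint) {
  Eigen::Vector3f ip = KR * worldPoint + Kt;
  // Perspective divide: pixel coordinates plus the depth.
  return Eigen::Vector3f(ip.x() / ip.z(), ip.y() / ip.z(), ip.z());
}

inline Eigen::Vector3f unprojectPoint(const Eigen::Matrix3f &iKR,
                                      const Eigen::Vector3f &iKt,
                                      float u, float v, float depth) {
  // Undo the divide, then apply the inverse mapping composed in _updateMatrices().
  return iKR * Eigen::Vector3f(u * depth, v * depth, depth) + iKt;
}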
void NICPQGLViewer::updateCameraPosition(Eigen::Isometry3f pose) {
  qglviewer::Camera *oldcam = camera();
  qglviewer::Camera *cam = new StandardCamera();
  setCamera(cam);
  // Place the camera behind the given pose and make it look ahead of it.
  Eigen::Vector3f position = pose * Eigen::Vector3f(-2.0f, 0.0f, 1.0f);
  cam->setPosition(qglviewer::Vec(position[0], position[1], position[2]));
  Eigen::Vector3f upVector = pose.linear() * Eigen::Vector3f(0.0f, 0.0f, 0.5f);
  upVector.normalize();
  cam->setUpVector(qglviewer::Vec(upVector[0], upVector[1], upVector[2]), true);
  Eigen::Vector3f lookAt = pose * Eigen::Vector3f(4.0f, 0.0f, 0.0f);
  cam->lookAt(qglviewer::Vec(lookAt[0], lookAt[1], lookAt[2]));
  delete oldcam;
}
void DrawableCovariances::updateCovarianceDrawList() {
  GLParameterCovariances *covariancesParameter = dynamic_cast<GLParameterCovariances*>(_parameter);
  glNewList(_covarianceDrawList, GL_COMPILE);
  if(_covariances &&
     covariancesParameter &&
     covariancesParameter->show() &&
     covariancesParameter->ellipsoidScale() > 0.0f) {
    float ellipsoidScale = covariancesParameter->ellipsoidScale();
    Eigen::Vector4f colorLowCurvature = covariancesParameter->colorLowCurvature();
    Eigen::Vector4f colorHighCurvature = covariancesParameter->colorHighCurvature();
    float curvatureThreshold = covariancesParameter->curvatureThreshold();
    for(size_t i = 0; i < _covariances->size(); i += covariancesParameter->step()) {
      Stats cov = _covariances->at(i);
      Eigen::Vector3f lambda = cov.eigenValues();
      Eigen::Isometry3f I = Eigen::Isometry3f::Identity();
      I.linear() = cov.eigenVectors();
      if(cov.n() == 0)
        continue;
      I.translation() = Eigen::Vector3f(cov.mean()[0], cov.mean()[1], cov.mean()[2]);
      float sx = sqrt(lambda[0]) * ellipsoidScale;
      float sy = sqrt(lambda[1]) * ellipsoidScale;
      float sz = sqrt(lambda[2]) * ellipsoidScale;
      float curvature = cov.curvature();
      glPushMatrix();
      glMultMatrixf(I.data());
      if(curvature > curvatureThreshold) {
        // High curvature: draw a small sphere.
        glColor4f(colorHighCurvature[0] - curvature, colorHighCurvature[1],
                  colorHighCurvature[2], colorHighCurvature[3]);
        sx = ellipsoidScale;
        sy = ellipsoidScale;
        sz = ellipsoidScale;
      }
      else {
        // Low curvature: draw a disc flattened along the first eigenvector.
        glColor4f(colorLowCurvature[0], colorLowCurvature[1] - curvature,
                  colorLowCurvature[2], colorLowCurvature[3]);
        sx = 1e-03;
        sy = ellipsoidScale;
        sz = ellipsoidScale;
      }
      glScalef(sx, sy, sz);
      glCallList(_sphereDrawList);
      glPopMatrix();
    }
  }
  glEndList();
}
void ViewerState::addCloudSelected() {
  if(listItem) {
    cerr << "adding a frame" << endl;
    int index = pwnGMW->listWidget->row(listItem);
    cerr << "index: " << index << endl;
    std::string fname = listAssociations[index]->baseFilename() + "_depth.pgm";
    DepthImage depthImage;
    if (!depthImage.load(fname.c_str(), true)) {
      cerr << " skipping " << fname << endl;
      newCloudAdded = false;
      *addCloud = 0;
      return;
    }
    DepthImage scaledDepthImage;
    DepthImage::scale(scaledDepthImage, depthImage, reduction);
    imageRows = scaledDepthImage.rows();
    imageCols = scaledDepthImage.cols();
    correspondenceFinder->setSize(imageRows, imageCols);
    Frame *frame = new Frame();
    converter->compute(*frame, scaledDepthImage, sensorOffset);
    OptimizableGraph::DataContainer* dc = listAssociations[index]->dataContainer();
    cerr << "datacontainer: " << dc << endl;
    VertexSE3* v = dynamic_cast<VertexSE3*>(dc);
    cerr << "vertex of the frame: " << v->id() << endl;
    DrawableFrame* drawableFrame = new DrawableFrame(globalT, drawableFrameParameters, frame);
    drawableFrame->_vertex = v;
    Eigen::Isometry3f priorMean;
    Matrix6f priorInfo;
    bool hasImu = extractAbsolutePrior(priorMean, priorInfo, drawableFrame);
    if (hasImu && drawableFrameVector.size() == 0) {
      // Align the very first frame with the absolute orientation from the IMU.
      globalT.linear() = priorMean.linear();
      cerr << "!!!! i found an imu for the first vertex, me happy" << endl;
      drawableFrame->setTransformation(globalT);
    }
    drawableFrameVector.push_back(drawableFrame);
    pwnGMW->viewer_3d->addDrawable(drawableFrame);
  }
  newCloudAdded = true;
  *addCloud = 0;
  _meHasNewFrame = true;
}
void computeSensorOffsetAndK(Eigen::Isometry3f &sensorOffset,
                             Eigen::Matrix3f &cameraMatrix,
                             ParameterCamera *cameraParam,
                             int reduction) {
  sensorOffset = Isometry3f::Identity();
  cameraMatrix.setZero();
  int cmax = 4;
  int rmax = 3;
  for (int c = 0; c < cmax; c++) {
    for (int r = 0; r < rmax; r++) {
      sensorOffset.matrix()(r, c) = cameraParam->offset()(r, c);
      if (c < 3)
        cameraMatrix(r, c) = cameraParam->Kcam()(r, c);
    }
  }
  // Override the offset read from the parameter with a fixed mounting:
  // 15 cm forward, 5 cm up, optical frame rotated so its z (viewing) axis
  // points along the robot x axis.
  sensorOffset.translation() = Vector3f(0.15f, 0.0f, 0.05f);
  Quaternionf quat = Quaternionf(0.5, -0.5, 0.5, -0.5);
  sensorOffset.linear() = quat.toRotationMatrix();
  sensorOffset.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;
  // Scale the intrinsics to match the reduced image resolution.
  float scale = 1. / reduction;
  cameraMatrix *= scale;
  cameraMatrix(2, 2) = 1;
}
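// A hedged numeric illustration of the intrinsics scaling above; the values
// below are hypothetical (not taken from the code) and assume <Eigen/Dense>.
// With reduction = 2 the focal lengths and principal point are halved to match
// the downscaled depth image, and the homogeneous entry K(2,2) is restored to 1.
inline Eigen::Matrix3f exampleScaledCameraMatrix() {
  Eigen::Matrix3f K;
  K << 525.0f,   0.0f, 319.5f,
         0.0f, 525.0f, 239.5f,
         0.0f,   0.0f,   1.0f;
  int reduction = 2;
  Eigen::Matrix3f scaledK = K * (1.0f / reduction);
  scaledK(2, 2) = 1.0f;  // fx = fy = 262.5, cx = 159.75, cy = 119.75
  return scaledK;
}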
bool Utils::factorViewMatrix(const Eigen::Projective3f& iMatrix,
                             Eigen::Matrix3f& oCalib, Eigen::Isometry3f& oPose,
                             bool& oIsOrthographic) {
  oIsOrthographic = isOrthographic(iMatrix.matrix());

  // get appropriate rows
  std::vector<int> rows = {0,1,2};
  if (!oIsOrthographic) rows[2] = 3;

  // get A matrix (upper left 3x3) and t vector
  Eigen::Matrix3f A;
  Eigen::Vector3f t;
  for (int i = 0; i < 3; ++i) {
    for (int j = 0; j < 3; ++j) {
      A(i,j) = iMatrix(rows[i],j);
    }
    t[i] = iMatrix(rows[i],3);
  }

  // determine translation vector
  oPose.setIdentity();
  oPose.translation() = -(A.inverse()*t);

  // determine calibration matrix
  Eigen::Matrix3f AAtrans = A*A.transpose();
  AAtrans.col(0).swap(AAtrans.col(2));
  AAtrans.row(0).swap(AAtrans.row(2));
  Eigen::LLT<Eigen::Matrix3f, Eigen::Upper> llt(AAtrans);
  oCalib = llt.matrixU();
  oCalib.col(0).swap(oCalib.col(2));
  oCalib.row(0).swap(oCalib.row(2));
  oCalib.transposeInPlace();

  // compute rotation matrix
  oPose.linear() = (oCalib.inverse()*A).transpose();

  return true;
}
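// A minimal consistency sketch for the factorization above; checkFactorization
// is a hypothetical helper, not part of Utils. By construction the selected
// rows satisfy A = calib * R^T and t = -A * translation, so a round trip from
// the outputs should reproduce them up to numerical noise.
inline bool checkFactorization(const Eigen::Projective3f& iMatrix,
                               const Eigen::Matrix3f& calib,
                               const Eigen::Isometry3f& pose,
                               bool isOrthographic) {
  std::vector<int> rows = {0, 1, 2};
  if (!isOrthographic) rows[2] = 3;
  Eigen::Matrix3f A;
  Eigen::Vector3f t;
  for (int i = 0; i < 3; ++i) {
    for (int j = 0; j < 3; ++j) A(i, j) = iMatrix(rows[i], j);
    t[i] = iMatrix(rows[i], 3);
  }
  // Rebuild A and t from the factorization and compare.
  Eigen::Matrix3f Arec = calib * pose.linear().transpose();
  Eigen::Vector3f trec = -Arec * pose.translation();
  return (Arec - A).norm() < 1e-4f * A.norm() &&
         (trec - t).norm() < 1e-4f * (t.norm() + 1.0f);
}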
void ImageMessageListener::imageCallback(const sensor_msgs::ImageConstPtr& in_msg) {
  static int counter = 0; // counter to add periodic line breaks to the progress output

  _image_deque.push_back(*in_msg);
  if (_image_deque.size() < _deque_length) {
    return;
  }
  sensor_msgs::Image msg = _image_deque.front();
  _image_deque.pop_front();

  if (_listener) {
    tf::StampedTransform transform;
    try {
      _listener->waitForTransform(_odom_frame_id,
                                  _base_link_frame_id,
                                  msg.header.stamp,
                                  ros::Duration(0.5));
      _listener->lookupTransform(_odom_frame_id,
                                 _base_link_frame_id,
                                 msg.header.stamp,
                                 transform);
    }
    catch (tf::TransformException& ex) {
      ROS_ERROR("%s", ex.what());
    }
    Eigen::Quaternionf q;
    q.x() = transform.getRotation().x();
    q.y() = transform.getRotation().y();
    q.z() = transform.getRotation().z();
    q.w() = transform.getRotation().w();
    _odom_pose.linear() = q.toRotationMatrix();
    _odom_pose.translation() = Eigen::Vector3f(transform.getOrigin().x(),
                                               transform.getOrigin().y(),
                                               transform.getOrigin().z());
  }

  cv_bridge::CvImageConstPtr bridge;
  bridge = cv_bridge::toCvCopy(msg, msg.encoding);
  if (!_has_camera_matrix) {
    cerr << "received: " << _image_topic << " waiting for camera matrix" << endl;
    return;
  }
  _cv_image = bridge->image.clone();

  PinholeImageMessage* img_msg = new PinholeImageMessage(_image_topic,
                                                         msg.header.frame_id,
                                                         msg.header.seq,
                                                         msg.header.stamp.toSec());
  img_msg->setImage(_cv_image);
  img_msg->setOffset(_camera_offset);
  img_msg->setCameraMatrix(_K);
  img_msg->setDepthScale(_depth_scale);
  if (_imu_interpolator && _listener) {
    Eigen::Isometry3f imu = Eigen::Isometry3f::Identity();
    Eigen::Quaternionf q_imu;
    // Note: img_msg is not freed if no IMU orientation is available here.
    if (!_imu_interpolator->getOrientation(q_imu, msg.header.stamp.toSec()))
      return;
    imu.linear() = q_imu.toRotationMatrix();
    if (_verbose) {
      cerr << "i";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
    img_msg->setImu(imu);
  } else if (_listener) {
    img_msg->setOdometry(_odom_pose);
    if (_verbose) {
      cerr << "o";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
  } else {
    if (_verbose) {
      cerr << "x";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
  }
  _sorter->insertMessage(img_msg);
}
BlockFitter::Result BlockFitter::go() {
  Result result;
  result.mSuccess = false;

  if (mCloud->size() < 100) return result;

  // voxelize
  LabeledCloud::Ptr cloud(new LabeledCloud());
  pcl::VoxelGrid<pcl::PointXYZL> voxelGrid;
  voxelGrid.setInputCloud(mCloud);
  voxelGrid.setLeafSize(mDownsampleResolution, mDownsampleResolution,
                        mDownsampleResolution);
  voxelGrid.filter(*cloud);
  for (int i = 0; i < (int)cloud->size(); ++i) cloud->points[i].label = i;

  if (mDebug) {
    std::cout << "Original cloud size " << mCloud->size() << std::endl;
    std::cout << "Voxelized cloud size " << cloud->size() << std::endl;
    pcl::io::savePCDFileBinary("cloud_full.pcd", *cloud);
  }

  if (cloud->size() < 100) return result;

  // pose
  cloud->sensor_origin_.head<3>() = mOrigin;
  cloud->sensor_origin_[3] = 1;
  Eigen::Vector3f rz = mLookDir;
  Eigen::Vector3f rx = rz.cross(Eigen::Vector3f::UnitZ());
  Eigen::Vector3f ry = rz.cross(rx);
  Eigen::Matrix3f rotation;
  rotation.col(0) = rx.normalized();
  rotation.col(1) = ry.normalized();
  rotation.col(2) = rz.normalized();
  Eigen::Isometry3f pose = Eigen::Isometry3f::Identity();
  pose.linear() = rotation;
  pose.translation() = mOrigin;

  // ground removal
  if (mRemoveGround) {
    Eigen::Vector4f groundPlane;

    // filter points
    float minZ = mMinGroundZ;
    float maxZ = mMaxGroundZ;
    if ((minZ > 10000) && (maxZ > 10000)) {
      std::vector<float> zVals(cloud->size());
      for (int i = 0; i < (int)cloud->size(); ++i) {
        zVals[i] = cloud->points[i].z;
      }
      std::sort(zVals.begin(), zVals.end());
      minZ = zVals[0]-0.1;
      maxZ = minZ + 0.5;
    }
    LabeledCloud::Ptr tempCloud(new LabeledCloud());
    for (int i = 0; i < (int)cloud->size(); ++i) {
      const Eigen::Vector3f& p = cloud->points[i].getVector3fMap();
      if ((p[2] < minZ) || (p[2] > maxZ)) continue;
      tempCloud->push_back(cloud->points[i]);
    }

    // downsample
    voxelGrid.setInputCloud(tempCloud);
    voxelGrid.setLeafSize(0.1, 0.1, 0.1);
    voxelGrid.filter(*tempCloud);
    if (tempCloud->size() < 100) return result;

    // find ground plane
    std::vector<Eigen::Vector3f> pts(tempCloud->size());
    for (int i = 0; i < (int)tempCloud->size(); ++i) {
      pts[i] = tempCloud->points[i].getVector3fMap();
    }
    const float kGroundPlaneDistanceThresh = 0.01; // TODO: param
    PlaneFitter planeFitter;
    planeFitter.setMaxDistance(kGroundPlaneDistanceThresh);
    planeFitter.setRefineUsingInliers(true);
    auto res = planeFitter.go(pts);
    groundPlane = res.mPlane;
    if (groundPlane[2] < 0) groundPlane = -groundPlane;
    if (mDebug) {
      std::cout << "dominant plane: " << groundPlane.transpose() << std::endl;
      std::cout << " inliers: " << res.mInliers.size() << std::endl;
    }

    if (std::acos(groundPlane[2]) > 30*M_PI/180) {
      std::cout << "error: ground plane not found!" << std::endl;
      std::cout << "proceeding with full segmentation (may take a while)..."
                << std::endl;
    }
    else {
      // compute convex hull
      result.mGroundPlane = groundPlane;
      {
        tempCloud.reset(new LabeledCloud());
        for (int i = 0; i < (int)cloud->size(); ++i) {
          Eigen::Vector3f p = cloud->points[i].getVector3fMap();
          float dist = groundPlane.head<3>().dot(p) + groundPlane[3];
          if (std::abs(dist) > kGroundPlaneDistanceThresh) continue;
          p -= (groundPlane.head<3>()*dist);
          pcl::PointXYZL cloudPt;
          cloudPt.getVector3fMap() = p;
          tempCloud->push_back(cloudPt);
        }
        pcl::ConvexHull<pcl::PointXYZL> chull;
        pcl::PointCloud<pcl::PointXYZL> hull;
        chull.setInputCloud(tempCloud);
        chull.reconstruct(hull);
        result.mGroundPolygon.resize(hull.size());
        for (int i = 0; i < (int)hull.size(); ++i) {
          result.mGroundPolygon[i] = hull[i].getVector3fMap();
        }
      }

      // remove points below or near ground
      tempCloud.reset(new LabeledCloud());
      for (int i = 0; i < (int)cloud->size(); ++i) {
        Eigen::Vector3f p = cloud->points[i].getVector3fMap();
        float dist = p.dot(groundPlane.head<3>()) + groundPlane[3];
        if ((dist < mMinHeightAboveGround) ||
            (dist > mMaxHeightAboveGround)) continue;
        float range = (p-mOrigin).norm();
        if (range > mMaxRange) continue;
        tempCloud->push_back(cloud->points[i]);
      }
      std::swap(tempCloud, cloud);
      if (mDebug) {
        std::cout << "Filtered cloud size " << cloud->size() << std::endl;
      }
    }
  }

  // normal estimation
  auto t0 = std::chrono::high_resolution_clock::now();
  if (mDebug) {
    std::cout << "computing normals..." << std::flush;
  }
  RobustNormalEstimator normalEstimator;
  normalEstimator.setMaxEstimationError(0.01);
  normalEstimator.setRadius(0.1);
  normalEstimator.setMaxCenterError(0.02);
  normalEstimator.setMaxIterations(100);
  NormalCloud::Ptr normals(new NormalCloud());
  normalEstimator.go(cloud, *normals);
  if (mDebug) {
    auto t1 = std::chrono::high_resolution_clock::now();
    auto dt = std::chrono::duration_cast<std::chrono::milliseconds>(t1-t0);
    std::cout << "finished in " << dt.count()/1e3 << " sec" << std::endl;
  }

  // filter non-horizontal points
  const float maxNormalAngle = mMaxAngleFromHorizontal*M_PI/180;
  LabeledCloud::Ptr tempCloud(new LabeledCloud());
  NormalCloud::Ptr tempNormals(new NormalCloud());
  for (int i = 0; i < (int)normals->size(); ++i) {
    const auto& norm = normals->points[i];
    Eigen::Vector3f normal(norm.normal_x, norm.normal_y, norm.normal_z);
    float angle = std::acos(normal[2]);
    if (angle > maxNormalAngle) continue;
    tempCloud->push_back(cloud->points[i]);
    tempNormals->push_back(normals->points[i]);
  }
  std::swap(tempCloud, cloud);
  std::swap(tempNormals, normals);
  if (mDebug) {
    std::cout << "Horizontal points remaining " << cloud->size() << std::endl;
    pcl::io::savePCDFileBinary("cloud.pcd", *cloud);
    pcl::io::savePCDFileBinary("robust_normals.pcd", *normals);
  }

  // plane segmentation
  t0 = std::chrono::high_resolution_clock::now();
  if (mDebug) {
    std::cout << "segmenting planes..." << std::flush;
  }
  PlaneSegmenter segmenter;
  segmenter.setData(cloud, normals);
  segmenter.setMaxError(0.05);
  segmenter.setMaxAngle(5);
  segmenter.setMinPoints(100);
  PlaneSegmenter::Result segmenterResult = segmenter.go();
  if (mDebug) {
    auto t1 = std::chrono::high_resolution_clock::now();
    auto dt = std::chrono::duration_cast<std::chrono::milliseconds>(t1-t0);
    std::cout << "finished in " << dt.count()/1e3 << " sec" << std::endl;
    std::ofstream ofs("labels.txt");
    for (const int lab : segmenterResult.mLabels) {
      ofs << lab << std::endl;
    }
    ofs.close();

    ofs.open("planes.txt");
    for (auto it : segmenterResult.mPlanes) {
      auto& plane = it.second;
      ofs << it.first << " " << plane.transpose() << std::endl;
    }
    ofs.close();
  }

  // create point clouds
  std::unordered_map<int,std::vector<Eigen::Vector3f>> cloudMap;
  for (int i = 0; i < (int)segmenterResult.mLabels.size(); ++i) {
    int label = segmenterResult.mLabels[i];
    if (label <= 0) continue;
    cloudMap[label].push_back(cloud->points[i].getVector3fMap());
  }
  struct Plane {
    MatrixX3f mPoints;
    Eigen::Vector4f mPlane;
  };
  std::vector<Plane> planes;
  planes.reserve(cloudMap.size());
  for (auto it : cloudMap) {
    int n = it.second.size();
    Plane plane;
    plane.mPoints.resize(n,3);
    for (int i = 0; i < n; ++i) plane.mPoints.row(i) = it.second[i];
    plane.mPlane = segmenterResult.mPlanes[it.first];
    planes.push_back(plane);
  }

  std::vector<RectangleFitter::Result> results;
  for (auto& plane : planes) {
    RectangleFitter fitter;
    fitter.setDimensions(mBlockDimensions.head<2>());
    fitter.setAlgorithm((RectangleFitter::Algorithm)mRectangleFitAlgorithm);
    fitter.setData(plane.mPoints, plane.mPlane);
    auto result = fitter.go();
    results.push_back(result);
  }

  if (mDebug) {
    std::ofstream ofs("boxes.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      for (auto& p : res.mPolygon) {
        ofs << i << " " << p.transpose() << std::endl;
      }
    }
    ofs.close();

    ofs.open("hulls.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      for (auto& p : res.mConvexHull) {
        ofs << i << " " << p.transpose() << std::endl;
      }
    }
    ofs.close();

    ofs.open("poses.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      auto transform = res.mPose;
      ofs << transform.matrix() << std::endl;
    }
    ofs.close();
  }

  for (int i = 0; i < (int)results.size(); ++i) {
    const auto& res = results[i];
    if (mBlockDimensions.head<2>().norm() > 1e-5) {
      float areaRatio = mBlockDimensions.head<2>().prod()/res.mConvexArea;
      if ((areaRatio < mAreaThreshMin) ||
          (areaRatio > mAreaThreshMax)) continue;
    }

    Block block;
    block.mSize << res.mSize[0], res.mSize[1], mBlockDimensions[2];
    block.mPose = res.mPose;
    block.mPose.translation() -=
      block.mPose.rotation().col(2)*mBlockDimensions[2]/2;
    block.mHull = res.mConvexHull;
    result.mBlocks.push_back(block);
  }
  if (mDebug) {
    std::cout << "Surviving blocks: " << result.mBlocks.size() << std::endl;
  }

  result.mSuccess = true;
  return result;
}
gtsam::Pose3 Convert(const Eigen::Isometry3f& iso) {
  gtsam::Matrix3 mat = iso.linear().cast<double>();
  Eigen::Vector3d trans = iso.translation().cast<double>();
  return gtsam::Pose3(mat, gtsam::Point3(trans));
}
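// A companion sketch for the opposite direction; ConvertBack is a hypothetical
// name, not taken from the source. It assumes gtsam::Pose3::matrix(), which
// yields the 4x4 homogeneous transform that maps directly onto an Eigen isometry.
Eigen::Isometry3f ConvertBack(const gtsam::Pose3& pose) {
  Eigen::Isometry3f iso = Eigen::Isometry3f::Identity();
  iso.matrix() = pose.matrix().cast<float>();
  return iso;
}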