void ViewerState::processCommands() {
  _meHasNewFrame = false;
  refreshFlags();
  updateDrawableParameters();
  if (!wasInitialGuess && !newCloudAdded && drawableFrameVector.size() > 1 && *initialGuessViewer) {
    initialGuessSelected();
    continuousMode = false;
  } else if (newCloudAdded && drawableFrameVector.size() > 1 && *optimizeViewer && !(*stepByStepViewer)) {
    optimizeSelected();
    continuousMode = false;
  } else if (*addCloud) {  // "Add cloud" was pressed.
    addCloudSelected();
    continuousMode = false;
  } else if (*clearAll) {  // One of the clear buttons was pressed.
    clear();
    *clearAll = 0;
    continuousMode = false;
  } else if (*clearLast) {
    clearLastSelected();
    continuousMode = false;
  } else if (continuousMode) {
    addNextAndOptimizeSelected();
  }
  // Reset the command flags so that stale commands are not processed again.
  *initialGuessViewer = 0;
  *optimizeViewer = 0;
  *addCloud = 0;
  *clearAll = 0;
  *clearLast = 0;
  // Disabled block: would make the camera follow the robot pose.
  if (0 && drawableFrameVector.size()) {
    Eigen::Isometry3f globalT = drawableFrameVector.front()->transformation();
    qglviewer::Vec robotPose(globalT.translation().x(), globalT.translation().y(), globalT.translation().z());
    qglviewer::Vec robotAxisX(globalT.linear()(0, 0), globalT.linear()(1, 0), globalT.linear()(2, 0));
    qglviewer::Vec robotAxisZ(globalT.linear()(0, 2), globalT.linear()(1, 2), globalT.linear()(2, 2));
    pwnGMW->viewer_3d->camera()->setPosition(robotPose + .5 * robotAxisZ + .5 * robotAxisX);
    pwnGMW->viewer_3d->camera()->setUpVector(robotAxisX + robotAxisZ);
    pwnGMW->viewer_3d->camera()->setViewDirection(robotPose + .5 * robotAxisZ + .5 * robotAxisX);
  }
  pwnGMW->viewer_3d->updateGL();
}
int main(int argc, char** argv) {
  string dirname;
  string graphFilename;
  int numThreads;
  int ng_step;
  int ng_minPoints;
  int ng_imageRadius;
  float ng_worldRadius;
  float ng_maxCurvature;
  float al_inlierDistance;
  float al_inlierCurvatureRatio;
  float al_inlierNormalAngle;
  float al_inlierMaxChi2;
  float al_scale;
  float al_flatCurvatureThreshold;
  int al_outerIterations;
  int al_nonLinearIterations;
  int al_linearIterations;
  int al_minInliers;
  float al_lambda;
  int al_debug;
  PointWithNormalStatistcsGenerator normalGenerator;
  PointWithNormalAligner aligner;

  g2o::CommandArgs arg;
  arg.param("ng_step", ng_step, normalGenerator.step(), "compute a normal every x pixels");
  arg.param("ng_minPoints", ng_minPoints, normalGenerator.minPoints(), "minimum number of points in a region to compute the normal");
  arg.param("ng_imageRadius", ng_imageRadius, normalGenerator.imageRadius(), "radius (in pixels) of the region used to compute the normal of a pixel");
  arg.param("ng_worldRadius", ng_worldRadius, normalGenerator.worldRadius(), "radius of a ball in the world used to compute the normal of a pixel");
  arg.param("ng_maxCurvature", ng_maxCurvature, normalGenerator.maxCurvature(), "above this threshold the normal is not computed");
  arg.param("al_inlierDistance", al_inlierDistance, aligner.inlierDistanceThreshold(), "max metric distance between two points to regard them as inliers");
  arg.param("al_inlierCurvatureRatio", al_inlierCurvatureRatio, aligner.inlierCurvatureRatioThreshold(), "max ratio between the curvatures of two points to regard them as inliers");
  arg.param("al_inlierNormalAngle", al_inlierNormalAngle, aligner.inlierNormalAngularThreshold(), "max angle between the normals of two points to regard them as inliers");
  arg.param("al_inlierMaxChi2", al_inlierMaxChi2, aligner.inlierMaxChi2(), "max chi2 of a correspondence to regard it as an inlier");
  arg.param("al_minInliers", al_minInliers, aligner.minInliers(), "minimum number of inliers to do the matching");
  arg.param("al_scale", al_scale, aligner.scale(), "scale of the range image for the alignment");
  arg.param("al_flatCurvatureThreshold", al_flatCurvatureThreshold, aligner.flatCurvatureThreshold(), "curvature above which the patches are not considered planar");
  arg.param("al_outerIterations", al_outerIterations, aligner.outerIterations(), "outer iterations (incl. data association)");
  arg.param("al_linearIterations", al_linearIterations, aligner.linearIterations(), "linear iterations for each outer one (uses R,t)");
  arg.param("al_nonLinearIterations", al_nonLinearIterations, aligner.nonLinearIterations(), "nonlinear iterations for each outer one (uses q,t)");
  arg.param("al_lambda", al_lambda, aligner.lambda(), "damping factor for the transformation update, the higher the smaller the step");
  arg.param("al_debug", al_debug, aligner.debug(), "prints lots of stuff");
  arg.param("numThreads", numThreads, 1, "number of threads for openmp");
  arg.paramLeftOver("dirname", dirname, "", "", true);
  arg.paramLeftOver("graph-file", graphFilename, "out.g2o", "graph-file", true);
  arg.parseArgs(argc, argv);
  if (numThreads < 1)  // clamp after parsing, so the user-supplied value is the one checked
    numThreads = 1;
  cerr << "dirname " << dirname << endl;

  std::vector<string> filenames;
  std::set<string> filenamesset = readDir(dirname);
  for (set<string>::const_iterator it = filenamesset.begin(); it != filenamesset.end(); it++) {
    filenames.push_back(*it);
  }

  normalGenerator.setStep(ng_step);
  normalGenerator.setMinPoints(ng_minPoints);
  normalGenerator.setImageRadius(ng_imageRadius);
  normalGenerator.setWorldRadius(ng_worldRadius);
  normalGenerator.setMaxCurvature(ng_maxCurvature);
#ifdef _PWN_USE_OPENMP_
  normalGenerator.setNumThreads(numThreads);
#endif //_PWN_USE_OPENMP_

  aligner.setInlierDistanceThreshold(al_inlierDistance);
  aligner.setInlierCurvatureRatioThreshold(al_inlierCurvatureRatio);
  aligner.setInlierNormalAngularThreshold(al_inlierNormalAngle);
  aligner.setInlierMaxChi2(al_inlierMaxChi2);
  aligner.setMinInliers(al_minInliers);
  aligner.setScale(al_scale);
  aligner.setFlatCurvatureThreshold(al_flatCurvatureThreshold);
  aligner.setOuterIterations(al_outerIterations);
  aligner.setLinearIterations(al_linearIterations);
  aligner.setNonLinearIterations(al_nonLinearIterations);
  aligner.setLambda(al_lambda);
  aligner.setDebug(al_debug);
#ifdef _PWN_USE_OPENMP_
  aligner.setNumThreads(numThreads);
#endif //_PWN_USE_OPENMP_

  Frame* referenceFrame = 0;
  Eigen::Matrix3f cameraMatrix;
  cameraMatrix << 525.0f, 0.0f, 319.5f,
                  0.0f, 525.0f, 239.5f,
                  0.0f, 0.0f, 1.0f;
  ostringstream os;  // graph buffer; flushed to a file on each split and at the end
  cerr << "there are " << filenames.size() << " files in the pool" << endl;

  Isometry3f trajectory;
  trajectory.setIdentity();
  int previousIndex = -1;
  int graphNum = 0;
  int nFrames = 0;
  string baseFilename = graphFilename.substr(0, graphFilename.find_last_of('.'));

  for (size_t i = 0; i < filenames.size(); i++) {
    cerr << endl << endl << endl;
    cerr << ">>>>>>>>>>>>>>>>>>>>>>>> PROCESSING " << filenames[i] << " <<<<<<<<<<<<<<<<<<<<" << endl;
    Frame* currentFrame = new Frame();
    if (!currentFrame->load(filenames[i])) {
      cerr << "failure in loading image: " << filenames[i] << ", skipping" << endl;
      delete currentFrame;
      continue;
    }
    nFrames++;
    if (!referenceFrame) {
      os << "PARAMS_CAMERACALIB 0 0 0 0 0 0 0 1 525 525 319.5 239.5" << endl;
      trajectory.setIdentity();
    }
    { // write the vertex for the current frame
      Vector6f x = t2v(trajectory);
      Vector3f t = x.head<3>();
      Vector3f mq = x.tail<3>();
      float w = mq.squaredNorm();
      if (w > 1) {  // clamp: numerical noise can push |q| slightly above 1
        mq.setZero();
        w = 1.0f;
      } else {
        w = sqrt(1 - w);
      }
      os << "VERTEX_SE3:QUAT " << i << " " << t.transpose() << " " << mq.transpose() << " " << w << endl;
      os << "DEPTH_IMAGE_DATA 0 " << filenames[i] << " 0 0 " << endl;
    }
    currentFrame->computeStats(normalGenerator, cameraMatrix);
    if (referenceFrame) {
      referenceFrame->setAligner(aligner, true);
      currentFrame->setAligner(aligner, false);
      Matrix6f omega;
      Vector6f mean;
      float tratio;
      float rratio;
      aligner.setImageSize(currentFrame->depthImage.rows(), currentFrame->depthImage.cols());
      Eigen::Isometry3f X;
      X.setIdentity();
      double ostart = get_time();
      float error;
      int result = aligner.align(error, X, mean, omega, tratio, rratio);
      cerr << "inliers=" << result << " error/inliers: " << error / result << endl;
      cerr << "localTransform : " << endl;
      cerr << X.inverse().matrix() << endl;
      trajectory = trajectory * X;
      cerr << "globalTransform: " << endl;
      cerr << trajectory.matrix() << endl;
      double oend = get_time();
      cerr << "alignment took: " << oend - ostart << " sec." << endl;
      cerr << "aligner scaled image size: " << aligner.scaledImageRows() << " " << aligner.scaledImageCols() << endl;
      if (rratio < 100 && tratio < 100) {
        // write the edge between the previous and the current frame
        Vector6f x = mean;
        Vector3f t = x.head<3>();
        Vector3f mq = x.tail<3>();
        float w = mq.squaredNorm();
        if (w > 1) {
          mq.setZero();
          w = 1.0f;
        } else {
          w = sqrt(1 - w);
        }
        os << "EDGE_SE3:QUAT " << previousIndex << " " << i << " ";
        os << t.transpose() << " " << mq.transpose() << " " << w << " ";
        for (int r = 0; r < 6; r++) {  // upper triangle of the information matrix
          for (int c = r; c < 6; c++) {
            os << omega(r, c) << " ";
          }
        }
        os << endl;
      } else {
        // The alignment is unreliable: flush the current graph and start a new one.
        if (nFrames > 10) {
          char buf[1024];
          sprintf(buf, "%s-%03d.g2o", baseFilename.c_str(), graphNum);
          ofstream gs(buf);
          gs << os.str();
          gs.close();
        }
        os.str("");
        os.clear();
        if (referenceFrame)
          delete referenceFrame;
        referenceFrame = 0;
        graphNum++;
        nFrames = 0;
      }
    }
    previousIndex = i;
    if (referenceFrame)
      delete referenceFrame;
    referenceFrame = currentFrame;
  }

  char buf[1024];
  sprintf(buf, "%s-%03d.g2o", baseFilename.c_str(), graphNum);
  cerr << "saving final frames, n: " << nFrames << " in file [" << buf << "]" << endl;
  cerr << "filesize:" << os.str().length() << endl;
  ofstream gs(buf);
  gs << os.str();
  cout << os.str();
  gs.close();
}
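// Aside (not part of the original file): the vertex/edge writer above stores a
// rotation as the vector part (qx, qy, qz) of a unit quaternion and recovers
// qw = sqrt(1 - |qvec|^2) on output, which is valid whenever qw >= 0. A minimal
// self-contained sketch of that convention, with a helper name of our own
// choosing (assumes <Eigen/Geometry>, <cmath>, <algorithm>):
static inline Eigen::Vector4f packUnitQuaternion(const Eigen::Matrix3f& R) {
  Eigen::Quaternionf q(R);
  if (q.w() < 0.0f)
    q.coeffs() *= -1.0f;  // force qw >= 0, so the vector part alone is enough
  float w = std::sqrt(std::max(0.0f, 1.0f - q.vec().squaredNorm()));
  return Eigen::Vector4f(q.x(), q.y(), q.z(), w);  // (qx, qy, qz, recovered qw)
}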
void ICPOdometry::getIncrementalTransformation(Eigen::Vector3f & trans,
                                               Eigen::Matrix<float, 3, 3, Eigen::RowMajor> & rot,
                                               int threads, int blocks) {
  // Coarse-to-fine iteration counts per pyramid level.
  iterations[0] = 10;
  iterations[1] = 5;
  iterations[2] = 4;

  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev = rot;
  Eigen::Vector3f tprev = trans;
  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcurr = Rprev;
  Eigen::Vector3f tcurr = tprev;
  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev_inv = Rprev.inverse();
  Mat33 & device_Rprev_inv = device_cast<Mat33>(Rprev_inv);
  float3 & device_tprev = device_cast<float3>(tprev);
  cv::Mat resultRt = cv::Mat::eye(4, 4, CV_64FC1);

  for (int i = NUM_PYRS - 1; i >= 0; i--) {
    for (int j = 0; j < iterations[i]; j++) {
      Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_icp;
      Eigen::Matrix<float, 6, 1> b_icp;
      Mat33 & device_Rcurr = device_cast<Mat33>(Rcurr);
      float3 & device_tcurr = device_cast<float3>(tcurr);
      DeviceArray2D<float> & vmap_curr = vmaps_curr_[i];
      DeviceArray2D<float> & nmap_curr = nmaps_curr_[i];
      DeviceArray2D<float> & vmap_g_prev = vmaps_g_prev_[i];
      DeviceArray2D<float> & nmap_g_prev = nmaps_g_prev_[i];
      float residual[2];
      icpStep(device_Rcurr, device_tcurr, vmap_curr, nmap_curr, device_Rprev_inv, device_tprev,
              intr(i), vmap_g_prev, nmap_g_prev, distThres_, angleThres_, sumData, outData,
              A_icp.data(), b_icp.data(), &residual[0], threads, blocks);
      lastICPError = sqrt(residual[0]) / residual[1];
      lastICPCount = residual[1];

      Eigen::Matrix<double, 6, 1> result;
      Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_icp = A_icp.cast<double>();
      Eigen::Matrix<double, 6, 1> db_icp = b_icp.cast<double>();
      lastA = dA_icp;
      lastb = db_icp;
      result = lastA.ldlt().solve(lastb);  // solve the 6x6 normal equations in double precision

      Eigen::Isometry3f incOdom;
      OdometryProvider::computeProjectiveMatrix(resultRt, result, incOdom);
      Eigen::Isometry3f currentT;
      currentT.setIdentity();
      currentT.rotate(Rprev);
      currentT.translation() = tprev;
      currentT = currentT * incOdom.inverse();
      tcurr = currentT.translation();
      Rcurr = currentT.rotation();
    }
  }
  trans = tcurr;
  rot = Rcurr;
}
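// Aside (not part of the original file): each inner iteration above reduces to
// solving the 6x6 point-to-plane normal equations A x = b that icpStep()
// accumulates on the GPU, cast to double for numerical safety. A hedged sketch
// of just the solve, with an illustrative helper name (the twist ordering is
// whatever OdometryProvider::computeProjectiveMatrix expects):
static Eigen::Matrix<double, 6, 1> solveIcpTwist(
    const Eigen::Matrix<float, 6, 6, Eigen::RowMajor>& A_icp,
    const Eigen::Matrix<float, 6, 1>& b_icp) {
  Eigen::Matrix<double, 6, 6> A = A_icp.cast<double>();
  Eigen::Matrix<double, 6, 1> b = b_icp.cast<double>();
  return A.ldlt().solve(b);  // LDLT suffices: A is symmetric positive semi-definite
}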
void Aligner::align() {
  struct timeval tvStart, tvEnd;
  gettimeofday(&tvStart, 0);
  if (!_projector || !_linearizer || !_correspondenceGenerator) {
    cerr << "Aligner: doing nothing, since not all required algorithms are set" << endl;
    return;
  }
  // The current points are seen from the frame of the sensor.
  _projector->setTransform(_sensorOffset);
  _projector->project(_correspondenceGenerator->currentIndexImage(),
                      _correspondenceGenerator->currentDepthImage(),
                      _currentScene->points());
  _T = _initialGuess;

  for (int i = 0; i < _outerIterations; i++) {
    /************************************************************************
     *                      Correspondence Computation                      *
     ************************************************************************/
    // Compute the indices of the current scene from the point of view of the sensor.
    _projector->setTransform(_T * _sensorOffset);
    _projector->project(_correspondenceGenerator->referenceIndexImage(),
                        _correspondenceGenerator->referenceDepthImage(),
                        _referenceScene->points());
    _correspondenceGenerator->compute(*_referenceScene, *_currentScene, _T.inverse());

    /************************************************************************
     *                              Alignment                               *
     ************************************************************************/
    Eigen::Isometry3f invT = _T.inverse();
    for (int k = 0; k < _innerIterations; k++) {
      invT.matrix().block<1, 4>(3, 0) << 0, 0, 0, 1;  // keep the last row well formed
      Matrix6f H;
      Vector6f b;
      _linearizer->setT(invT);
      _linearizer->update();
      H = _linearizer->H() + Matrix6f::Identity() * 10.0f;  // damping term
      b = _linearizer->b();
      // Add the priors.
      for (size_t j = 0; j < _priors.size(); j++) {
        const SE3Prior* prior = _priors[j];
        Vector6f priorError = prior->error(invT);
        Matrix6f priorJacobian = prior->jacobian(invT);
        Matrix6f priorInformationRemapped = prior->errorInformation(invT);
        Matrix6f Hp = priorJacobian.transpose() * priorInformationRemapped * priorJacobian;
        Vector6f bp = priorJacobian.transpose() * priorInformationRemapped * priorError;
        H += Hp;
        b += bp;
      }
      Vector6f dx = H.ldlt().solve(-b);
      Eigen::Isometry3f dT = v2t(dx);
      invT = dT * invT;
    }
    _T = invT.inverse();
    _T.matrix().block<1, 4>(3, 0) << 0, 0, 0, 1;
  }

  gettimeofday(&tvEnd, 0);
  double tStart = tvStart.tv_sec * 1000.0 + tvStart.tv_usec * 0.001;
  double tEnd = tvEnd.tv_sec * 1000.0 + tvEnd.tv_usec * 0.001;
  _totalTime = tEnd - tStart;
  _error = _linearizer->error();
  _inliers = _linearizer->inliers();
}
void ImageMessageListener::imageCallback(const sensor_msgs::ImageConstPtr& in_msg) {
  static int counter = 0;  // counter used to add periodic line breaks to the progress output
  _image_deque.push_back(*in_msg);
  if (_image_deque.size() < _deque_length) {
    return;
  }
  sensor_msgs::Image msg = _image_deque.front();
  _image_deque.pop_front();
  if (_listener) {
    tf::StampedTransform transform;
    try {
      _listener->waitForTransform(_odom_frame_id, _base_link_frame_id, msg.header.stamp, ros::Duration(0.5));
      _listener->lookupTransform(_odom_frame_id, _base_link_frame_id, msg.header.stamp, transform);
    } catch (tf::TransformException ex) {
      ROS_ERROR("%s", ex.what());
      return;  // without a valid lookup, transform is uninitialized and unusable
    }
    Eigen::Quaternionf q;
    q.x() = transform.getRotation().x();
    q.y() = transform.getRotation().y();
    q.z() = transform.getRotation().z();
    q.w() = transform.getRotation().w();
    _odom_pose.linear() = q.toRotationMatrix();
    _odom_pose.translation() = Eigen::Vector3f(transform.getOrigin().x(),
                                               transform.getOrigin().y(),
                                               transform.getOrigin().z());
  }
  cv_bridge::CvImageConstPtr bridge;
  bridge = cv_bridge::toCvCopy(msg, msg.encoding);
  if (!_has_camera_matrix) {
    cerr << "received: " << _image_topic << ", waiting for camera matrix" << endl;
    return;
  }
  _cv_image = bridge->image.clone();
  PinholeImageMessage* img_msg = new PinholeImageMessage(_image_topic, msg.header.frame_id,
                                                         msg.header.seq, msg.header.stamp.toSec());
  img_msg->setImage(_cv_image);
  img_msg->setOffset(_camera_offset);
  img_msg->setCameraMatrix(_K);
  img_msg->setDepthScale(_depth_scale);
  if (_imu_interpolator && _listener) {
    Eigen::Isometry3f imu = Eigen::Isometry3f::Identity();
    Eigen::Quaternionf q_imu;
    if (!_imu_interpolator->getOrientation(q_imu, msg.header.stamp.toSec()))
      return;
    imu.linear() = q_imu.toRotationMatrix();
    if (_verbose) {
      cerr << "i";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
    img_msg->setImu(imu);
  } else if (_listener) {
    img_msg->setOdometry(_odom_pose);
    if (_verbose) {
      cerr << "o";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
  } else {
    if (_verbose) {
      cerr << "x";
      counter++;
      if (counter % 1024 == 0)
        cerr << endl;
    }
  }
  _sorter->insertMessage(img_msg);
}
void ViewerState::load(const std::string& filename) {
  clear();
  listWidget->clear();
  graph->clear();
  graph->load(filename.c_str());

  vector<int> vertexIds(graph->vertices().size());
  int k = 0;
  for (OptimizableGraph::VertexIDMap::iterator it = graph->vertices().begin(); it != graph->vertices().end(); ++it) {
    vertexIds[k++] = (it->first);
  }
  sort(vertexIds.begin(), vertexIds.end());

  listAssociations.clear();
  size_t maxCount = 20000;
  for (size_t i = 0; i < vertexIds.size() && i < maxCount; ++i) {
    OptimizableGraph::Vertex* _v = graph->vertex(vertexIds[i]);
    g2o::VertexSE3* v = dynamic_cast<g2o::VertexSE3*>(_v);
    if (!v)
      continue;
    OptimizableGraph::Data* d = v->userData();
    while (d) {
      RGBDData* rgbdData = dynamic_cast<RGBDData*>(d);
      if (!rgbdData) {
        d = d->next();
        continue;
      }
      // Retrieve from the RGBD data the index of the parameter,
      // then retrieve the parameter itself from the graph and
      // attempt a cast to a camera parameter.
      int paramIndex = rgbdData->paramIndex();
      g2o::Parameter* _cameraParam = graph->parameter(paramIndex);
      ParameterCamera* cameraParam = dynamic_cast<ParameterCamera*>(_cameraParam);
      if (!cameraParam) {
        cerr << "error: parameter " << paramIndex << " is not a ParameterCamera, aborting" << endl;
        return;
      }
      // We got the parameter: extract the camera matrix and the sensor offset.
      Eigen::Matrix3f cameraMatrix;
      Eigen::Isometry3f sensorOffset;
      cameraMatrix.setZero();
      int cmax = 4;
      int rmax = 3;
      for (int c = 0; c < cmax; c++) {
        for (int r = 0; r < rmax; r++) {
          sensorOffset.matrix()(r, c) = cameraParam->offset()(r, c);
          if (c < 3)
            cameraMatrix(r, c) = cameraParam->Kcam()(r, c);
        }
      }
      sensorOffset.matrix().row(3) << 0, 0, 0, 1;  // the copy loop above never writes the last row
      char buf[1024];
      sprintf(buf, "%d", v->id());
      QString listItem(buf);
      listAssociations.push_back(rgbdData);
      listWidget->addItem(listItem);
      d = d->next();
    }
  }
}
void PointWithNormalStatistcsGenerator::computeNormalsAndSVD(PointWithNormalVector& points,
                                                             PointWithNormalSVDVector& svds,
                                                             const Eigen::MatrixXi& indices,
                                                             const Eigen::Matrix3f& cameraMatrix,
                                                             const Eigen::Isometry3f& transform) {
  _integralImage.compute(indices, points);
  int q = 0;
  int outerStep = _numThreads * _step;
  PixelMapper pixelMapper;
  pixelMapper.setCameraMatrix(cameraMatrix);
  pixelMapper.setTransform(transform);
  Eigen::Isometry3f inverseTransform = transform.inverse();
#pragma omp parallel
  {
#ifdef _PWN_USE_OPENMP_
    int threadNum = omp_get_thread_num();
#else // _PWN_USE_OPENMP_
    int threadNum = 0;
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigenSolver;
#endif // _PWN_USE_OPENMP_
    for (int c = threadNum; c < indices.cols(); c += outerStep) {
#ifdef _PWN_USE_OPENMP_
      Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigenSolver;
#endif // _PWN_USE_OPENMP_
      for (int r = 0; r < indices.rows(); r += _step, q++) {
        int index = indices(r, c);
        if (index < 0)
          continue;
        // Determine the region over which the statistics are accumulated.
        PointWithNormal& point = points[index];
        PointWithNormalSVD& originalSVD = svds[index];
        PointWithNormalSVD svd;
        Eigen::Vector3f normal = point.normal();
        Eigen::Vector3f coord = pixelMapper.projectPoint(point.point() + Eigen::Vector3f(_worldRadius, _worldRadius, 0));
        svd._z = point(2);
        coord.head<2>() *= (1. / coord(2));
        // Clamp the image region to at most _imageRadius pixels.
        int dx = (int)std::fabs(c - coord[0]);
        int dy = (int)std::fabs(r - coord[1]);
        if (dx > _imageRadius)
          dx = _imageRadius;
        if (dy > _imageRadius)
          dy = _imageRadius;
        PointAccumulator acc = _integralImage.getRegion(c - dx, c + dx, r - dy, r + dy);
        svd._mean = point.point();
        if (acc.n() > _minPoints) {
          Eigen::Vector3f mean = acc.mean();
          svd._mean = mean;
          svd._n = acc.n();
          Eigen::Matrix3f cov = acc.covariance();
          eigenSolver.compute(cov);
          svd._U = eigenSolver.eigenvectors();
          svd._singularValues = eigenSolver.eigenvalues();
          if (svd._singularValues(0) < 0)
            svd._singularValues(0) = 0;
          // The normal is the eigenvector of the smallest eigenvalue,
          // flipped so that it points toward the observer.
          normal = eigenSolver.eigenvectors().col(0).normalized();
          if (normal.dot(inverseTransform * mean) > 0.0f)
            normal = -normal;
          svd.updateCurvature();
          if (svd.curvature() > _maxCurvature) {
            normal.setZero();
          }
        } else {
          normal.setZero();
          svd = PointWithNormalSVD();
        }
        if (svd.n() > originalSVD.n()) {
          originalSVD = svd;
          point.setNormal(normal);
        }
      }
    }
  }
}
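// Aside (not part of the original file): the extraction above is the classic
// PCA recipe: the normal is the eigenvector of the neighborhood covariance
// belonging to the smallest eigenvalue, re-oriented toward the sensor, and a
// common surface-variation measure is lambda0 / (lambda0 + lambda1 + lambda2).
// A hedged, self-contained sketch (names are ours; assumes <Eigen/Eigenvalues>):
static Eigen::Vector3f normalFromCovariance(const Eigen::Matrix3f& cov,
                                            const Eigen::Vector3f& pointToSensor,
                                            float& curvature) {
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> es(cov);  // eigenvalues in ascending order
  Eigen::Vector3f lambda = es.eigenvalues().cwiseMax(0.0f);
  curvature = lambda(0) / std::max(lambda.sum(), 1e-9f);
  Eigen::Vector3f n = es.eigenvectors().col(0).normalized();
  if (n.dot(pointToSensor) < 0.0f)
    n = -n;  // flip so the normal faces the observer
  return n;
}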
int main(int argc, char** argv) {
  // Handle input.
  float pointSize;
  int pointStep;
  float alpha;
  int applyTransform;
  int step;
  string logFilename;
  string configFilename;
  float di_scaleFactor;
  float scale;

  g2o::CommandArgs arg;
  arg.param("vz_pointSize", pointSize, 1.0f, "Size of the points being visualized");
  arg.param("vz_transform", applyTransform, 1, "Choose if you want to apply the absolute transform of the point clouds");
  arg.param("vz_step", step, 1, "Visualize a point cloud every vz_step point clouds");
  arg.param("vz_alpha", alpha, 1.0f, "Alpha channel value used for the color points");
  arg.param("vz_pointStep", pointStep, 1, "Step at which points are drawn");
  arg.param("vz_scale", scale, 2.0f, "Depth image size reduction factor");
  arg.param("di_scaleFactor", di_scaleFactor, 0.001f, "Scale factor to apply to convert depth images to meters");
  arg.paramLeftOver("configFilename", configFilename, "", "Configuration filename", true);
  arg.paramLeftOver("logFilename", logFilename, "", "Log filename", true);
  arg.parseArgs(argc, argv);

  // Create the GUI.
  QApplication application(argc, argv);
  QWidget *mainWindow = new QWidget();
  mainWindow->setWindowTitle("pwn_tracker_log_viewer");
  QHBoxLayout *hlayout = new QHBoxLayout();
  mainWindow->setLayout(hlayout);
  QVBoxLayout *vlayout = new QVBoxLayout();
  hlayout->addItem(vlayout);
  QVBoxLayout *vlayout2 = new QVBoxLayout();
  hlayout->addItem(vlayout2);
  hlayout->setStretch(1, 1);
  QListWidget* listWidget = new QListWidget(mainWindow);
  listWidget->setSelectionMode(QAbstractItemView::MultiSelection);
  vlayout->addWidget(listWidget);
  PWNQGLViewer* viewer = new PWNQGLViewer(mainWindow);
  vlayout2->addWidget(viewer);
  Eigen::Isometry3f T;
  T.setIdentity();
  T.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

  // Read the config file.
  cout << "Loading config file..." << endl;
  Aligner *aligner;
  DepthImageConverter *converter;
  std::vector<Serializable*> instances = readConfig(aligner, converter, configFilename.c_str());
  converter->projector()->scale(1.0f / scale);
  cout << "... done" << endl;

  // Read and parse the log file.
  std::vector<boss::Serializable*> objects;
  std::vector<PwnTrackerFrame*> trackerFrames;
  std::vector<PwnTrackerRelation*> trackerRelations;
  Deserializer des;
  des.setFilePath(logFilename);
  cout << "Loading log file..." << endl;
  load(trackerFrames, trackerRelations, objects, des, step);
  cout << "... done" << endl;

  // Load the drawable list with the PwnTrackerFrame objects.
  std::vector<Frame*> pointClouds;
  pointClouds.resize(trackerFrames.size());
  Frame *dummyFrame = 0;
  std::fill(pointClouds.begin(), pointClouds.end(), dummyFrame);
  for (size_t i = 0; i < trackerFrames.size(); i++) {
    PwnTrackerFrame *pwnTrackerFrame = trackerFrames[i];
    char nummero[1024];
    sprintf(nummero, "%05d", (int)i);
    listWidget->addItem(QString(nummero));
    QListWidgetItem *lastItem = listWidget->item(listWidget->count() - 1);
    Isometry3f transform = Isometry3f::Identity();
    if (applyTransform) {
      isometry3d2f(transform, pwnTrackerFrame->transform());
      transform = transform * pwnTrackerFrame->sensorOffset;
    }
    transform.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;
    GLParameterFrame *frameParams = new GLParameterFrame();
    frameParams->setStep(pointStep);
    frameParams->setShow(false);
    DrawableFrame* drawableFrame = new DrawableFrame(transform, frameParams, pointClouds[i]);
    viewer->addDrawable(drawableFrame);
  }

  // Manage the GUI.
  viewer->init();
  mainWindow->show();
  viewer->show();
  listWidget->show();
  viewer->setAxisIsDrawn(true);
  bool selectionChanged = false;
  QListWidgetItem *item = 0;
  DepthImage depthImage;
  DepthImage scaledDepthImage;
  while (mainWindow->isVisible()) {
    selectionChanged = false;
    for (int i = 0; i < listWidget->count(); i++) {
      item = listWidget->item(i);
      DrawableFrame *drawableFrame = dynamic_cast<DrawableFrame*>(viewer->drawableList()[i]);
      if (item && item->isSelected()) {
        if (!drawableFrame->parameter()->show()) {
          drawableFrame->parameter()->setShow(true);
          selectionChanged = true;
        }
        if (pointClouds[i] == 0) {
          // Decompress the depth image of the selected frame lazily, on first selection.
          pointClouds[i] = new Frame();
          boss_map::ImageBLOB* fromDepthBlob = trackerFrames[i]->depthImage.get();
          DepthImage depthImage;
          depthImage.fromCvMat(fromDepthBlob->cvImage());
          DepthImage::scale(scaledDepthImage, depthImage, scale);
          converter->compute(*pointClouds[i], scaledDepthImage);
          drawableFrame->setFrame(pointClouds[i]);
          delete fromDepthBlob;
        }
      } else if (drawableFrame->parameter()->show()) {
        // Only flag a redraw when the visibility actually changes.
        drawableFrame->parameter()->setShow(false);
        selectionChanged = true;
      }
    }
    if (selectionChanged)
      viewer->updateGL();
    application.processEvents();
  }
  return 0;
}
BlockFitter::Result BlockFitter::go() {
  Result result;
  result.mSuccess = false;
  if (mCloud->size() < 100) return result;

  // voxelize
  LabeledCloud::Ptr cloud(new LabeledCloud());
  pcl::VoxelGrid<pcl::PointXYZL> voxelGrid;
  voxelGrid.setInputCloud(mCloud);
  voxelGrid.setLeafSize(mDownsampleResolution, mDownsampleResolution, mDownsampleResolution);
  voxelGrid.filter(*cloud);
  for (int i = 0; i < (int)cloud->size(); ++i) cloud->points[i].label = i;
  if (mDebug) {
    std::cout << "Original cloud size " << mCloud->size() << std::endl;
    std::cout << "Voxelized cloud size " << cloud->size() << std::endl;
    pcl::io::savePCDFileBinary("cloud_full.pcd", *cloud);
  }
  if (cloud->size() < 100) return result;

  // pose
  cloud->sensor_origin_.head<3>() = mOrigin;
  cloud->sensor_origin_[3] = 1;
  Eigen::Vector3f rz = mLookDir;
  Eigen::Vector3f rx = rz.cross(Eigen::Vector3f::UnitZ());
  Eigen::Vector3f ry = rz.cross(rx);
  Eigen::Matrix3f rotation;
  rotation.col(0) = rx.normalized();
  rotation.col(1) = ry.normalized();
  rotation.col(2) = rz.normalized();
  Eigen::Isometry3f pose = Eigen::Isometry3f::Identity();
  pose.linear() = rotation;
  pose.translation() = mOrigin;

  // ground removal
  if (mRemoveGround) {
    Eigen::Vector4f groundPlane;

    // filter points
    float minZ = mMinGroundZ;
    float maxZ = mMaxGroundZ;
    if ((minZ > 10000) && (maxZ > 10000)) {
      // no z limits were given: derive them from the lowest points
      std::vector<float> zVals(cloud->size());
      for (int i = 0; i < (int)cloud->size(); ++i) {
        zVals[i] = cloud->points[i].z;
      }
      std::sort(zVals.begin(), zVals.end());
      minZ = zVals[0] - 0.1;
      maxZ = minZ + 0.5;
    }
    LabeledCloud::Ptr tempCloud(new LabeledCloud());
    for (int i = 0; i < (int)cloud->size(); ++i) {
      const Eigen::Vector3f& p = cloud->points[i].getVector3fMap();
      if ((p[2] < minZ) || (p[2] > maxZ)) continue;
      tempCloud->push_back(cloud->points[i]);
    }

    // downsample
    voxelGrid.setInputCloud(tempCloud);
    voxelGrid.setLeafSize(0.1, 0.1, 0.1);
    voxelGrid.filter(*tempCloud);
    if (tempCloud->size() < 100) return result;

    // find ground plane
    std::vector<Eigen::Vector3f> pts(tempCloud->size());
    for (int i = 0; i < (int)tempCloud->size(); ++i) {
      pts[i] = tempCloud->points[i].getVector3fMap();
    }
    const float kGroundPlaneDistanceThresh = 0.01;  // TODO: param
    PlaneFitter planeFitter;
    planeFitter.setMaxDistance(kGroundPlaneDistanceThresh);
    planeFitter.setRefineUsingInliers(true);
    auto res = planeFitter.go(pts);
    groundPlane = res.mPlane;
    if (groundPlane[2] < 0) groundPlane = -groundPlane;
    if (mDebug) {
      std::cout << "dominant plane: " << groundPlane.transpose() << std::endl;
      std::cout << "  inliers: " << res.mInliers.size() << std::endl;
    }

    if (std::acos(groundPlane[2]) > 30 * M_PI / 180) {
      std::cout << "error: ground plane not found!" << std::endl;
      std::cout << "proceeding with full segmentation (may take a while)..." << std::endl;
    } else {
      result.mGroundPlane = groundPlane;

      // compute the convex hull of the ground points
      {
        tempCloud.reset(new LabeledCloud());
        for (int i = 0; i < (int)cloud->size(); ++i) {
          Eigen::Vector3f p = cloud->points[i].getVector3fMap();
          float dist = groundPlane.head<3>().dot(p) + groundPlane[3];
          if (std::abs(dist) > kGroundPlaneDistanceThresh) continue;
          p -= (groundPlane.head<3>() * dist);  // project the point onto the plane
          pcl::PointXYZL cloudPt;
          cloudPt.getVector3fMap() = p;
          tempCloud->push_back(cloudPt);
        }
        pcl::ConvexHull<pcl::PointXYZL> chull;
        pcl::PointCloud<pcl::PointXYZL> hull;
        chull.setInputCloud(tempCloud);
        chull.reconstruct(hull);
        result.mGroundPolygon.resize(hull.size());
        for (int i = 0; i < (int)hull.size(); ++i) {
          result.mGroundPolygon[i] = hull[i].getVector3fMap();
        }
      }

      // remove points below or near the ground, and points out of range
      tempCloud.reset(new LabeledCloud());
      for (int i = 0; i < (int)cloud->size(); ++i) {
        Eigen::Vector3f p = cloud->points[i].getVector3fMap();
        float dist = p.dot(groundPlane.head<3>()) + groundPlane[3];
        if ((dist < mMinHeightAboveGround) || (dist > mMaxHeightAboveGround)) continue;
        float range = (p - mOrigin).norm();
        if (range > mMaxRange) continue;
        tempCloud->push_back(cloud->points[i]);
      }
      std::swap(tempCloud, cloud);
      if (mDebug) {
        std::cout << "Filtered cloud size " << cloud->size() << std::endl;
      }
    }
  }

  // normal estimation
  auto t0 = std::chrono::high_resolution_clock::now();
  if (mDebug) {
    std::cout << "computing normals..." << std::flush;
  }
  RobustNormalEstimator normalEstimator;
  normalEstimator.setMaxEstimationError(0.01);
  normalEstimator.setRadius(0.1);
  normalEstimator.setMaxCenterError(0.02);
  normalEstimator.setMaxIterations(100);
  NormalCloud::Ptr normals(new NormalCloud());
  normalEstimator.go(cloud, *normals);
  if (mDebug) {
    auto t1 = std::chrono::high_resolution_clock::now();
    auto dt = std::chrono::duration_cast<std::chrono::milliseconds>(t1 - t0);
    std::cout << "finished in " << dt.count() / 1e3 << " sec" << std::endl;
  }

  // filter non-horizontal points
  const float maxNormalAngle = mMaxAngleFromHorizontal * M_PI / 180;
  LabeledCloud::Ptr tempCloud(new LabeledCloud());
  NormalCloud::Ptr tempNormals(new NormalCloud());
  for (int i = 0; i < (int)normals->size(); ++i) {
    const auto& norm = normals->points[i];
    Eigen::Vector3f normal(norm.normal_x, norm.normal_y, norm.normal_z);
    float angle = std::acos(normal[2]);
    if (angle > maxNormalAngle) continue;
    tempCloud->push_back(cloud->points[i]);
    tempNormals->push_back(normals->points[i]);
  }
  std::swap(tempCloud, cloud);
  std::swap(tempNormals, normals);
  if (mDebug) {
    std::cout << "Horizontal points remaining " << cloud->size() << std::endl;
    pcl::io::savePCDFileBinary("cloud.pcd", *cloud);
    pcl::io::savePCDFileBinary("robust_normals.pcd", *normals);
  }

  // plane segmentation
  t0 = std::chrono::high_resolution_clock::now();
  if (mDebug) {
    std::cout << "segmenting planes..." << std::flush;
  }
  PlaneSegmenter segmenter;
  segmenter.setData(cloud, normals);
  segmenter.setMaxError(0.05);
  segmenter.setMaxAngle(5);
  segmenter.setMinPoints(100);
  PlaneSegmenter::Result segmenterResult = segmenter.go();
  if (mDebug) {
    auto t1 = std::chrono::high_resolution_clock::now();
    auto dt = std::chrono::duration_cast<std::chrono::milliseconds>(t1 - t0);
    std::cout << "finished in " << dt.count() / 1e3 << " sec" << std::endl;
    std::ofstream ofs("labels.txt");
    for (const int lab : segmenterResult.mLabels) {
      ofs << lab << std::endl;
    }
    ofs.close();
    ofs.open("planes.txt");
    for (auto it : segmenterResult.mPlanes) {
      auto& plane = it.second;
      ofs << it.first << " " << plane.transpose() << std::endl;
    }
    ofs.close();
  }

  // create point clouds, one per segmented plane label
  std::unordered_map<int, std::vector<Eigen::Vector3f>> cloudMap;
  for (int i = 0; i < (int)segmenterResult.mLabels.size(); ++i) {
    int label = segmenterResult.mLabels[i];
    if (label <= 0) continue;
    cloudMap[label].push_back(cloud->points[i].getVector3fMap());
  }
  struct Plane {
    MatrixX3f mPoints;
    Eigen::Vector4f mPlane;
  };
  std::vector<Plane> planes;
  planes.reserve(cloudMap.size());
  for (auto it : cloudMap) {
    int n = it.second.size();
    Plane plane;
    plane.mPoints.resize(n, 3);
    for (int i = 0; i < n; ++i) plane.mPoints.row(i) = it.second[i];
    plane.mPlane = segmenterResult.mPlanes[it.first];
    planes.push_back(plane);
  }

  // fit a rectangle to each plane
  std::vector<RectangleFitter::Result> results;
  for (auto& plane : planes) {
    RectangleFitter fitter;
    fitter.setDimensions(mBlockDimensions.head<2>());
    fitter.setAlgorithm((RectangleFitter::Algorithm)mRectangleFitAlgorithm);
    fitter.setData(plane.mPoints, plane.mPlane);
    auto result = fitter.go();
    results.push_back(result);
  }
  if (mDebug) {
    std::ofstream ofs("boxes.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      for (auto& p : res.mPolygon) {
        ofs << i << " " << p.transpose() << std::endl;
      }
    }
    ofs.close();
    ofs.open("hulls.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      for (auto& p : res.mConvexHull) {
        ofs << i << " " << p.transpose() << std::endl;
      }
    }
    ofs.close();
    ofs.open("poses.txt");
    for (int i = 0; i < (int)results.size(); ++i) {
      auto& res = results[i];
      auto transform = res.mPose;
      ofs << transform.matrix() << std::endl;
    }
    ofs.close();
  }

  // accept only rectangles whose area is compatible with the expected block size
  for (int i = 0; i < (int)results.size(); ++i) {
    const auto& res = results[i];
    if (mBlockDimensions.head<2>().norm() > 1e-5) {
      float areaRatio = mBlockDimensions.head<2>().prod() / res.mConvexArea;
      if ((areaRatio < mAreaThreshMin) || (areaRatio > mAreaThreshMax)) continue;
    }
    Block block;
    block.mSize << res.mSize[0], res.mSize[1], mBlockDimensions[2];
    block.mPose = res.mPose;
    block.mPose.translation() -= block.mPose.rotation().col(2) * mBlockDimensions[2] / 2;
    block.mHull = res.mConvexHull;
    result.mBlocks.push_back(block);
  }
  if (mDebug) {
    std::cout << "Surviving blocks: " << result.mBlocks.size() << std::endl;
  }
  result.mSuccess = true;
  return result;
}
Eigen::Matrix3f Homography::calcHomography(const Eigen::Isometry3f &trans,
                                           const Eigen::Vector3f &n, float dist) const {
  return calcHomography(trans.rotation(), trans.translation(), n, dist);
}
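// Aside (not part of the original file): assuming calcHomography() follows the
// textbook plane-induced homography, the overload above evaluates, for a plane
// with unit normal n at distance dist in the reference view,
//   H = R + t * n^T / dist
// (intrinsics, if any, are applied elsewhere). A hedged sketch of that form:
static Eigen::Matrix3f planeInducedHomography(const Eigen::Matrix3f& R,
                                              const Eigen::Vector3f& t,
                                              const Eigen::Vector3f& n,
                                              float dist) {
  return R + t * n.transpose() / dist;  // 3x1 * 1x3 outer product
}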
gtsam::Pose3 Convert(const Eigen::Isometry3f& iso) {
  gtsam::Matrix3 mat = iso.linear().cast<double>();
  Eigen::Vector3d trans = iso.translation().cast<double>();
  return gtsam::Pose3(mat, gtsam::Point3(trans));
}
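// Aside (not part of the original file): minimal usage sketch for Convert(),
// with illustrative values; assumes <Eigen/Geometry> and <gtsam/geometry/Pose3.h>:
static gtsam::Pose3 exampleConvertUsage() {
  Eigen::Isometry3f iso = Eigen::Isometry3f::Identity();
  iso.translation() = Eigen::Vector3f(1.0f, 0.0f, 0.0f);  // arbitrary test pose
  return Convert(iso);  // gtsam::Pose3 in double precision
}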
void RGBDOdometry::getIncrementalTransformation(Eigen::Vector3f & trans,
                                                Eigen::Matrix<float, 3, 3, Eigen::RowMajor> & rot,
                                                const bool & rgbOnly,
                                                const float & icpWeight,
                                                const bool & pyramid,
                                                const bool & fastOdom,
                                                const bool & so3) {
  bool icp = !rgbOnly && icpWeight > 0;
  bool rgb = rgbOnly || icpWeight < 100;

  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev = rot;
  Eigen::Vector3f tprev = trans;
  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcurr = Rprev;
  Eigen::Vector3f tcurr = tprev;

  if (rgb) {
    for (int i = 0; i < NUM_PYRS; i++) {
      computeDerivativeImages(nextImage[i], nextdIdx[i], nextdIdy[i]);
    }
  }

  Eigen::Matrix<double, 3, 3, Eigen::RowMajor> resultR = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Identity();

  if (so3) {
    // Bootstrap the rotation with a pure SO(3) image alignment on a coarse pyramid level.
    int pyramidLevel = 2;
    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> R_lr = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>::Identity();
    Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Zero();
    K(0, 0) = intr(pyramidLevel).fx;
    K(1, 1) = intr(pyramidLevel).fy;
    K(0, 2) = intr(pyramidLevel).cx;
    K(1, 2) = intr(pyramidLevel).cy;
    K(2, 2) = 1;

    float lastError = std::numeric_limits<float>::max() / 2;
    float lastCount = std::numeric_limits<float>::max() / 2;
    Eigen::Matrix<double, 3, 3, Eigen::RowMajor> lastResultR = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Identity();

    for (int i = 0; i < 10; i++) {
      Eigen::Matrix<float, 3, 3, Eigen::RowMajor> jtj;
      Eigen::Matrix<float, 3, 1> jtr;
      // A rotation-only frame-to-frame warp is the homography K * R * K^-1.
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> homography = K * resultR * K.inverse();
      mat33 imageBasis;
      memcpy(&imageBasis.data[0], homography.cast<float>().eval().data(), sizeof(mat33));
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K_inv = K.inverse();
      mat33 kinv;
      memcpy(&kinv.data[0], K_inv.cast<float>().eval().data(), sizeof(mat33));
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K_R_lr = K * resultR;
      mat33 krlr;
      memcpy(&krlr.data[0], K_R_lr.cast<float>().eval().data(), sizeof(mat33));
      float residual[2];

      TICK("so3Step");
      so3Step(lastNextImage[pyramidLevel], nextImage[pyramidLevel], imageBasis, kinv, krlr,
              sumDataSO3, outDataSO3, jtj.data(), jtr.data(), &residual[0],
              GPUConfig::getInstance().so3StepThreads, GPUConfig::getInstance().so3StepBlocks);
      TOCK("so3Step");

      lastSO3Error = sqrt(residual[0]) / residual[1];
      lastSO3Count = residual[1];

      if (lastSO3Error < lastError && lastCount == lastSO3Count) {
        // Converged.
        break;
      } else if (lastSO3Error > lastError + 0.001) {
        // Diverging: roll back to the previous estimate.
        lastSO3Error = lastError;
        lastSO3Count = lastCount;
        resultR = lastResultR;
        break;
      }
      lastError = lastSO3Error;
      lastCount = lastSO3Count;
      lastResultR = resultR;
      Eigen::Vector3f delta = jtj.ldlt().solve(jtr);
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> rotUpdate = OdometryProvider::rodrigues(delta.cast<double>());
      R_lr = rotUpdate.cast<float>() * R_lr;
      for (int x = 0; x < 3; x++) {
        for (int y = 0; y < 3; y++) {
          resultR(x, y) = R_lr(x, y);
        }
      }
    }
  }

  iterations[0] = fastOdom ? 3 : 10;
  iterations[1] = pyramid ? 5 : 0;
  iterations[2] = pyramid ? 4 : 0;

  Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev_inv = Rprev.inverse();
  mat33 device_Rprev_inv = Rprev_inv;
  float3 device_tprev = *reinterpret_cast<float3*>(tprev.data());
  Eigen::Matrix<double, 4, 4, Eigen::RowMajor> resultRt = Eigen::Matrix<double, 4, 4, Eigen::RowMajor>::Identity();

  if (so3) {
    for (int x = 0; x < 3; x++) {
      for (int y = 0; y < 3; y++) {
        resultRt(x, y) = resultR(x, y);
      }
    }
  }

  for (int i = NUM_PYRS - 1; i >= 0; i--) {
    if (rgb) {
      projectToPointCloud(lastDepth[i], pointClouds[i], intr, i);
    }
    Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Zero();
    K(0, 0) = intr(i).fx;
    K(1, 1) = intr(i).fy;
    K(0, 2) = intr(i).cx;
    K(1, 2) = intr(i).cy;
    K(2, 2) = 1;
    lastRGBError = std::numeric_limits<float>::max();

    for (int j = 0; j < iterations[i]; j++) {
      Eigen::Matrix<double, 4, 4, Eigen::RowMajor> Rt = resultRt.inverse();
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> R = Rt.topLeftCorner(3, 3);
      Eigen::Matrix<double, 3, 3, Eigen::RowMajor> KRK_inv = K * R * K.inverse();
      mat33 krkInv;
      memcpy(&krkInv.data[0], KRK_inv.cast<float>().eval().data(), sizeof(mat33));
      Eigen::Vector3d Kt = Rt.topRightCorner(3, 1);
      Kt = K * Kt;
      float3 kt = {(float)Kt(0), (float)Kt(1), (float)Kt(2)};

      int sigma = 0;
      int rgbSize = 0;
      if (rgb) {
        TICK("computeRgbResidual");
        computeRgbResidual(pow(minimumGradientMagnitudes[i], 2.0) / pow(sobelScale, 2.0),
                           nextdIdx[i], nextdIdy[i], lastDepth[i], nextDepth[i],
                           lastImage[i], nextImage[i], corresImg[i], sumResidualRGB,
                           maxDepthDeltaRGB, kt, krkInv, sigma, rgbSize,
                           GPUConfig::getInstance().rgbResThreads, GPUConfig::getInstance().rgbResBlocks);
        TOCK("computeRgbResidual");
      }
      // Note: the original parenthesization, sqrt((float)sigma / rgbSize == 0 ? 1 : rgbSize),
      // applies the comparison before the division; the robust scale presumably intended is:
      float sigmaVal = std::sqrt((float)sigma / (rgbSize == 0 ? 1 : rgbSize));
      float rgbError = std::sqrt(sigma) / (rgbSize == 0 ? 1 : rgbSize);
      if (rgbOnly && rgbError > lastRGBError) {
        break;
      }
      lastRGBError = rgbError;
      lastRGBCount = rgbSize;
      if (rgbOnly) {
        sigmaVal = -1;  // Signals the internal optimisation to weight evenly
      }

      Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_icp;
      Eigen::Matrix<float, 6, 1> b_icp;
      mat33 device_Rcurr = Rcurr;
      float3 device_tcurr = *reinterpret_cast<float3*>(tcurr.data());
      DeviceArray2D<float>& vmap_curr = vmaps_curr_[i];
      DeviceArray2D<float>& nmap_curr = nmaps_curr_[i];
      DeviceArray2D<float>& vmap_g_prev = vmaps_g_prev_[i];
      DeviceArray2D<float>& nmap_g_prev = nmaps_g_prev_[i];
      float residual[2];
      if (icp) {
        TICK("icpStep");
        icpStep(device_Rcurr, device_tcurr, vmap_curr, nmap_curr, device_Rprev_inv, device_tprev,
                intr(i), vmap_g_prev, nmap_g_prev, distThres_, angleThres_, sumDataSE3, outDataSE3,
                A_icp.data(), b_icp.data(), &residual[0],
                GPUConfig::getInstance().icpStepThreads, GPUConfig::getInstance().icpStepBlocks);
        TOCK("icpStep");
        // Only meaningful when the ICP kernel actually ran and filled residual[].
        lastICPError = sqrt(residual[0]) / residual[1];
        lastICPCount = residual[1];
      }

      Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_rgbd;
      Eigen::Matrix<float, 6, 1> b_rgbd;
      if (rgb) {
        TICK("rgbStep");
        rgbStep(corresImg[i], sigmaVal, pointClouds[i], intr(i).fx, intr(i).fy,
                nextdIdx[i], nextdIdy[i], sobelScale, sumDataSE3, outDataSE3,
                A_rgbd.data(), b_rgbd.data(),
                GPUConfig::getInstance().rgbStepThreads, GPUConfig::getInstance().rgbStepBlocks);
        TOCK("rgbStep");
      }

      Eigen::Matrix<double, 6, 1> result;
      Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_rgbd = A_rgbd.cast<double>();
      Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_icp = A_icp.cast<double>();
      Eigen::Matrix<double, 6, 1> db_rgbd = b_rgbd.cast<double>();
      Eigen::Matrix<double, 6, 1> db_icp = b_icp.cast<double>();

      if (icp && rgb) {
        double w = icpWeight;
        lastA = dA_rgbd + w * w * dA_icp;
        lastb = db_rgbd + w * db_icp;
        result = lastA.ldlt().solve(lastb);
      } else if (icp) {
        lastA = dA_icp;
        lastb = db_icp;
        result = lastA.ldlt().solve(lastb);
      } else if (rgb) {
        lastA = dA_rgbd;
        lastb = db_rgbd;
        result = lastA.ldlt().solve(lastb);
      } else {
        assert(false && "Control shouldn't reach here");
      }

      Eigen::Isometry3f rgbOdom;
      OdometryProvider::computeUpdateSE3(resultRt, result, rgbOdom);
      Eigen::Isometry3f currentT;
      currentT.setIdentity();
      currentT.rotate(Rprev);
      currentT.translation() = tprev;
      currentT = currentT * rgbOdom.inverse();
      tcurr = currentT.translation();
      Rcurr = currentT.rotation();
    }
  }

  if (rgb && (tcurr - tprev).norm() > 0.3) {
    // Reject implausibly large motion between consecutive frames.
    Rcurr = Rprev;
    tcurr = tprev;
  }

  if (so3) {
    for (int i = 0; i < NUM_PYRS; i++) {
      std::swap(lastNextImage[i], nextImage[i]);
    }
  }

  trans = tcurr;
  rot = Rcurr;
}
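// Aside (not part of the original file): when both cues are active, the code
// above combines the two linear systems with the ICP weight w. Stacking the
// weighted residuals, i.e. minimizing ||J_rgb x - r_rgb||^2 + w^2 ||J_icp x - r_icp||^2,
// yields the normal equations (A_rgb + w^2 A_icp) x = b_rgb + w^2 b_icp; note
// that the original scales b_icp by w rather than w^2. A hedged sketch of the
// textbook form, with an illustrative helper name:
static Eigen::Matrix<double, 6, 1> solveWeightedJoint(
    const Eigen::Matrix<double, 6, 6>& A_rgb, const Eigen::Matrix<double, 6, 1>& b_rgb,
    const Eigen::Matrix<double, 6, 6>& A_icp, const Eigen::Matrix<double, 6, 1>& b_icp,
    double w) {
  Eigen::Matrix<double, 6, 6> A = A_rgb + w * w * A_icp;
  Eigen::Matrix<double, 6, 1> b = b_rgb + w * w * b_icp;
  return A.ldlt().solve(b);
}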
void Merger::merge(Cloud *cloud, Eigen::Isometry3f transform) {
  assert(_indexImage.rows > 0 && _indexImage.cols > 0 && "Merger: _indexImage has zero size");
  assert(_depthImageConverter && "Merger: missing _depthImageConverter");
  assert(_depthImageConverter->projector() && "Merger: missing projector in _depthImageConverter");
  PointProjector *pointProjector = _depthImageConverter->projector();
  Eigen::Isometry3f oldTransform = pointProjector->transform();
  pointProjector->setTransform(transform);
  pointProjector->project(_indexImage, _depthImage, cloud->points());

  int target = 0;    // points that project onto an empty index cell
  int distance = 0;  // points out of the depth range or outside the image
  _collapsedIndices.resize(cloud->points().size());
  std::fill(_collapsedIndices.begin(), _collapsedIndices.end(), -1);

  int killed = 0;
  int currentIndex = 0;
  for (size_t i = 0; i < cloud->points().size(); currentIndex++, i++) {
    const Point currentPoint = cloud->points()[i];
    // const Normal currentNormal = cloud->normals()[i];
    int r = -1, c = -1;
    float depth = 0.0f;
    pointProjector->project(c, r, depth, currentPoint);
    if (depth < 0 || depth > _maxPointDepth ||
        r < 0 || r >= _depthImage.rows ||
        c < 0 || c >= _depthImage.cols) {
      distance++;
      continue;
    }
    float &targetZ = _depthImage(r, c);
    int targetIndex = _indexImage(r, c);
    if (targetIndex < 0) {
      target++;
      continue;
    }
    // const Normal &targetNormal = cloud->normals().at(targetIndex);
    Eigen::Vector4f viewPointDirection = transform.matrix().col(3) - currentPoint;
    viewPointDirection(3) = 0;
    if (targetIndex == currentIndex) {
      _collapsedIndices[currentIndex] = currentIndex;
    } else if (fabs(depth - targetZ) < _distanceThreshold
               /*&& currentNormal.dot(targetNormal) > _normalThreshold
                 && (viewPointDirection.dot(targetNormal) > cos(0))*/) {
      // Merge the two measurements by accumulating their information.
      Gaussian3f &targetGaussian = cloud->gaussians()[targetIndex];
      Gaussian3f &currentGaussian = cloud->gaussians()[currentIndex];
      targetGaussian.addInformation(currentGaussian);
      _collapsedIndices[currentIndex] = targetIndex;
      killed++;
    }
  }

  int murdered = 0;
  int k = 0;
  for (size_t i = 0; i < _collapsedIndices.size(); i++) {
    int collapsedIndex = _collapsedIndices[i];
    if (collapsedIndex == (int)i) {
      cloud->points()[i].head<3>() = cloud->gaussians()[i].mean();
    }
    if (collapsedIndex < 0 || collapsedIndex == (int)i) {
      // Keep this point: compact it toward the head of the arrays.
      cloud->points()[k] = cloud->points()[i];
      cloud->normals()[k] = cloud->normals()[i];
      cloud->stats()[k] = cloud->stats()[i];
      cloud->pointInformationMatrix()[k] = cloud->pointInformationMatrix()[i];
      cloud->normalInformationMatrix()[k] = cloud->normalInformationMatrix()[i];
      cloud->gaussians()[k] = cloud->gaussians()[i];
      if (cloud->rgbs().size())
        cloud->rgbs()[k] = cloud->rgbs()[i];
      k++;
    } else {
      murdered++;
    }
  }

  int originalSize = cloud->points().size();
  // Drop the leftover points.
  cloud->points().resize(k);
  cloud->normals().resize(k);
  cloud->stats().resize(k);
  cloud->pointInformationMatrix().resize(k);
  cloud->normalInformationMatrix().resize(k);
  if (cloud->rgbs().size())
    cloud->rgbs().resize(k);

  std::cout << "[INFO]: number of suppressed points " << murdered << std::endl;
  std::cout << "[INFO]: resized cloud from " << originalSize << " to " << k << " points" << std::endl;
  pointProjector->setTransform(oldTransform);
}
void Aligner::align() {
  assert(_projector && "Aligner: missing _projector");
  assert(_linearizer && "Aligner: missing _linearizer");
  assert(_correspondenceFinder && "Aligner: missing _correspondenceFinder");
  assert(_referenceCloud && "Aligner: missing _referenceCloud");
  assert(_currentCloud && "Aligner: missing _currentCloud");

  struct timeval tvStart, tvEnd;
  gettimeofday(&tvStart, 0);

  // The current points are seen from the frame of the sensor.
  _projector->setTransform(_currentSensorOffset);
  _projector->project(_correspondenceFinder->currentIndexImage(),
                      _correspondenceFinder->currentDepthImage(),
                      _currentCloud->points());
  _T = _initialGuess;

  for (int i = 0; i < _outerIterations; i++) {
    /************************************************************************
     *                      Correspondence Computation                      *
     ************************************************************************/
    // Compute the indices of the current scene from the point of view of the sensor.
    _T.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;
    _projector->setTransform(_T * _referenceSensorOffset);
    _projector->project(_correspondenceFinder->referenceIndexImage(),
                        _correspondenceFinder->referenceDepthImage(),
                        _referenceCloud->points());
    _correspondenceFinder->compute(*_referenceCloud, *_currentCloud, _T.inverse());

    /************************************************************************
     *                              Alignment                               *
     ************************************************************************/
    Eigen::Isometry3f invT = _T.inverse();
    for (int k = 0; k < _innerIterations; k++) {
      invT.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;
      Matrix6f H;
      Vector6f b;
      _linearizer->setT(invT);
      _linearizer->update();
      H = _linearizer->H() + Matrix6f::Identity();
      b = _linearizer->b();
      H += Matrix6f::Identity() * 1000.0f;  // strong damping on the update
      // Add the priors.
      for (size_t j = 0; j < _priors.size(); j++) {
        const SE3Prior *prior = _priors[j];
        Vector6f priorError = prior->error(invT);
        Matrix6f priorJacobian = prior->jacobian(invT);
        Matrix6f priorInformationRemapped = prior->errorInformation(invT);
        Matrix6f Hp = priorJacobian.transpose() * priorInformationRemapped * priorJacobian;
        Vector6f bp = priorJacobian.transpose() * priorInformationRemapped * priorError;
        H += Hp;
        b += bp;
      }
      Vector6f dx = H.ldlt().solve(-b);
      Eigen::Isometry3f dT = v2t(dx);
      invT = dT * invT;
    }
    _T = invT.inverse();
    _T = v2t(t2v(_T));  // round-trip through the minimal representation to re-normalize the rotation
    _T.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;
  }

  gettimeofday(&tvEnd, 0);
  double tStart = tvStart.tv_sec * 1000.0 + tvStart.tv_usec * 0.001;
  double tEnd = tvEnd.tv_sec * 1000.0 + tvEnd.tv_usec * 0.001;
  _totalTime = tEnd - tStart;
  _error = _linearizer->error();
  _inliers = _linearizer->inliers();
  _computeStatistics(_mean, _omega, _translationalEigenRatio, _rotationalEigenRatio);
  if (_rotationalEigenRatio > _rotationalMinEigenRatio ||
      _translationalEigenRatio > _translationalMinEigenRatio) {
    if (_debug) {
      cerr << endl;
      cerr << "************** WARNING: SOLUTION MIGHT BE INVALID (eigenratio failure) **************" << endl;
      cerr << "tr: " << _translationalEigenRatio << " rr: " << _rotationalEigenRatio << endl;
      cerr << "**************************************************************************************" << endl;
    }
  } else {
    if (_debug) {
      cerr << "************** FOUND A VALID SOLUTION (eigenratio ok) **************" << endl;
      cerr << "tr: " << _translationalEigenRatio << " rr: " << _rotationalEigenRatio << endl;
      cerr << "********************************************************************" << endl;
    }
  }
  if (_debug) {
    cout << "Solution statistics in (t, mq): " << endl;
    cout << "mean: " << _mean.transpose() << endl;
    cout << "Omega: " << endl;
    cout << _omega << endl;
  }
}
void TwoDepthImageAlignerNode::processNode(MapNode* node_) {
  cerr << "START ITERATION" << endl;
  std::vector<Serializable*> createdObjects;
  SensingFrameNode* sensingFrame = dynamic_cast<SensingFrameNode*>(node_);
  if (!sensingFrame)
    return;
  PinholeImageData* image = dynamic_cast<PinholeImageData*>(sensingFrame->sensorData(_topic));
  if (!image)
    return;
  cerr << "got image" << endl;

  Eigen::Isometry3d _sensorOffset = _config->sensorOffset(image->baseSensor());
  Eigen::Isometry3f sensorOffset;
  convertScalar(sensorOffset, _sensorOffset);
  sensorOffset.matrix().row(3) << 0, 0, 0, 1;
  Eigen::Matrix3d _cameraMatrix = image->cameraMatrix();
  ImageBLOB* blob = image->imageBlob().get();
  DepthImage depthImage;
  depthImage.fromCvMat(blob->cvImage());
  int r = depthImage.rows();
  int c = depthImage.cols();
  DepthImage scaledImage;
  Eigen::Matrix3f cameraMatrix;
  convertScalar(cameraMatrix, _cameraMatrix);

  PinholePointProjector* projector = dynamic_cast<PinholePointProjector*>(_converter->projector());
  cameraMatrix(2, 2) = 1;
  projector->setCameraMatrix(cameraMatrix);
  projector->setImageSize(depthImage.rows(), depthImage.cols());
  projector->scale(1.0 / _scale);
  DepthImage::scale(scaledImage, depthImage, _scale);
  pwn::Frame* frame = new pwn::Frame;
  _converter->compute(*frame, scaledImage, sensorOffset);

  MapNodeBinaryRelation* odom = 0;
  std::vector<MapNode*> oneNode(1);
  oneNode[0] = sensingFrame;
  MapNodeUnaryRelation* imu = extractRelation<MapNodeUnaryRelation>(oneNode);

  if (_previousFrame) {
    _aligner->setReferenceSensorOffset(_aligner->currentSensorOffset());
    _aligner->setCurrentSensorOffset(sensorOffset);
    _aligner->setReferenceFrame(_previousFrame);
    _aligner->setCurrentFrame(frame);
    PinholePointProjector* projector = (PinholePointProjector*)(_aligner->projector());
    projector->setCameraMatrix(cameraMatrix);
    projector->setImageSize(depthImage.rows(), depthImage.cols());
    projector->scale(1.0 / _scale);
    _aligner->correspondenceFinder()->setImageSize(projector->imageRows(), projector->imageCols());

    std::vector<MapNode*> twoNodes(2);
    twoNodes[0] = _previousSensingFrameNode;
    twoNodes[1] = sensingFrame;
    odom = extractRelation<MapNodeBinaryRelation>(twoNodes);
    cerr << "odom:" << odom << " imu:" << imu << endl;

    Eigen::Isometry3f guess = Eigen::Isometry3f::Identity();
    _aligner->clearPriors();
    if (odom) {
      Eigen::Isometry3f mean;
      Eigen::Matrix<float, 6, 6> info;
      convertScalar(mean, odom->transform());
      mean.matrix().row(3) << 0, 0, 0, 1;
      convertScalar(info, odom->informationMatrix());
      _aligner->addRelativePrior(mean, info);
    }
    if (imu) {
      Eigen::Isometry3f mean;
      Eigen::Matrix<float, 6, 6> info;
      convertScalar(mean, imu->transform());
      convertScalar(info, imu->informationMatrix());
      mean.matrix().row(3) << 0, 0, 0, 1;
      _aligner->addAbsolutePrior(_globalT, mean, info);
    }
    _aligner->setInitialGuess(guess);
    cerr << "Frames: " << _previousFrame << " " << frame << endl;
    _aligner->align();

    cerr << "inliers: " << _aligner->inliers() << endl;
    cerr << "chi2: " << _aligner->error() << endl;
    cerr << "chi2/inliers: " << _aligner->error() / _aligner->inliers() << endl;
    cerr << "initialGuess: " << t2v(guess).transpose() << endl;
    cerr << "transform   : " << t2v(_aligner->T()).transpose() << endl;
    if (_aligner->inliers() > -1) {  // note: this condition accepts every alignment
      _globalT = _globalT * _aligner->T();
      cerr << "TRANSFORM FOUND" << endl;
    } else {
      cerr << "FAILURE" << endl;
      _globalT = _globalT * guess;
    }
    if (!(_counter % 50)) {
      // First-order re-orthonormalization of the rotation, to fight numerical drift.
      Eigen::Matrix3f R = _globalT.linear();
      Eigen::Matrix3f E = R.transpose() * R;
      E.diagonal().array() -= 1;
      _globalT.linear() -= 0.5 * R * E;
    }
    _globalT.matrix().row(3) << 0, 0, 0, 1;
    cerr << "globalTransform: " << t2v(_globalT).transpose() << endl;

    cerr << "creating alias" << endl;
    // Create an alias for the previous node.
    MapNodeAlias* newRoot = new MapNodeAlias(_previousSensingFrameNode, _previousSensingFrameNode->manager());
    _previousSensingFrameNode->manager()->addNode(newRoot);
    cerr << "creating alias relation" << endl;
    MapNodeAliasRelation* aliasRel = new MapNodeAliasRelation(newRoot, _previousSensingFrameNode->manager());
    aliasRel->nodes()[0] = newRoot;
    aliasRel->nodes()[1] = _previousSensingFrameNode;
    _previousSensingFrameNode->manager()->addRelation(aliasRel);

    cerr << "reparenting old elements" << endl;
    // Assign all the relations used to determine the transformation to the alias node.
    if (imu) {
      imu->setOwner(newRoot);
      imu->manager()->addRelation(imu);
    }
    if (odom) {
      odom->setOwner(newRoot);
      odom->manager()->addRelation(odom);  // was addRelation(imu), almost certainly a copy-paste slip
    }

    cerr << "adding result" << endl;
    Relation* newRel = new Relation(_aligner, _converter, _previousSensingFrameNode, sensingFrame,
                                    _topic, _aligner->inliers(), _aligner->error(), _manager);
    newRel->setOwner(newRoot);
    newRel->nodes()[0] = newRoot;
    newRel->nodes()[1] = _previousSensingFrameNode;
    Eigen::Isometry3d iso;
    convertScalar(iso, _aligner->T());
    newRel->setTransform(iso);
    newRel->setInformationMatrix(Eigen::Matrix<double, 6, 6>::Identity());
    newRel->manager()->addRelation(newRel);
    *os << _globalT.translation().transpose() << endl;
  } else {
    // First frame: initialize the global transform, from the IMU if available.
    _aligner->setCurrentSensorOffset(sensorOffset);
    _globalT = Eigen::Isometry3f::Identity();
    if (imu) {
      Eigen::Isometry3f mean;
      convertScalar(mean, imu->transform());
      _globalT = mean;
    }
  }

  cerr << "deleting previous frame" << endl;
  if (_previousFrame)
    delete _previousFrame;
  cerr << "deleting blob frame" << endl;
  delete blob;
  cerr << "bookkeeping update" << endl;
  _previousSensingFrameNode = sensingFrame;
  _previousFrame = frame;
  _counter++;
  cerr << "END ITERATION" << endl;
}