Example #1
void GroundTruthOdometry::loadTrajectory(const std::string & filename)
{
    std::ifstream file;
    std::string line;
    file.open(filename.c_str());
    while (std::getline(file, line))
    {
        if (line.empty())
            continue;

        // Each line: timestamp,tx,ty,tz,qx,qy,qz,qw
        unsigned long long int utime;
        float x, y, z, qx, qy, qz, qw;
        int n = sscanf(line.c_str(), "%llu,%f,%f,%f,%f,%f,%f,%f", &utime, &x, &y, &z, &qx, &qy, &qz, &qw);

        assert(n == 8);

        Eigen::Quaternionf q(qw, qx, qy, qz);
        Eigen::Vector3f t(x, y, z);

        Eigen::Isometry3f T;
        T.setIdentity();
        T.pretranslate(t).rotate(q);
        camera_trajectory[utime] = T;
    }
}
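A minimal usage sketch for the trajectory loaded above, assuming camera_trajectory is an ordered map keyed by the microsecond timestamp; the map typedef and the poseAt helper are illustrative, not part of GroundTruthOdometry:

// Sketch only: assumes the trajectory container is an ordered timestamp -> pose map.
#include <iterator>
#include <map>
#include <Eigen/Geometry>

typedef std::map<unsigned long long, Eigen::Isometry3f, std::less<unsigned long long>,
                 Eigen::aligned_allocator<std::pair<const unsigned long long, Eigen::Isometry3f> > > Trajectory;

// Returns the pose whose timestamp is closest to utime (identity if the map is empty).
Eigen::Isometry3f poseAt(const Trajectory & trajectory, unsigned long long utime)
{
    if (trajectory.empty())
        return Eigen::Isometry3f::Identity();

    Trajectory::const_iterator it = trajectory.lower_bound(utime);
    if (it == trajectory.begin())
        return it->second;
    if (it == trajectory.end())
        return std::prev(it)->second;

    Trajectory::const_iterator prev = std::prev(it);
    return (utime - prev->first) < (it->first - utime) ? prev->second : it->second;
}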
Example #2
  bool extractAbsolutePrior(Eigen::Isometry3f& priorMean, Matrix6f& priorInfo,
                            const DrawableFrame* current) {
    VertexSE3* currentVertex = current->_vertex;

    // Walk the user-data list attached to the vertex and keep the last ImuData found.
    ImuData* imuData = 0;
    OptimizableGraph::Data* d = currentVertex->userData();
    while (d) {
      ImuData* imuData_ = dynamic_cast<ImuData*>(d);
      if (imuData_) {
        imuData = imuData_;
      }
      d = d->next();
    }

    if (imuData) {
      // Use the IMU orientation as an absolute prior on the rotational part only:
      // copy R into the rotation block of the mean and the orientation information
      // into the lower-right 3x3 block of the 6x6 information matrix.
      Eigen::Matrix3d R = imuData->getOrientation().matrix();
      Eigen::Matrix3d Omega = imuData->getOrientationCovariance().inverse();
      priorMean.setIdentity();
      priorInfo.setZero();
      for (int c = 0; c < 3; c++)
        for (int r = 0; r < 3; r++)
          priorMean.linear()(r, c) = R(r, c);

      for (int c = 0; c < 3; c++)
        for (int r = 0; r < 3; r++)
          priorInfo(r + 3, c + 3) = Omega(r, c);
      return true;
    }
    return false;
  }
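The two copy loops above fill the rotation part of the mean and the rotational information block; written with Eigen block expressions they look like this. A sketch only, assuming Matrix6f is a typedef for Eigen::Matrix<float, 6, 6> as in the surrounding codebase:

// Equivalent block-based version of the copy loops above (illustrative helper).
#include <Eigen/Geometry>

typedef Eigen::Matrix<float, 6, 6> Matrix6f;

void fillOrientationPrior(const Eigen::Matrix3d & R, const Eigen::Matrix3d & Omega,
                          Eigen::Isometry3f & priorMean, Matrix6f & priorInfo)
{
    priorMean.setIdentity();
    priorInfo.setZero();
    priorMean.linear() = R.cast<float>();                // rotation part of the mean
    priorInfo.block<3, 3>(3, 3) = Omega.cast<float>();   // rotational information block
}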
Example #3
bool Utils::
factorViewMatrix(const Eigen::Projective3f& iMatrix,
                 Eigen::Matrix3f& oCalib, Eigen::Isometry3f& oPose,
                 bool& oIsOrthographic) {
  oIsOrthographic = isOrthographic(iMatrix.matrix());

  // get appropriate rows
  std::vector<int> rows = {0,1,2};
  if (!oIsOrthographic) rows[2] = 3;

  // get A matrix (upper left 3x3) and t vector
  Eigen::Matrix3f A;
  Eigen::Vector3f t;
  for (int i = 0; i < 3; ++i) {
    for (int j = 0; j < 3; ++j) {
      A(i,j) = iMatrix(rows[i],j);
    }
    t[i] = iMatrix(rows[i],3);
  }

  // determine translation vector
  oPose.setIdentity();
  oPose.translation() = -(A.inverse()*t);

  // determine calibration matrix
  Eigen::Matrix3f AAtrans = A*A.transpose();
  AAtrans.col(0).swap(AAtrans.col(2));
  AAtrans.row(0).swap(AAtrans.row(2));
  Eigen::LLT<Eigen::Matrix3f, Eigen::Upper> llt(AAtrans);
  oCalib = llt.matrixU();
  oCalib.col(0).swap(oCalib.col(2));
  oCalib.row(0).swap(oCalib.row(2));
  oCalib.transposeInPlace();

  // compute rotation matrix
  oPose.linear() = (oCalib.inverse()*A).transpose();

  return true;
}
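The factorization above relies on the identity A = K * R^T, so the camera center is -A^{-1} * t and the rotation is (K^{-1} * A)^T. A small standalone check of those identities with made-up K, R and camera center (it does not use the Utils class):

// Sketch verifying the pose-recovery identities used in factorViewMatrix.
#include <iostream>
#include <Eigen/Geometry>

int main()
{
    Eigen::Matrix3f K;
    K << 500.0f,   0.0f, 320.0f,
           0.0f, 500.0f, 240.0f,
           0.0f,   0.0f,   1.0f;
    Eigen::Matrix3f R = Eigen::AngleAxisf(0.3f, Eigen::Vector3f::UnitY()).toRotationMatrix();
    Eigen::Vector3f c(1.0f, 2.0f, 3.0f);           // camera center in world coordinates

    Eigen::Matrix3f A = K * R.transpose();         // upper-left block of the view matrix
    Eigen::Vector3f t = -A * c;                    // its last column

    Eigen::Isometry3f pose;
    pose.setIdentity();
    pose.translation() = -(A.inverse() * t);       // should reproduce c
    pose.linear() = (K.inverse() * A).transpose(); // should reproduce R

    std::cout << (pose.translation() - c).norm() << " "
              << (pose.linear() - R).norm() << std::endl;   // both ~0
    return 0;
}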
Example #4
int main(int argc, char** argv) {
  string dirname;
  string graphFilename;
  float numThreads;

  int ng_step;
  int ng_minPoints;
  int ng_imageRadius;
  float ng_worldRadius;
  float ng_maxCurvature;
  
  float al_inlierDistance;
  float al_inlierCurvatureRatio;
  float al_inlierNormalAngle;
  float al_inlierMaxChi2;
  float al_scale;
  float al_flatCurvatureThreshold;
  float al_outerIterations;
  float al_nonLinearIterations;
  float al_linearIterations;
  float al_minInliers;
  float al_lambda;
  float al_debug;

  PointWithNormalStatistcsGenerator normalGenerator;
  PointWithNormalAligner aligner;

  g2o::CommandArgs arg;
  arg.param("ng_step",ng_step,normalGenerator.step(),"compute a normal each x pixels") ;
  arg.param("ng_minPoints",ng_minPoints,normalGenerator.minPoints(),"minimum number of points in a region to compute the normal");
  arg.param("ng_imageRadius",ng_imageRadius,normalGenerator.imageRadius(), "radius of a ball in the works where to compute the normal for a pixel");
  arg.param("ng_worldRadius",ng_worldRadius,  normalGenerator.worldRadius(), "radius of a ball in the works where to compute the normal for a pixel");
  arg.param("ng_maxCurvature",ng_maxCurvature, normalGenerator.maxCurvature(), "above this threshold the normal is not computed");
  
  arg.param("al_inlierDistance", al_inlierDistance, aligner.inlierDistanceThreshold(),  "max metric distance between two points to regard them as iniliers");
  arg.param("al_inlierCurvatureRatio", al_inlierCurvatureRatio, aligner.inlierCurvatureRatioThreshold(), "max metric distance between two points to regard them as iniliers");
  arg.param("al_inlierNormalAngle", al_inlierNormalAngle, aligner.inlierNormalAngularThreshold(), "max metric distance between two points to regard them as iniliers");
  arg.param("al_inlierMaxChi2", al_inlierMaxChi2, aligner.inlierMaxChi2(), "max metric distance between two points to regard them as iniliers");
  arg.param("al_minInliers", al_minInliers, aligner.minInliers(), "minimum numver of inliers to do the matching");
  arg.param("al_scale", al_scale, aligner.scale(), "scale of the range image for the alignment");
  arg.param("al_flatCurvatureThreshold", al_flatCurvatureThreshold, aligner.flatCurvatureThreshold(), "curvature above which the patches are not considered planar");
  arg.param("al_outerIterations", al_outerIterations, aligner.outerIterations(), "outer interations (incl. data association)");
  arg.param("al_linearIterations", al_linearIterations, aligner.linearIterations(), "linear iterations for each outer one (uses R,t)");
  arg.param("al_nonLinearIterations", al_nonLinearIterations, aligner.nonLinearIterations(), "nonlinear iterations for each outer one (uses q,t)");
  arg.param("al_lambda", al_lambda, aligner.lambda(), "damping factor for the transformation update, the higher the smaller the step");
  arg.param("al_debug", al_debug, aligner.debug(), "prints lots of stuff");
  arg.param("numThreads", numThreads, 1, "numver of threads for openmp");
  

  if (numThreads<1)
    numThreads = 1;
  
  std::vector<string> fileNames;

  arg.paramLeftOver("dirname", dirname, "", "", true);
  arg.paramLeftOver("graph-file", graphFilename, "out.g2o", "graph-file", true);
  arg.parseArgs(argc, argv);
  cerr << "dirname " << dirname << endl;

  std::vector<string> filenames;
  std::set<string> filenamesset = readDir(dirname);
  for(set<string>::const_iterator it =filenamesset.begin(); it!=filenamesset.end(); it++) {
    filenames.push_back(*it);
  }
  
  normalGenerator.setStep(ng_step);
  normalGenerator.setMinPoints(ng_minPoints);
  normalGenerator.setImageRadius(ng_imageRadius);
  normalGenerator.setWorldRadius(ng_worldRadius);
  normalGenerator.setMaxCurvature(ng_maxCurvature);
#ifdef _PWN_USE_OPENMP_
  normalGenerator.setNumThreads(numThreads);
#endif //_PWN_USE_OPENMP_

  aligner.setInlierDistanceThreshold(al_inlierDistance);
  aligner.setInlierCurvatureRatioThreshold(al_inlierCurvatureRatio);
  aligner.setInlierNormalAngularThreshold(al_inlierNormalAngle);
  aligner.setInlierMaxChi2(al_inlierMaxChi2);
  aligner.setMinInliers(al_minInliers);
  aligner.setScale(al_scale);
  aligner.setFlatCurvatureThreshold(al_flatCurvatureThreshold);
  aligner.setOuterIterations(al_outerIterations);
  aligner.setLinearIterations(al_linearIterations);
  aligner.setNonLinearIterations(al_nonLinearIterations);
  aligner.setLambda(al_lambda);
  aligner.setDebug(al_debug);
#ifdef _PWN_USE_OPENMP_
  aligner.setNumThreads(numThreads);
#endif //_PWN_USE_OPENMP_


  Frame* referenceFrame= 0;

  Eigen::Matrix3f cameraMatrix;
  cameraMatrix << 
    525.0f, 0.0f, 319.5f,
    0.0f, 525.0f, 239.5f,
    0.0f, 0.0f, 1.0f;

  ostringstream os;
  

  cerr << "there are " << filenames.size() << " files  in the pool" << endl; 
  Isometry3f trajectory;
  trajectory.setIdentity();
  int previousIndex=-1;
  int graphNum=0;
  int nFrames = 0;
  string baseFilename = graphFilename.substr( 0, graphFilename.find_last_of( '.' ) );
  for (size_t i=0; i<filenames.size(); i++){
    cerr << endl << endl << endl;
    cerr << ">>>>>>>>>>>>>>>>>>>>>>>> PROCESSING " << filenames[i] << " <<<<<<<<<<<<<<<<<<<<" <<  endl;
    Frame* currentFrame= new Frame();
    if(!currentFrame->load(filenames[i])) {
      cerr << "failure in loading image: " << filenames[i] << ", skipping" << endl;
      delete currentFrame;
      continue;
    }
    nFrames ++;
    if (! referenceFrame ){
      os << "PARAMS_CAMERACALIB 0 0 0 0 0 0 0 1 525 525 319.5 239.5"<< endl;
      trajectory.setIdentity();
    }
    {
      // write the vertex frame
      Vector6f x = t2v(trajectory);
      Vector3f t = x.head<3>();
      Vector3f mq = x.tail<3>();
      float w = mq.squaredNorm();
      if (w>1){
	mq.setZero();
	w = 1.0f;
      } else {
	w = sqrt(1-w);
      }
      
      os << "VERTEX_SE3:QUAT " << i << " " << t.transpose() << " " << mq.transpose() << " " << w << endl;
      os << "DEPTH_IMAGE_DATA 0 " << filenames[i] << " 0 0 " << endl;
    }
    
    currentFrame->computeStats(normalGenerator,cameraMatrix);
    if (referenceFrame) {
      referenceFrame->setAligner(aligner, true);
      currentFrame->setAligner(aligner, false);

      Matrix6f omega;
      Vector6f mean;
      float tratio;
      float rratio;
      aligner.setImageSize(currentFrame->depthImage.rows(), currentFrame->depthImage.cols());
      Eigen::Isometry3f X;
      X.setIdentity();
      double ostart = get_time();
      float error;
      int result = aligner.align(error, X, mean, omega, tratio, rratio);
      cerr << "inliers=" << result << " error/inliers: " << error/result << endl;
      cerr << "localTransform : " << endl;
      cerr << X.inverse().matrix() << endl;
      trajectory=trajectory*X;
      cerr << "globaltransform: " << endl;
      cerr << trajectory.matrix() << endl;
      double oend = get_time();
      cerr << "alignment took: " << oend-ostart << " sec." << endl;
      cerr << "aligner scaled image size: " << aligner.scaledImageRows() << " " << aligner.scaledImageCols() << endl;
      
      if(rratio < 100 && tratio < 100) {
	// write the edge frame
	Vector6f x = mean;
	Vector3f t = x.head<3>();
	Vector3f mq = x.tail<3>();
	float w = mq.squaredNorm();
	if (w>1){
	  mq.setZero();
	  w = 1.0f;
	} else {
	  w = sqrt(1-w);
	}
	
	os << "EDGE_SE3:QUAT " << previousIndex << " " << i << " ";
	os << t.transpose() << " " << mq.transpose() << " " << w <<  " ";
	for (int r=0; r<6; r++){
	  for (int c=r; c<6; c++){
	    os << omega(r,c) << " ";
	  }
	} 
	os << endl;
      } else {
	if (nFrames >10) {
	  char buf[1024];
	  sprintf(buf, "%s-%03d.g2o", baseFilename.c_str(), graphNum);	
	  ofstream gs(buf);
	  gs << os.str();
	  gs.close();
	}
	os.str("");
	os.clear();
	if (referenceFrame)
	  delete referenceFrame;
	referenceFrame = 0;
	graphNum ++;
	nFrames = 0;
      }

    }
    previousIndex = i;
    if (referenceFrame)
      delete referenceFrame;
    referenceFrame = currentFrame;
  }

  char buf[1024];
  sprintf(buf, "%s-%03d.g2o", baseFilename.c_str(), graphNum);	
  cerr << "saving final frames, n: " << nFrames << " in file [" << buf << "]" << endl;
  cerr << "filesize:" << os.str().length() << endl; 
  ofstream gs(buf);
  gs << os.str();
  cout << os.str();
  gs.close();
  
}
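In the blocks above, w = sqrt(1 - |mq|^2) recovers the scalar part of a unit quaternion from its vector part before writing the VERTEX_SE3:QUAT / EDGE_SE3:QUAT lines. An equivalent sketch that builds the same vertex line directly from an Eigen::Isometry3f; the writeVertexSE3 helper is illustrative, not part of this program:

// Illustrative helper: emits a g2o "VERTEX_SE3:QUAT id tx ty tz qx qy qz qw" line.
#include <ostream>
#include <Eigen/Geometry>

void writeVertexSE3(std::ostream & os, int id, const Eigen::Isometry3f & T)
{
    Eigen::Quaternionf q(T.rotation());
    q.normalize();
    if (q.w() < 0.0f)                 // keep the scalar part positive, as sqrt(1 - |mq|^2) does
        q.coeffs() = -q.coeffs();
    Eigen::Vector3f t = T.translation();
    os << "VERTEX_SE3:QUAT " << id << " "
       << t.x() << " " << t.y() << " " << t.z() << " "
       << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << "\n";
}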
Example #5
void ICPOdometry::getIncrementalTransformation(Eigen::Vector3f & trans, Eigen::Matrix<float, 3, 3, Eigen::RowMajor> & rot, int threads, int blocks)
{
    iterations[0] = 10;
    iterations[1] = 5;
    iterations[2] = 4;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev = rot;
    Eigen::Vector3f tprev = trans;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcurr = Rprev;
    Eigen::Vector3f tcurr = tprev;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev_inv = Rprev.inverse();
    Mat33 & device_Rprev_inv = device_cast<Mat33>(Rprev_inv);
    float3& device_tprev = device_cast<float3>(tprev);

    cv::Mat resultRt = cv::Mat::eye(4, 4, CV_64FC1);

    for(int i = NUM_PYRS - 1; i >= 0; i--)
    {
        for(int j = 0; j < iterations[i]; j++)
        {
            Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_icp;
            Eigen::Matrix<float, 6, 1> b_icp;

            Mat33&  device_Rcurr = device_cast<Mat33> (Rcurr);
            float3& device_tcurr = device_cast<float3>(tcurr);

            DeviceArray2D<float>& vmap_curr = vmaps_curr_[i];
            DeviceArray2D<float>& nmap_curr = nmaps_curr_[i];

            DeviceArray2D<float>& vmap_g_prev = vmaps_g_prev_[i];
            DeviceArray2D<float>& nmap_g_prev = nmaps_g_prev_[i];

            float residual[2];

            icpStep(device_Rcurr,
                    device_tcurr,
                    vmap_curr,
                    nmap_curr,
                    device_Rprev_inv,
                    device_tprev,
                    intr(i),
                    vmap_g_prev,
                    nmap_g_prev,
                    distThres_,
                    angleThres_,
                    sumData,
                    outData,
                    A_icp.data(),
                    b_icp.data(),
                    &residual[0],
                    threads,
                    blocks);

            lastICPError = sqrt(residual[0]) / residual[1];
            lastICPCount = residual[1];

            Eigen::Matrix<double, 6, 1> result;
            Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_icp = A_icp.cast<double>();
            Eigen::Matrix<double, 6, 1> db_icp = b_icp.cast<double>();

            lastA = dA_icp;
            lastb = db_icp;
            result = lastA.ldlt().solve(lastb);

            Eigen::Isometry3f incOdom;

            OdometryProvider::computeProjectiveMatrix(resultRt, result, incOdom);

            Eigen::Isometry3f currentT;
            currentT.setIdentity();
            currentT.rotate(Rprev);
            currentT.translation() = tprev;

            currentT = currentT * incOdom.inverse();

            tcurr = currentT.translation();
            Rcurr = currentT.rotation();
        }
    }

    trans = tcurr;
    rot = Rcurr;
}
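The rot/trans pair passed in and out above is the absolute camera pose; it can be wrapped into an Eigen::Isometry3f the same way currentT is built inside the loop. A small sketch with illustrative helper names, not part of ICPOdometry:

// Sketch: wrap the (rot, trans) pair into a pose and extract frame-to-frame motion.
#include <Eigen/Geometry>

Eigen::Isometry3f toIsometry(const Eigen::Matrix<float, 3, 3, Eigen::RowMajor> & rot,
                             const Eigen::Vector3f & trans)
{
    Eigen::Isometry3f T;
    T.setIdentity();
    T.rotate(rot);                 // same pattern as currentT.rotate(Rprev) above
    T.translation() = trans;
    return T;
}

// Relative motion between two absolute poses: T_delta = T_prev^{-1} * T_curr.
Eigen::Isometry3f increment(const Eigen::Isometry3f & Tprev, const Eigen::Isometry3f & Tcurr)
{
    return Tprev.inverse() * Tcurr;
}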
Example #6
int main(int argc, char** argv) {
    // Handle input
    float pointSize;
    float pointStep;
    float alpha;
    int applyTransform;
    int step;
    string logFilename;
    string configFilename;
    float di_scaleFactor;
    float scale;

    g2o::CommandArgs arg;
    arg.param("vz_pointSize", pointSize, 1.0f, "Size of the points where are visualized");
    arg.param("vz_transform", applyTransform, 1, "Choose if you want to apply the absolute transform of the point clouds");
    arg.param("vz_step", step, 1, "Visualize a point cloud each vz_step point clouds");
    arg.param("vz_alpha", alpha, 1.0f, "Alpha channel value used for the color points");
    arg.param("vz_pointStep", pointStep, 1, "Step at which point are drawn");
    arg.param("vz_scale", scale, 2, "Depth image size reduction factor");
    arg.param("di_scaleFactor", di_scaleFactor, 0.001f, "Scale factor to apply to convert depth images in meters");
    arg.paramLeftOver("configFilename", configFilename, "", "Configuration filename", true);
    arg.paramLeftOver("logFilename", logFilename, "", "Log filename", true);
    arg.parseArgs(argc, argv);

    // Create GUI
    QApplication application(argc,argv);
    QWidget *mainWindow = new QWidget();
    mainWindow->setWindowTitle("pwn_tracker_log_viewer");
    QHBoxLayout *hlayout = new QHBoxLayout();
    mainWindow->setLayout(hlayout);
    QVBoxLayout *vlayout = new QVBoxLayout();
    hlayout->addItem(vlayout);
    QVBoxLayout *vlayout2 = new QVBoxLayout();
    hlayout->addItem(vlayout2);
    hlayout->setStretch(1, 1);

    QListWidget* listWidget = new QListWidget(mainWindow);
    listWidget->setSelectionMode(QAbstractItemView::MultiSelection);
    vlayout->addWidget(listWidget);
    PWNQGLViewer* viewer = new PWNQGLViewer(mainWindow);
    vlayout2->addWidget(viewer);
    Eigen::Isometry3f T;
    T.setIdentity();
    T.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

    // Read config file
    cout << "Loading config file..." << endl;
    Aligner *aligner;
    DepthImageConverter *converter;
    std::vector<Serializable*> instances = readConfig(aligner, converter, configFilename.c_str());
    converter->projector()->scale(1.0f/scale);
    cout << "... done" << endl;

    // Read and parse log file
    std::vector<boss::Serializable*> objects;
    std::vector<PwnTrackerFrame*> trackerFrames;
    std::vector<PwnTrackerRelation*> trackerRelations;
    Deserializer des;
    des.setFilePath(logFilename);
    cout << "Loading log file..." << endl;
    load(trackerFrames, trackerRelations, objects, des, step);
    cout << "... done" << endl;

    // Load the drawable list with the PwnTrackerFrame objects
    std::vector<Frame*> pointClouds;
    pointClouds.resize(trackerFrames.size());
    Frame *dummyFrame = 0;
    std::fill(pointClouds.begin(), pointClouds.end(), dummyFrame);
    for(size_t i = 0; i < trackerFrames.size(); i++) {
        PwnTrackerFrame *pwnTrackerFrame = trackerFrames[i];
        char nummero[1024];
        sprintf(nummero, "%05d", (int)i);
        listWidget->addItem(QString(nummero));
        QListWidgetItem *lastItem = listWidget->item(listWidget->count() - 1);

        Isometry3f transform = Isometry3f::Identity();
        if(applyTransform) {
            isometry3d2f(transform, pwnTrackerFrame->transform());
            transform = transform*pwnTrackerFrame->sensorOffset;
        }
        transform.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

        GLParameterFrame *frameParams = new GLParameterFrame();
        frameParams->setStep(pointStep);
        frameParams->setShow(false);
        DrawableFrame* drawableFrame = new DrawableFrame(transform, frameParams, pointClouds[i]);
        viewer->addDrawable(drawableFrame);
    }

    // Manage GUI
    viewer->init();
    mainWindow->show();
    viewer->show();
    listWidget->show();
    viewer->setAxisIsDrawn(true);
    bool selectionChanged = false;
    QListWidgetItem *item = 0;
    DepthImage depthImage;
    DepthImage scaledDepthImage;
    while (mainWindow->isVisible()) {
        selectionChanged = false;
        for (int i = 0; i<listWidget->count(); i++) {
            item = 0;
            item = listWidget->item(i);
            DrawableFrame *drawableFrame = dynamic_cast<DrawableFrame*>(viewer->drawableList()[i]);
            if (item && item->isSelected()) {
                if(!drawableFrame->parameter()->show()) {
                    drawableFrame->parameter()->setShow(true);
                    selectionChanged = true;
                }
                if(pointClouds[i] == 0) {
                    pointClouds[i] = new Frame();
                    boss_map::ImageBLOB* fromDepthBlob = trackerFrames[i]->depthImage.get();
                    DepthImage depthImage;
                    depthImage.fromCvMat(fromDepthBlob->cvImage());
                    DepthImage::scale(scaledDepthImage, depthImage, scale);
                    converter->compute(*pointClouds[i], scaledDepthImage);
                    drawableFrame->setFrame(pointClouds[i]);
                    delete fromDepthBlob;
                }
            } else {
                drawableFrame->parameter()->setShow(false);
                selectionChanged = true;
            }
        }
        if (selectionChanged)
            viewer->updateGL();

        application.processEvents();
    }

    return 0;
}
Example #7
void RGBDOdometry::getIncrementalTransformation(Eigen::Vector3f & trans,
                                                Eigen::Matrix<float, 3, 3, Eigen::RowMajor> & rot,
                                                const bool & rgbOnly,
                                                const float & icpWeight,
                                                const bool & pyramid,
                                                const bool & fastOdom,
                                                const bool & so3)
{
    bool icp = !rgbOnly && icpWeight > 0;
    bool rgb = rgbOnly || icpWeight < 100;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev = rot;
    Eigen::Vector3f tprev = trans;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcurr = Rprev;
    Eigen::Vector3f tcurr = tprev;

    if(rgb)
    {
        for(int i = 0; i < NUM_PYRS; i++)
        {
            computeDerivativeImages(nextImage[i], nextdIdx[i], nextdIdy[i]);
        }
    }

    Eigen::Matrix<double, 3, 3, Eigen::RowMajor> resultR = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Identity();

    if(so3)
    {
        int pyramidLevel = 2;

        Eigen::Matrix<float, 3, 3, Eigen::RowMajor> R_lr = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>::Identity();

        Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Zero();

        K(0, 0) = intr(pyramidLevel).fx;
        K(1, 1) = intr(pyramidLevel).fy;
        K(0, 2) = intr(pyramidLevel).cx;
        K(1, 2) = intr(pyramidLevel).cy;
        K(2, 2) = 1;

        float lastError = std::numeric_limits<float>::max() / 2;
        float lastCount = std::numeric_limits<float>::max() / 2;

        Eigen::Matrix<double, 3, 3, Eigen::RowMajor> lastResultR = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Identity();

        for(int i = 0; i < 10; i++)
        {
            Eigen::Matrix<float, 3, 3, Eigen::RowMajor> jtj;
            Eigen::Matrix<float, 3, 1> jtr;

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> homography = K * resultR * K.inverse();

            mat33 imageBasis;
            memcpy(&imageBasis.data[0], homography.cast<float>().eval().data(), sizeof(mat33));

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K_inv = K.inverse();
            mat33 kinv;
            memcpy(&kinv.data[0], K_inv.cast<float>().eval().data(), sizeof(mat33));

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K_R_lr = K * resultR;
            mat33 krlr;
            memcpy(&krlr.data[0], K_R_lr.cast<float>().eval().data(), sizeof(mat33));

            float residual[2];

            TICK("so3Step");
            so3Step(lastNextImage[pyramidLevel],
                    nextImage[pyramidLevel],
                    imageBasis,
                    kinv,
                    krlr,
                    sumDataSO3,
                    outDataSO3,
                    jtj.data(),
                    jtr.data(),
                    &residual[0],
                    GPUConfig::getInstance().so3StepThreads,
                    GPUConfig::getInstance().so3StepBlocks);
            TOCK("so3Step");

            lastSO3Error = sqrt(residual[0]) / residual[1];
            lastSO3Count = residual[1];

            //Converged
            if(lastSO3Error < lastError && lastCount == lastSO3Count)
            {
                break;
            }
            else if(lastSO3Error > lastError + 0.001) //Diverging
            {
                lastSO3Error = lastError;
                lastSO3Count = lastCount;
                resultR = lastResultR;
                break;
            }

            lastError = lastSO3Error;
            lastCount = lastSO3Count;
            lastResultR = resultR;

            Eigen::Vector3f delta = jtj.ldlt().solve(jtr);

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> rotUpdate = OdometryProvider::rodrigues(delta.cast<double>());

            R_lr = rotUpdate.cast<float>() * R_lr;

            for(int x = 0; x < 3; x++)
            {
                for(int y = 0; y < 3; y++)
                {
                    resultR(x, y) = R_lr(x, y);
                }
            }
        }
    }

    iterations[0] = fastOdom ? 3 : 10;
    iterations[1] = pyramid ? 5 : 0;
    iterations[2] = pyramid ? 4 : 0;

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rprev_inv = Rprev.inverse();
    mat33 device_Rprev_inv = Rprev_inv;
    float3 device_tprev = *reinterpret_cast<float3*>(tprev.data());

    Eigen::Matrix<double, 4, 4, Eigen::RowMajor> resultRt = Eigen::Matrix<double, 4, 4, Eigen::RowMajor>::Identity();

    if(so3)
    {
        for(int x = 0; x < 3; x++)
        {
            for(int y = 0; y < 3; y++)
            {
                resultRt(x, y) = resultR(x, y);
            }
        }
    }

    for(int i = NUM_PYRS - 1; i >= 0; i--)
    {
        if(rgb)
        {
            projectToPointCloud(lastDepth[i], pointClouds[i], intr, i);
        }

        Eigen::Matrix<double, 3, 3, Eigen::RowMajor> K = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Zero();

        K(0, 0) = intr(i).fx;
        K(1, 1) = intr(i).fy;
        K(0, 2) = intr(i).cx;
        K(1, 2) = intr(i).cy;
        K(2, 2) = 1;

        lastRGBError = std::numeric_limits<float>::max();

        for(int j = 0; j < iterations[i]; j++)
        {
            Eigen::Matrix<double, 4, 4, Eigen::RowMajor> Rt = resultRt.inverse();

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> R = Rt.topLeftCorner(3, 3);

            Eigen::Matrix<double, 3, 3, Eigen::RowMajor> KRK_inv = K * R * K.inverse();
            mat33 krkInv;
            memcpy(&krkInv.data[0], KRK_inv.cast<float>().eval().data(), sizeof(mat33));

            Eigen::Vector3d Kt = Rt.topRightCorner(3, 1);
            Kt = K * Kt;
            float3 kt = {(float)Kt(0), (float)Kt(1), (float)Kt(2)};

            int sigma = 0;
            int rgbSize = 0;

            if(rgb)
            {
                TICK("computeRgbResidual");
                computeRgbResidual(pow(minimumGradientMagnitudes[i], 2.0) / pow(sobelScale, 2.0),
                                   nextdIdx[i],
                                   nextdIdy[i],
                                   lastDepth[i],
                                   nextDepth[i],
                                   lastImage[i],
                                   nextImage[i],
                                   corresImg[i],
                                   sumResidualRGB,
                                   maxDepthDeltaRGB,
                                   kt,
                                   krkInv,
                                   sigma,
                                   rgbSize,
                                   GPUConfig::getInstance().rgbResThreads,
                                   GPUConfig::getInstance().rgbResBlocks);
                TOCK("computeRgbResidual");
            }

            // Guard the divisions against rgbSize == 0 (no residuals accumulated)
            float sigmaVal = std::sqrt((float)sigma / (rgbSize == 0 ? 1 : rgbSize));
            float rgbError = std::sqrt(sigma) / (rgbSize == 0 ? 1 : rgbSize);

            if(rgbOnly && rgbError > lastRGBError)
            {
                break;
            }

            lastRGBError = rgbError;
            lastRGBCount = rgbSize;

            if(rgbOnly)
            {
                sigmaVal = -1; //Signals the internal optimisation to weight evenly
            }

            Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_icp;
            Eigen::Matrix<float, 6, 1> b_icp;

            mat33 device_Rcurr = Rcurr;
            float3 device_tcurr = *reinterpret_cast<float3*>(tcurr.data());

            DeviceArray2D<float>& vmap_curr = vmaps_curr_[i];
            DeviceArray2D<float>& nmap_curr = nmaps_curr_[i];

            DeviceArray2D<float>& vmap_g_prev = vmaps_g_prev_[i];
            DeviceArray2D<float>& nmap_g_prev = nmaps_g_prev_[i];

            float residual[2];

            if(icp)
            {
                TICK("icpStep");
                icpStep(device_Rcurr,
                        device_tcurr,
                        vmap_curr,
                        nmap_curr,
                        device_Rprev_inv,
                        device_tprev,
                        intr(i),
                        vmap_g_prev,
                        nmap_g_prev,
                        distThres_,
                        angleThres_,
                        sumDataSE3,
                        outDataSE3,
                        A_icp.data(),
                        b_icp.data(),
                        &residual[0],
                        GPUConfig::getInstance().icpStepThreads,
                        GPUConfig::getInstance().icpStepBlocks);
                TOCK("icpStep");
            }

            if(icp)
            {
                lastICPError = sqrt(residual[0]) / residual[1];
                lastICPCount = residual[1];
            }

            Eigen::Matrix<float, 6, 6, Eigen::RowMajor> A_rgbd;
            Eigen::Matrix<float, 6, 1> b_rgbd;

            if(rgb)
            {
                TICK("rgbStep");
                rgbStep(corresImg[i],
                        sigmaVal,
                        pointClouds[i],
                        intr(i).fx,
                        intr(i).fy,
                        nextdIdx[i],
                        nextdIdy[i],
                        sobelScale,
                        sumDataSE3,
                        outDataSE3,
                        A_rgbd.data(),
                        b_rgbd.data(),
                        GPUConfig::getInstance().rgbStepThreads,
                        GPUConfig::getInstance().rgbStepBlocks);
                TOCK("rgbStep");
            }

            Eigen::Matrix<double, 6, 1> result;
            Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_rgbd = A_rgbd.cast<double>();
            Eigen::Matrix<double, 6, 6, Eigen::RowMajor> dA_icp = A_icp.cast<double>();
            Eigen::Matrix<double, 6, 1> db_rgbd = b_rgbd.cast<double>();
            Eigen::Matrix<double, 6, 1> db_icp = b_icp.cast<double>();

            if(icp && rgb)
            {
                double w = icpWeight;
                lastA = dA_rgbd + w * w * dA_icp;
                lastb = db_rgbd + w * db_icp;
                result = lastA.ldlt().solve(lastb);
            }
            else if(icp)
            {
                lastA = dA_icp;
                lastb = db_icp;
                result = lastA.ldlt().solve(lastb);
            }
            else if(rgb)
            {
                lastA = dA_rgbd;
                lastb = db_rgbd;
                result = lastA.ldlt().solve(lastb);
            }
            else
            {
                assert(false && "Control shouldn't reach here");
            }

            Eigen::Isometry3f rgbOdom;

            OdometryProvider::computeUpdateSE3(resultRt, result, rgbOdom);

            Eigen::Isometry3f currentT;
            currentT.setIdentity();
            currentT.rotate(Rprev);
            currentT.translation() = tprev;

            currentT = currentT * rgbOdom.inverse();

            tcurr = currentT.translation();
            Rcurr = currentT.rotation();
        }
    }

    if(rgb && (tcurr - tprev).norm() > 0.3)
    {
        Rcurr = Rprev;
        tcurr = tprev;
    }

    if(so3)
    {
        for(int i = 0; i < NUM_PYRS; i++)
        {
            std::swap(lastNextImage[i], nextImage[i]);
        }
    }

    trans = tcurr;
    rot = Rcurr;
}
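OdometryProvider::rodrigues, used in the SO(3) stage above, maps a rotation vector to a rotation matrix; the same mapping can be written with Eigen::AngleAxis. A sketch of that step and of composing the resulting update, for illustration only; the actual parameter ordering and update rule are defined by OdometryProvider::computeUpdateSE3:

// Rodrigues' formula via Eigen::AngleAxis: rotation vector (axis * angle) -> rotation matrix.
#include <Eigen/Geometry>

Eigen::Matrix<double, 3, 3, Eigen::RowMajor> rotationFromVector(const Eigen::Vector3d & omega)
{
    const double angle = omega.norm();
    if (angle < 1e-12)                         // near zero: identity rotation
        return Eigen::Matrix<double, 3, 3, Eigen::RowMajor>::Identity();
    return Eigen::AngleAxisd(angle, omega / angle).toRotationMatrix();
}

// Apply a small update the same way as in the loop above: wrap rotation and
// translation into an isometry, then compose it with the previous pose.
Eigen::Isometry3f applyUpdate(const Eigen::Isometry3f & prev,
                              const Eigen::Vector3d & omega, const Eigen::Vector3d & v)
{
    Eigen::Isometry3f update;
    update.setIdentity();
    update.linear() = rotationFromVector(omega).cast<float>();
    update.translation() = v.cast<float>();
    return prev * update.inverse();            // mirrors currentT * rgbOdom.inverse()
}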