Code Example #1
File: ICPOdometry.cpp  Project: ahundt/ICPCUDA
void ICPOdometry::initICPModel(const DeviceArray2D<unsigned short>& depth,
                               const float depthCutoff,
                               const Eigen::Matrix4f & modelPose)
{
    depth_tmp[0] = depth;

    // Build a depth image pyramid by repeated downsampling
    for(int i = 1; i < NUM_PYRS; ++i)
    {
        pyrDown(depth_tmp[i - 1], depth_tmp[i]);
    }

    // Create vertex and normal maps for every pyramid level
    for(int i = 0; i < NUM_PYRS; ++i)
    {
        createVMap(intr(i), depth_tmp[i], vmaps_g_prev_[i], depthCutoff);
        createNMap(vmaps_g_prev_[i], nmaps_g_prev_[i]);
    }

    cudaDeviceSynchronize();

    // Split the model pose into its rotation and translation for the device kernels
    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcam = modelPose.topLeftCorner(3, 3);
    Eigen::Vector3f tcam = modelPose.topRightCorner(3, 1);

    Mat33 &  device_Rcam = device_cast<Mat33>(Rcam);
    float3& device_tcam = device_cast<float3>(tcam);

    // Apply the model pose to the vertex and normal maps unless it is the identity
    if (modelPose != Eigen::Matrix4f::Identity())
      for(int i = 0; i < NUM_PYRS; ++i)
        tranformMaps(vmaps_g_prev_[i], nmaps_g_prev_[i], device_Rcam, device_tcam, vmaps_g_prev_[i], nmaps_g_prev_[i]);

    cudaDeviceSynchronize();
}
Code Example #2
void ICPSlowdometry::initICPModel(unsigned short * depth,
                                  const float depthCutoff,
                                  const Eigen::Matrix4f & modelPose)
{
    depth_tmp[0].upload(depth, sizeof(unsigned short) * width, height, width);

    for(int i = 1; i < NUM_PYRS; ++i)
    {
        pyrDown(depth_tmp[i - 1], depth_tmp[i]);
    }

    for(int i = 0; i < NUM_PYRS; ++i)
    {
        createVMap(intr(i), depth_tmp[i], vmaps_g_prev_[i], depthCutoff);
        createNMap(vmaps_g_prev_[i], nmaps_g_prev_[i]);
    }

    cudaDeviceSynchronize();

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcam = modelPose.topLeftCorner(3, 3);
    Eigen::Vector3f tcam = modelPose.topRightCorner(3, 1);

    Mat33 &  device_Rcam = device_cast<Mat33>(Rcam);
    float3& device_tcam = device_cast<float3>(tcam);

    for(int i = 0; i < NUM_PYRS; ++i)
    {
        tranformMaps(vmaps_g_prev_[i], nmaps_g_prev_[i], device_Rcam, device_tcam, vmaps_g_prev_[i], nmaps_g_prev_[i]);
    }

    cudaDeviceSynchronize();
}
Code Example #3
File: dirini_visualizer.cpp  Project: mwuethri/dirini
	void DiriniVisualizer::addCloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_, Eigen::Matrix3f R, Eigen::Vector3f t)
	{
		Eigen::Matrix4f H = Eigen::Matrix4f::Identity();

		H.topLeftCorner(3,3) = R;
		H.topRightCorner(3,1) = t;

		addCloud(cloud_, H);
	}
Code Example #4
File: dirini_visualizer.cpp  Project: mwuethri/dirini
	void DiriniVisualizer::addCloud(boost::shared_ptr<std::vector<Eigen::Vector3i> > points, Eigen::Matrix3f R, Eigen::Vector3f t)
	{
		Eigen::Matrix4f H = Eigen::Matrix4f::Identity();

		H.topLeftCorner(3,3) = R;
		H.topRightCorner(3,1) = t;

		addCloud(points, H);
	}
Code Example #5
#include <iostream>
#include <Eigen/Dense>

using namespace std;

int main()
{
  Eigen::Matrix4f m;
  m << 1, 2, 3, 4,
       5, 6, 7, 8,
       9, 10,11,12,
       13,14,15,16;
  cout << "m.leftCols(2) =" << endl << m.leftCols(2) << endl << endl;
  cout << "m.bottomRows<2>() =" << endl << m.bottomRows<2>() << endl << endl;
  m.topLeftCorner(1,3) = m.bottomRightCorner(3,1).transpose();
  cout << "After assignment, m = " << endl << m << endl;
}
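
Examples #1 and #2 above, and several of the snippets below, read a rigid-body pose out of a 4x4 homogeneous matrix with topLeftCorner(3, 3) for the rotation and topRightCorner(3, 1) for the translation, while Examples #3 and #4 write one in. The following is a minimal standalone round-trip sketch of that pattern; it assumes only Eigen 3 and is not taken from any of the listed projects.

#include <iostream>
#include <Eigen/Dense>

int main()
{
  // Compose a 4x4 homogeneous pose from a rotation and a translation
  Eigen::Matrix3f R = Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitZ()).toRotationMatrix();
  Eigen::Vector3f t(1.0f, 2.0f, 3.0f);

  Eigen::Matrix4f T = Eigen::Matrix4f::Identity();
  T.topLeftCorner(3, 3)  = R;   // 3x3 block starting at (0,0)
  T.topRightCorner(3, 1) = t;   // 3x1 block starting at (0,3)

  // Read the pose back out, as the odometry examples do with modelPose
  Eigen::Matrix3f R_out = T.topLeftCorner(3, 3);
  Eigen::Vector3f t_out = T.topRightCorner(3, 1);

  std::cout << "T =\n" << T << "\n\nR_out =\n" << R_out
            << "\n\nt_out = " << t_out.transpose() << std::endl;
  return 0;
}

The fixed-size forms topLeftCorner<3, 3>() and topRightCorner<3, 1>() are equivalent here and tell Eigen the block size at compile time; Example #8 uses that variant in transform_R.topLeftCorner<3,3>().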
Code Example #6
template<typename _DOF6>
void OBJCTXT<_DOF6>::findCorrespondences3(const OBJCTXT &ctxt, std::vector<SCOR> &cors,
                                          const DOF6 &tf) {
  map_cors_.clear();

  Eigen::Matrix4f M = tf.getTF4().inverse();
  Eigen::Vector3f t=M.col(3).head<3>();
  Eigen::Matrix3f R=M.topLeftCorner(3,3);

  const float thr = tf.getRotationVariance()+0.05f;

  for(size_t j=0; j<ctxt.objs_.size(); j++)
  {
    OBJECT obj = *ctxt.objs_[j];
    obj.transform(R,t,0,0);

    for(size_t i=0; i<objs_.size(); i++)
      if( (obj.getData().getBB().preassumption(objs_[i]->getData().getBB())>=std::cos(thr+objs_[i]->getData().getBB().ratio())) &&
          obj.intersectsBB(*objs_[i], tf.getRotationVariance(),tf.getTranslationVariance())
          && obj.getData().extensionMatch(objs_[i]->getData(),0.7f,0)
      )
      {
        if(obj.intersectsPts(*objs_[i], tf.getRotationVariance(),tf.getTranslationVariance()) ||
            objs_[i]->intersectsPts(obj, tf.getRotationVariance(),tf.getTranslationVariance()))
        {
          // second check
          map_cors_[ctxt.objs_[j]].push_back(cors.size());
          SCOR c;
          c.a = objs_[i];
          c.b = ctxt.objs_[j];
          cors.push_back(c);
        }
      }

  }

#ifdef DEBUG_
  ROS_INFO("found %d correspondences (%d %d)", (int)cors.size(), (int)objs_.size(), (int)ctxt.objs_.size());

  for(typename std::vector<SCOR>::iterator it = cors.begin(); it!=cors.end(); it++)
  {
    Debug::Interface::get().addArrow(it->a->getNearestPoint(),it->b->getNearestPoint(), 0,255,0);
  }
#endif

}
Code Example #7
void RGBDOdometry::initICPModel(GPUTexture * predictedVertices,
                                GPUTexture * predictedNormals,
                                const float depthCutoff,
                                const Eigen::Matrix4f & modelPose)
{
    cudaArray * textPtr;

    cudaGraphicsMapResources(1, &predictedVertices->cudaRes);
    cudaGraphicsSubResourceGetMappedArray(&textPtr, predictedVertices->cudaRes, 0, 0);
    cudaMemcpyFromArray(vmaps_tmp.ptr(), textPtr, 0, 0, vmaps_tmp.sizeBytes(), cudaMemcpyDeviceToDevice);
    cudaGraphicsUnmapResources(1, &predictedVertices->cudaRes);

    cudaGraphicsMapResources(1, &predictedNormals->cudaRes);
    cudaGraphicsSubResourceGetMappedArray(&textPtr, predictedNormals->cudaRes, 0, 0);
    cudaMemcpyFromArray(nmaps_tmp.ptr(), textPtr, 0, 0, nmaps_tmp.sizeBytes(), cudaMemcpyDeviceToDevice);
    cudaGraphicsUnmapResources(1, &predictedNormals->cudaRes);

    copyMaps(vmaps_tmp, nmaps_tmp, vmaps_g_prev_[0], nmaps_g_prev_[0]);

    for(int i = 1; i < NUM_PYRS; ++i)
    {
        resizeVMap(vmaps_g_prev_[i - 1], vmaps_g_prev_[i]);
        resizeNMap(nmaps_g_prev_[i - 1], nmaps_g_prev_[i]);
    }

    Eigen::Matrix<float, 3, 3, Eigen::RowMajor> Rcam = modelPose.topLeftCorner(3, 3);
    Eigen::Vector3f tcam = modelPose.topRightCorner(3, 1);

    mat33 device_Rcam = Rcam;
    float3 device_tcam = *reinterpret_cast<float3*>(tcam.data());

    for(int i = 0; i < NUM_PYRS; ++i)
    {
        tranformMaps(vmaps_g_prev_[i], nmaps_g_prev_[i], device_Rcam, device_tcam, vmaps_g_prev_[i], nmaps_g_prev_[i]);
    }

    cudaDeviceSynchronize();
}
Code Example #8
File: gicp.hpp  Project: 87west/pcl
template <typename PointSource, typename PointTarget> inline void
pcl::GeneralizedIterativeClosestPoint<PointSource, PointTarget>::computeTransformation (PointCloudSource &output, const Eigen::Matrix4f& guess)
{
  pcl::IterativeClosestPoint<PointSource, PointTarget>::initComputeReciprocal ();
  using namespace std;
  // Difference between consecutive transforms
  double delta = 0;
  // Get the size of the target
  const size_t N = indices_->size ();
  // Set the mahalanobis matrices to identity
  mahalanobis_.resize (N, Eigen::Matrix3d::Identity ());
  // Compute target cloud covariance matrices
  if (target_covariances_.empty ())
    computeCovariances<PointTarget> (target_, tree_, target_covariances_);
  // Compute input cloud covariance matrices
  if (input_covariances_.empty ())
    computeCovariances<PointSource> (input_, tree_reciprocal_, input_covariances_);

  base_transformation_ = guess;
  nr_iterations_ = 0;
  converged_ = false;
  double dist_threshold = corr_dist_threshold_ * corr_dist_threshold_;
  std::vector<int> nn_indices (1);
  std::vector<float> nn_dists (1);

  while(!converged_)
  {
    size_t cnt = 0;
    std::vector<int> source_indices (indices_->size ());
    std::vector<int> target_indices (indices_->size ());

    // guess corresponds to base_t and transformation_ to t
    Eigen::Matrix4d transform_R = Eigen::Matrix4d::Zero ();
    for(size_t i = 0; i < 4; i++)
      for(size_t j = 0; j < 4; j++)
        for(size_t k = 0; k < 4; k++)
          transform_R(i,j)+= double(transformation_(i,k)) * double(guess(k,j));

    Eigen::Matrix3d R = transform_R.topLeftCorner<3,3> ();

    for (size_t i = 0; i < N; i++)
    {
      PointSource query = output[i];
      query.getVector4fMap () = guess * query.getVector4fMap ();
      query.getVector4fMap () = transformation_ * query.getVector4fMap ();

      if (!searchForNeighbors (query, nn_indices, nn_dists))
      {
        PCL_ERROR ("[pcl::%s::computeTransformation] Unable to find a nearest neighbor in the target dataset for point %d in the source!\n", getClassName ().c_str (), (*indices_)[i]);
        return;
      }
      
      // Check if the distance to the nearest neighbor is smaller than the user imposed threshold
      if (nn_dists[0] < dist_threshold)
      {
        Eigen::Matrix3d &C1 = input_covariances_[i];
        Eigen::Matrix3d &C2 = target_covariances_[nn_indices[0]];
        Eigen::Matrix3d &M = mahalanobis_[i];
        // M = R*C1
        M = R * C1;
        // temp = M*R' + C2 = R*C1*R' + C2
        Eigen::Matrix3d temp = M * R.transpose();        
        temp+= C2;
        // M = temp^-1
        M = temp.inverse ();
        source_indices[cnt] = static_cast<int> (i);
        target_indices[cnt] = nn_indices[0];
        cnt++;
      }
    }
    // Resize to the actual number of valid correspondences
    source_indices.resize(cnt); target_indices.resize(cnt);
    /* optimize transformation using the current assignment and Mahalanobis metrics*/
    previous_transformation_ = transformation_;
    //optimization right here
    try
    {
      rigid_transformation_estimation_(output, source_indices, *target_, target_indices, transformation_);
      /* compute the delta from this iteration */
      delta = 0.;
      for(int k = 0; k < 4; k++) {
        for(int l = 0; l < 4; l++) {
          double ratio = 1;
          if(k < 3 && l < 3) // rotation part of the transform
            ratio = 1./rotation_epsilon_;
          else
            ratio = 1./transformation_epsilon_;
          double c_delta = ratio*fabs(previous_transformation_(k,l) - transformation_(k,l));
          if(c_delta > delta)
            delta = c_delta;
        }
      }
    } 
    catch (PCLException &e)
    {
      PCL_DEBUG ("[pcl::%s::computeTransformation] Optimization issue %s\n", getClassName ().c_str (), e.what ());
      break;
    }
    nr_iterations_++;
    // Check for convergence
    if (nr_iterations_ >= max_iterations_ || delta < 1)
    {
      converged_ = true;
      previous_transformation_ = transformation_;
      PCL_DEBUG ("[pcl::%s::computeTransformation] Convergence reached. Number of iterations: %d out of %d. Transformation difference: %f\n",
                 getClassName ().c_str (), nr_iterations_, max_iterations_, (transformation_ - previous_transformation_).array ().abs ().sum ());
    } 
    else
      PCL_DEBUG ("[pcl::%s::computeTransformation] Convergence failed\n", getClassName ().c_str ());
  }
  // for some reason the static equivalent method raises an error
  // final_transformation_.block<3,3> (0,0) = (transformation_.block<3,3> (0,0)) * (guess.block<3,3> (0,0));
  // final_transformation_.block <3, 1> (0, 3) = transformation_.block <3, 1> (0, 3) + guess.rightCols<1>.block <3, 1> (0, 3);
  final_transformation_.topLeftCorner (3,3) = previous_transformation_.topLeftCorner (3,3) * guess.topLeftCorner (3,3);
  final_transformation_(0,3) = previous_transformation_(0,3) + guess(0,3);
  final_transformation_(1,3) = previous_transformation_(1,3) + guess(1,3);
  final_transformation_(2,3) = previous_transformation_(2,3) + guess(2,3);
}
Code Example #9
File: invrender.cpp  Project: kyzyx/empty-room
int main(int argc, char* argv[]) {
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH);
    glutInitWindowSize(1,1);
    glutCreateWindow("");
    glutHideWindow();
    GLenum err = glewInit();
    if (err != GLEW_OK) {
        cerr << "Error:" << glewGetErrorString(err) << endl;
        return 1;
    }
    if (!parseargs(argc, argv)) return 1;
    PolygonMesh::Ptr mesh(new PolygonMesh());
    cout << "Loading mesh geometry...." << endl;
    io::loadPolygonFile(argv[1], *mesh);
    PointCloud<PointXYZRGB>::Ptr cloud(new PointCloud<PointXYZRGB>());
    {
        PointCloud<PointXYZ> tmp;
        fromPCLPointCloud2(mesh->cloud, tmp);
        cloud->points.resize(tmp.size());
        for (size_t i = 0; i < tmp.size(); ++i) {
            cloud->points[i].x = tmp[i].x;
            cloud->points[i].y = tmp[i].y;
            cloud->points[i].z = tmp[i].z;
        }
    }
    PlaneOrientationFinder of(mesh, resolution/2);
    of.computeNormals(ccw);
    Mesh m(mesh,true);
    cout << "Done loading mesh geometry" << endl;

    vector<int> wallindices;
    vector<int> floorindices;
    ColorHelper loader(image_flip_x, image_flip_y);
    WallFinder wf(&of, resolution);

    if (do_wallfinding) {
        cout << "===== WALLFINDING =====" << endl;
        if (wallinput) {
            cout << "Loading wall files..." << endl;
            wf.loadWalls(wallfile, m.types);
            cout << "Done loading wall files..." << endl;
            of.normalize();
        } else {
            cout << "Analyzing geometry..." << endl;
            if (!of.computeOrientation()) {
                cout << "Error computing orientation! Non-triangle mesh!" << endl;
            }
            cout << "Done analyzing geometry" << endl;
            of.normalize();
            cout << "Finding walls..." << endl;
            wf.findFloorAndCeiling(m.types, anglethreshold);
            wf.findWalls(m.types, wallthreshold, minlength, anglethreshold);
            cout << "Done finding walls" << endl;
        }
        if (flipfloorceiling) {
            for (int i = 0; i < m.types.size(); ++i) {
                if (m.types[i] == WallFinder::LABEL_CEILING) m.types[i] = WallFinder::LABEL_FLOOR;
                else if (m.types[i] == WallFinder::LABEL_FLOOR) m.types[i] = WallFinder::LABEL_CEILING;
            }
        }
        for (int i = 0; i < m.types.size(); ++i) {
            if (m.types[i] == WallFinder::LABEL_WALL) wallindices.push_back(i);
            else if (m.types[i] == WallFinder::LABEL_FLOOR) floorindices.push_back(i);
        }
        if (output_wall) wf.saveWalls(walloutfile, m.types);
        cout << "=======================" << endl;
    }

    int numlights = 0;
    if (do_reprojection) {
        cout << "===== REPROJECTING =====" << endl;
        if (input) {
            cout << "Loading reprojection input files..." << endl;
            loader.readCameraFile(camfile);
            cout << "Reading input files..." << endl;
            loader.load(camfile, ColorHelper::READ_COLOR);
            if (use_confidence_files) {
                loader.load(camfile, ColorHelper::READ_CONFIDENCE);
            }
            cout << "Done reading " << loader.size() << " color images" << endl;
            if (do_wallfinding) {
                vector<char> tmp;
                swap(tmp, m.types);
                numlights = m.readSamples(infile);
                swap (m.types, tmp);
            } else {
                numlights = m.readSamples(infile);
            }
            cout << "Done loading reprojection files" << endl;
            if (hdr_threshold > 0) {
                numlights = clusterLights(m, hdr_threshold, minlightsize);
                cout << "Done clustering " << numlights << " lights" << endl;
            }
        } else {
            if (hdr_threshold < 0) hdr_threshold = 10.0;
            if (all_project) {
                cout << "Reading input files..." << endl;
                loader.load(camfile, ColorHelper::READ_COLOR | ColorHelper::READ_DEPTH);
                if (use_confidence_files) {
                    loader.load(camfile, ColorHelper::READ_CONFIDENCE);
                }
                cout << "Done reading " << loader.size() << " color images" << endl;
                cout << "Reprojecting..." << endl;
                reproject(loader, m, hdr_threshold);
            } else {
                loader.load(camfile, 0);
                loader.load(project, ColorHelper::READ_COLOR | ColorHelper::READ_DEPTH);
                if (use_confidence_files) {
                    loader.load(project, ColorHelper::READ_CONFIDENCE);
                }
                cout << "Reprojecting..." << endl;
                reproject((const float*) loader.getImage(project),
                          loader.getConfidenceMap(project),
                          loader.getDepthMap(project),
                          loader.getCamera(project),
                          m, hdr_threshold);
            }
            cout << "Done reprojecting; clustering lights..." << endl;
            numlights = clusterLights(m, hdr_threshold, minlightsize);
            cout << "Done clustering " << numlights << " lights" << endl;
        }
        if (output_reprojection) m.writeSamples(outfile);
        if (coloredfile.length()) m.writeColoredMesh(coloredfile, displayscale);
        m.computeColorsOGL();
        hemicuberesolution = max(hemicuberesolution, loader.getCamera(0)->width);
        hemicuberesolution = max(hemicuberesolution, loader.getCamera(0)->height);
        cout << "========================" << endl;
    }
    InverseRender ir(&m, numlights, hemicuberesolution);
    vector<SampleData> walldata, floordata;
    // Only do inverse rendering with full reprojection and wall labels
    if (do_sampling && (input || all_project) && (wallinput || do_wallfinding)) {
        cout << "==== SAMPLING SCENE ====" << endl;
        if (read_eq) {
            ir.loadVariablesBinary(walldata, samplefile + ".walls");
            if (do_texture) {
                ir.loadVariablesBinary(floordata, samplefile + ".floors");
            }
        } else {
            ir.computeSamples(walldata, wallindices, numsamples, discardthreshold);
            if (do_texture) {
                ir.computeSamples(floordata, floorindices, numsamples, discardthreshold);
            }
            if (write_eq) {
                ir.writeVariablesBinary(walldata, sampleoutfile + ".walls");
                if (do_texture) {
                    ir.writeVariablesBinary(floordata, sampleoutfile + ".floors");
                }
            }
            if (write_matlab) {
                ir.writeVariablesMatlab(walldata, matlabsamplefile);
            }
        }
        cout << "========================" << endl;
        ir.setNumRansacIters(numRansacIters);
        ir.setMaxPercentErr(maxPercentErr);
        ir.solve(walldata);

        Texture tex;
        if (do_texture) {
            // Prepare floor plane
            Eigen::Matrix4f t = of.getNormalizationTransform().inverse();
            Eigen::Vector3f floornormal(0,flipfloorceiling?-1:1,0);
            floornormal = t.topLeftCorner(3,3)*floornormal;
            Eigen::Vector4f floorpoint(0, wf.floorplane, 0, 1);
            floorpoint = t*floorpoint;
            R3Plane floorplane(eigen2gaps(floorpoint.head(3)).Point(), eigen2gaps(floornormal));

            loader.load(camfile, use_confidence_files);

            cout << "Solving texture..." << endl;
            ir.solveTexture(floordata, &loader, floorplane, tex);
            cout << "Done solving texture..." << endl;
            if (tex.size > 0) {
                ColorHelper::writeExrImage(texfile, tex.texture, tex.size, tex.size);
            }
        }
        if (radfile != "") {
            outputRadianceFile(radfile, wf, m, ir);
        }
        if (plyfile != "") {
            outputPlyFile(plyfile, wf, m, ir);
        }
        if (pbrtfile != "") {
            outputPbrtFile(pbrtfile, wf, m, ir, tex, loader.getCamera(0), do_texture?texfile:"");
        }
    }
    cout << "DONE PROCESSING" << endl;

    if (display) {
        InvRenderVisualizer irv(cloud, loader, wf, ir);
        if (project_debug) irv.recalculateColors(InvRenderVisualizer::LABEL_REPROJECT_DEBUG);
        irv.visualizeCameras(camera);
        irv.visualizeWalls();
        irv.addSamples(walldata);
        irv.loop();
    }
    return 0;
}
Code Example #10
/* ---[ */
int
main (int argc, char** argv)
{
  print_info ("Transform a cloud. For more information, use: %s -h\n", argv[0]);

  bool help = false;
  parse_argument (argc, argv, "-h", help);
  if (argc < 3 || help)
  {
    printHelp (argc, argv);
    return (-1);
  }

  // Parse the command line arguments for .pcd files
  std::vector<int> p_file_indices;
  p_file_indices = parse_file_extension_argument (argc, argv, ".pcd");
  if (p_file_indices.size () != 2)
  {
    print_error ("Need one input PCD file and one output PCD file to continue.\n");
    return (-1);
  }

  // Initialize the transformation matrix
  Eigen::Matrix4f tform; 
  tform.setIdentity ();

  // Command line parsing
  float dx, dy, dz;
  std::vector<float> values;

  if (parse_3x_arguments (argc, argv, "-trans", dx, dy, dz) > -1)
  {
    tform (0, 3) = dx;
    tform (1, 3) = dy;
    tform (2, 3) = dz;
  }

  if (parse_x_arguments (argc, argv, "-quat", values) > -1)
  {
    if (values.size () == 4)
    {
      const float& x = values[0];
      const float& y = values[1];
      const float& z = values[2];
      const float& w = values[3];
      tform.topLeftCorner (3, 3) = Eigen::Matrix3f (Eigen::Quaternionf (w, x, y, z));
    }
    else
    {
      print_error ("Wrong number of values given (%zu): ", values.size ());
      print_error ("The quaternion specified with -quat must contain 4 elements (w,x,y,z).\n");
    }
  }

  if (parse_x_arguments (argc, argv, "-axisangle", values) > -1)
  {
    if (values.size () == 4)
    {
      const float& ax = values[0];
      const float& ay = values[1];
      const float& az = values[2];
      const float& theta = values[3];
      tform.topLeftCorner (3, 3) = Eigen::Matrix3f (Eigen::AngleAxisf (theta, Eigen::Vector3f (ax, ay, az)));
    }
    else
    {
      print_error ("Wrong number of values given (%zu): ", values.size ());
      print_error ("The rotation specified with -axisangle must contain 4 elements (ax,ay,az,theta).\n");
    }
  }

  if (parse_x_arguments (argc, argv, "-matrix", values) > -1)
  {
    if (values.size () == 9 || values.size () == 16)
    {
      int n = values.size () == 9 ? 3 : 4;
      for (int r = 0; r < n; ++r)
        for (int c = 0; c < n; ++c)
          tform (r, c) = values[n*r+c];
    }
    else
    {
      print_error ("Wrong number of values given (%zu): ", values.size ());
      print_error ("The transformation specified with -matrix must be 3x3 (9) or 4x4 (16).\n");
    }
  }

  // Load the first file
  pcl::PCLPointCloud2::Ptr cloud (new pcl::PCLPointCloud2);
  if (!loadCloud (argv[p_file_indices[0]], *cloud)) 
    return (-1);

  // Apply the transform
  pcl::PCLPointCloud2 output;
  compute (cloud, output, tform);

  // Check if a scaling parameter has been given
  double divider[3];
  if (parse_3x_arguments (argc, argv, "-scale", divider[0], divider[1], divider[2]) > -1)
  {
    print_highlight ("Scaling XYZ data with the following values: %f, %f, %f\n", divider[0], divider[1], divider[2]);
    scaleInPlace (output, divider);
  }

  // Save into the second file
  saveCloud (argv[p_file_indices[1]], output);
}
Code Example #11
File: Feature.cpp  Project: weeks-yu/WReg
bool Feature::getTransformationByRANSAC(Eigen::Matrix4f &result_transform,
	Eigen::Matrix<double, 6, 6> &result_information,
	int &point_count, int &point_corr_count,
	float &rmse, vector<cv::DMatch> *matches, // output vars. if matches == nullptr, do not return inlier match
	const Feature* earlier, const Feature* now,
	PointCloudCuda *pcc,
	const vector<cv::DMatch> &initial_matches)
{
	if (matches != nullptr)	matches->clear();

	if (initial_matches.size() < (unsigned int) Config::instance()->get<int>("min_matches"))
	{
		return false;
	}

	unsigned int min_inlier_threshold = (unsigned int)(initial_matches.size() * Config::instance()->get<float>("min_inliers_percent"));
	std::vector<cv::DMatch> inlier; //holds those feature correspondences that support the transformation
	float inlier_error; //all squared errors
	srand((long)std::clock());

	// a point is an inlier if it is no more than max_dist_m metres away from its corresponding point
	const float max_dist_m = Config::instance()->get<float>("max_dist_for_inliers");
	const float squared_max_dist_m = max_dist_m * max_dist_m;

	// best values of all iterations (including invalids)
	float best_error = 1e6, best_error_invalid = 1e6;
	unsigned int best_inlier_invalid = 0, best_inlier_cnt = 0, valid_iterations = 0;
	Eigen::Matrix4f transformation;

	const unsigned int sample_size = 3; // choose this many correspondences at random

	unsigned int ransac_iterations = Config::instance()->get<int>("ransac_max_iteration");
	int threads = Config::instance()->get<int>("icpcuda_threads");
	int blocks = Config::instance()->get<int>("icpcuda_blocks");
	float corr_percent = Config::instance()->get<float>("coresp_percent");
	Eigen::Matrix<double, 6, 6> information = Eigen::Matrix<double, 6, 6>::Identity();
	int pc = 1.0, pcorrc = 0.0;

	bool *used = new bool[initial_matches.size()];
	memset(used, 0, initial_matches.size() * sizeof(bool));
	vector<int> sample_matches_indices(3);
	vector<cv::DMatch> sample_matches_vector(3);

	for (unsigned int n_iter = 0; n_iter < ransac_iterations; n_iter++)
	{
		for (int i = 0; i < sample_matches_indices.size(); i++)
		{
			used[sample_matches_indices[i]] = false;
		}
		sample_matches_indices.clear();
		sample_matches_vector.clear();
		while (sample_matches_indices.size() < sample_size)
		{
			int id = rand() % initial_matches.size();
			if (!used[id])
			{
				used[id] = true;
				sample_matches_indices.push_back(id);
				sample_matches_vector.push_back(initial_matches.at(id));
			}
		}

		bool valid; // valid is false iff the sampled points clearly aren't inliers themselves
		transformation = Feature::getTransformFromMatches(valid,
			earlier->feature_pts_3d,
			now->feature_pts_3d,
			sample_matches_vector, max_dist_m);

		if (!valid) continue; // valid is false iff the sampled points aren't inliers themselves
		if (transformation != transformation) continue; //Contains NaN

		//test whether samples are inliers (more strict than before)
		Feature::computeInliersAndError(inlier, inlier_error, nullptr,
			sample_matches_vector, transformation,
			earlier->feature_pts_3d, now->feature_pts_3d,
			squared_max_dist_m);
		if (inlier_error > 1000) continue; // most likely a false match among the samples

		Feature::computeInliersAndError(inlier, inlier_error, nullptr,
			initial_matches, transformation,
			earlier->feature_pts_3d, now->feature_pts_3d,
			squared_max_dist_m);

		if (inlier.size() < min_inlier_threshold || inlier_error > max_dist_m)
		{
			continue;
		}

		valid_iterations++;

		// Performance hacks:
		// iterations in which more than half of the initial_matches are inliers count twice
		if (inlier.size() > initial_matches.size() * 0.5) n_iter++;
		// iterations in which more than 80% of the initial_matches are inliers count threefold
		if (inlier.size() > initial_matches.size() * 0.8) n_iter++;

		if (inlier_error < best_error)
		{ //copy this to the result
			if (pcc != nullptr)
			{
				Eigen::Vector3f t = transformation.topRightCorner(3, 1);
				Eigen::Matrix<float, 3, 3, Eigen::RowMajor> rot = transformation.topLeftCorner(3, 3);
				pcc->getCoresp(t, rot, information, pc, pcorrc, threads, blocks);
			}

			result_transform = transformation;
			result_information = information;
			point_count = pc;
			point_corr_count = pcorrc;
			if (matches != nullptr) *matches = inlier;
			best_inlier_cnt = inlier.size();
			rmse = inlier_error;
			best_error = inlier_error;
		}

		float new_inlier_error;

		transformation = Feature::getTransformFromMatches(valid,
			earlier->feature_pts_3d, now->feature_pts_3d, inlier); // compute a new transformation from all inliers
		if (transformation != transformation) continue; //Contains NaN
		Feature::computeInliersAndError(inlier, new_inlier_error, nullptr,
			initial_matches, transformation,
			earlier->feature_pts_3d, now->feature_pts_3d,
			squared_max_dist_m);

		if(inlier.size() < min_inlier_threshold || new_inlier_error > max_dist_m)
		{
			continue;
		}

		if (new_inlier_error < best_error) 
		{
			if (pcc != nullptr)
			{
				Eigen::Vector3f t = transformation.topRightCorner(3, 1);
				Eigen::Matrix<float, 3, 3, Eigen::RowMajor> rot = transformation.topLeftCorner(3, 3);
				pcc->getCoresp(t, rot, information, pc, pcorrc, threads, blocks);
			}

			result_transform = transformation;
			if (pcc != nullptr)
				result_information = information;
			point_count = pc;
			point_corr_count = pcorrc;
			if (matches != nullptr) *matches = inlier;
			best_inlier_cnt = inlier.size();
			rmse = new_inlier_error;
			best_error = new_inlier_error;
		}
	} //iterations
	
	if (pcc == nullptr)
		result_information = Eigen::Matrix<double, 6, 6>::Identity();
	return best_inlier_cnt >= min_inlier_threshold;
}
Code Example #12
void MainController::run()
{
    while(!pangolin::ShouldQuit() && !((!logReader->hasMore()) && quiet) && !(eFusion->getTick() == end && quiet))
    {
        if(!gui->pause->Get() || pangolin::Pushed(*gui->step))
        {
            if((logReader->hasMore() || rewind) && eFusion->getTick() < end)
            {
                TICK("LogRead");
                if(rewind)
                {
                    if(!logReader->hasMore())
                    {
                        logReader->getBack();
                    }
                    else
                    {
                        logReader->getNext();
                    }

                    if(logReader->rewound())
                    {
                        logReader->currentFrame = 0;
                    }
                }
                else
                {
                    logReader->getNext();
                }
                TOCK("LogRead");

                if(eFusion->getTick() < start)
                {
                    eFusion->setTick(start);
                    logReader->fastForward(start);
                }

                float weightMultiplier = framesToSkip + 1;

                if(framesToSkip > 0)
                {
                    eFusion->setTick(eFusion->getTick() + framesToSkip);
                    logReader->fastForward(logReader->currentFrame + framesToSkip);
                    framesToSkip = 0;
                }

                Eigen::Matrix4f * currentPose = 0;

                if(groundTruthOdometry)
                {
                    currentPose = new Eigen::Matrix4f;
                    currentPose->setIdentity();
                    *currentPose = groundTruthOdometry->getIncrementalTransformation(logReader->timestamp);
                }

                eFusion->processFrame(logReader->rgb, logReader->depth, logReader->timestamp, currentPose, weightMultiplier);

                if(currentPose)
                {
                    delete currentPose;
                }

                if(frameskip && Stopwatch::getInstance().getTimings().at("Run") > 1000.f / 30.f)
                {
                    framesToSkip = int(Stopwatch::getInstance().getTimings().at("Run") / (1000.f / 30.f));
                }
            }
        }
        else
        {
            eFusion->predict();
        }

        TICK("GUI");

        if(gui->followPose->Get())
        {
            pangolin::OpenGlMatrix mv;

            Eigen::Matrix4f currPose = eFusion->getCurrPose();
            Eigen::Matrix3f currRot = currPose.topLeftCorner(3, 3);

            Eigen::Quaternionf currQuat(currRot);
            Eigen::Vector3f forwardVector(0, 0, 1);
            Eigen::Vector3f upVector(0, iclnuim ? 1 : -1, 0);

            Eigen::Vector3f forward = (currQuat * forwardVector).normalized();
            Eigen::Vector3f up = (currQuat * upVector).normalized();

            Eigen::Vector3f eye(currPose(0, 3), currPose(1, 3), currPose(2, 3));

            eye -= forward;

            Eigen::Vector3f at = eye + forward;

            Eigen::Vector3f z = (eye - at).normalized();  // Forward
            Eigen::Vector3f x = up.cross(z).normalized(); // Right
            Eigen::Vector3f y = z.cross(x);

            Eigen::Matrix4d m;
            m << x(0),  x(1),  x(2),  -(x.dot(eye)),
                 y(0),  y(1),  y(2),  -(y.dot(eye)),
                 z(0),  z(1),  z(2),  -(z.dot(eye)),
                    0,     0,     0,              1;

            memcpy(&mv.m[0], m.data(), sizeof(Eigen::Matrix4d));

            gui->s_cam.SetModelViewMatrix(mv);
        }

        gui->preCall();

        std::stringstream stri;
        stri << eFusion->getModelToModel().lastICPCount;
        gui->trackInliers->Ref().Set(stri.str());

        std::stringstream stre;
        stre << (isnan(eFusion->getModelToModel().lastICPError) ? 0 : eFusion->getModelToModel().lastICPError);
        gui->trackRes->Ref().Set(stre.str());

        if(!gui->pause->Get())
        {
            gui->resLog.Log((isnan(eFusion->getModelToModel().lastICPError) ? std::numeric_limits<float>::max() : eFusion->getModelToModel().lastICPError), icpErrThresh);
            gui->inLog.Log(eFusion->getModelToModel().lastICPCount, icpCountThresh);
        }

        Eigen::Matrix4f pose = eFusion->getCurrPose();

        if(gui->drawRawCloud->Get() || gui->drawFilteredCloud->Get())
        {
            eFusion->computeFeedbackBuffers();
        }

        if(gui->drawRawCloud->Get())
        {
            eFusion->getFeedbackBuffers().at(FeedbackBuffer::RAW)->render(gui->s_cam.GetProjectionModelViewMatrix(), pose, gui->drawNormals->Get(), gui->drawColors->Get());
        }

        if(gui->drawFilteredCloud->Get())
        {
            eFusion->getFeedbackBuffers().at(FeedbackBuffer::FILTERED)->render(gui->s_cam.GetProjectionModelViewMatrix(), pose, gui->drawNormals->Get(), gui->drawColors->Get());
        }

        if(gui->drawGlobalModel->Get())
        {
            glFinish();
            TICK("Global");

            if(gui->drawFxaa->Get())
            {
                gui->drawFXAA(gui->s_cam.GetProjectionModelViewMatrix(),
                              gui->s_cam.GetModelViewMatrix(),
                              eFusion->getGlobalModel().model(),
                              eFusion->getConfidenceThreshold(),
                              eFusion->getTick(),
                              eFusion->getTimeDelta(),
                              iclnuim);
            }
            else
            {
                eFusion->getGlobalModel().renderPointCloud(gui->s_cam.GetProjectionModelViewMatrix(),
                                                           eFusion->getConfidenceThreshold(),
                                                           gui->drawUnstable->Get(),
                                                           gui->drawNormals->Get(),
                                                           gui->drawColors->Get(),
                                                           gui->drawPoints->Get(),
                                                           gui->drawWindow->Get(),
                                                           gui->drawTimes->Get(),
                                                           eFusion->getTick(),
                                                           eFusion->getTimeDelta());
            }
            glFinish();
            TOCK("Global");
        }

        if(eFusion->getLost())
        {
            glColor3f(1, 1, 0);
        }
        else
        {
            glColor3f(1, 0, 1);
        }
        gui->drawFrustum(pose);
        glColor3f(1, 1, 1);

        if(gui->drawFerns->Get())
        {
            glColor3f(0, 0, 0);
            for(size_t i = 0; i < eFusion->getFerns().frames.size(); i++)
            {
                if((int)i == eFusion->getFerns().lastClosest)
                    continue;

                gui->drawFrustum(eFusion->getFerns().frames.at(i)->pose);
            }
            glColor3f(1, 1, 1);
        }

        if(gui->drawDefGraph->Get())
        {
            const std::vector<GraphNode*> & graph = eFusion->getLocalDeformation().getGraph();

            for(size_t i = 0; i < graph.size(); i++)
            {
                pangolin::glDrawCross(graph.at(i)->position(0),
                                      graph.at(i)->position(1),
                                      graph.at(i)->position(2),
                                      0.1);

                for(size_t j = 0; j < graph.at(i)->neighbours.size(); j++)
                {
                    pangolin::glDrawLine(graph.at(i)->position(0),
                                         graph.at(i)->position(1),
                                         graph.at(i)->position(2),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(0),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(1),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(2));
                }
            }
        }

        if(eFusion->getFerns().lastClosest != -1)
        {
            glColor3f(1, 0, 0);
            gui->drawFrustum(eFusion->getFerns().frames.at(eFusion->getFerns().lastClosest)->pose);
            glColor3f(1, 1, 1);
        }

        const std::vector<PoseMatch> & poseMatches = eFusion->getPoseMatches();

        int maxDiff = 0;
        for(size_t i = 0; i < poseMatches.size(); i++)
        {
            if(poseMatches.at(i).secondId - poseMatches.at(i).firstId > maxDiff)
            {
                maxDiff = poseMatches.at(i).secondId - poseMatches.at(i).firstId;
            }
        }

        for(size_t i = 0; i < poseMatches.size(); i++)
        {
            if(gui->drawDeforms->Get())
            {
                if(poseMatches.at(i).fern)
                {
                    glColor3f(1, 0, 0);
                }
                else
                {
                    glColor3f(0, 1, 0);
                }
                for(size_t j = 0; j < poseMatches.at(i).constraints.size(); j++)
                {
                    pangolin::glDrawLine(poseMatches.at(i).constraints.at(j).sourcePoint(0), poseMatches.at(i).constraints.at(j).sourcePoint(1), poseMatches.at(i).constraints.at(j).sourcePoint(2),
                                         poseMatches.at(i).constraints.at(j).targetPoint(0), poseMatches.at(i).constraints.at(j).targetPoint(1), poseMatches.at(i).constraints.at(j).targetPoint(2));
                }
            }
        }
        glColor3f(1, 1, 1);

        eFusion->normaliseDepth(0.3f, gui->depthCutoff->Get());

        for(std::map<std::string, GPUTexture*>::const_iterator it = eFusion->getTextures().begin(); it != eFusion->getTextures().end(); ++it)
        {
            if(it->second->draw)
            {
                gui->displayImg(it->first, it->second);
            }
        }

        eFusion->getIndexMap().renderDepth(gui->depthCutoff->Get());

        gui->displayImg("ModelImg", eFusion->getIndexMap().imageTex());
        gui->displayImg("Model", eFusion->getIndexMap().drawTex());

        std::stringstream strs;
        strs << eFusion->getGlobalModel().lastCount();

        gui->totalPoints->operator=(strs.str());

        std::stringstream strs2;
        strs2 << eFusion->getLocalDeformation().getGraph().size();

        gui->totalNodes->operator=(strs2.str());

        std::stringstream strs3;
        strs3 << eFusion->getFerns().frames.size();

        gui->totalFerns->operator=(strs3.str());

        std::stringstream strs4;
        strs4 << eFusion->getDeforms();

        gui->totalDefs->operator=(strs4.str());

        std::stringstream strs5;
        strs5 << eFusion->getTick() << "/" << logReader->getNumFrames();

        gui->logProgress->operator=(strs5.str());

        std::stringstream strs6;
        strs6 << eFusion->getFernDeforms();

        gui->totalFernDefs->operator=(strs6.str());

        gui->postCall();

        logReader->flipColors = gui->flipColors->Get();
        eFusion->setRgbOnly(gui->rgbOnly->Get());
        eFusion->setPyramid(gui->pyramid->Get());
        eFusion->setFastOdom(gui->fastOdom->Get());
        eFusion->setConfidenceThreshold(gui->confidenceThreshold->Get());
        eFusion->setDepthCutoff(gui->depthCutoff->Get());
        eFusion->setIcpWeight(gui->icpWeight->Get());
        eFusion->setSo3(gui->so3->Get());
        eFusion->setFrameToFrameRGB(gui->frameToFrameRGB->Get());

        resetButton = pangolin::Pushed(*gui->reset);

        if(gui->autoSettings)
        {
            static bool last = gui->autoSettings->Get();

            if(gui->autoSettings->Get() != last)
            {
                last = gui->autoSettings->Get();
                static_cast<LiveLogReader *>(logReader)->setAuto(last);
            }
        }

        Stopwatch::getInstance().sendAll();

        if(resetButton)
        {
            break;
        }

        if(pangolin::Pushed(*gui->save))
        {
            eFusion->savePly();
        }

        TOCK("GUI");
    }
}
Code Example #13
File: dirini_visualizer.cpp  Project: mwuethri/dirini
	void DiriniVisualizer::addPoint(Eigen::Vector3f point, Eigen::Matrix4f transform_)
	{
		points.push_back(transform_.topLeftCorner(3,3)*point+transform_.topRightCorner(3,1));
	}