int main ( int argc, char** argv )
{

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source ( new pcl::PointCloud<pcl::PointXYZ> () );
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_target ( new pcl::PointCloud<pcl::PointXYZ> () );

    loadFile ( argv[1], *cloud_source );


    // Rotation (9 floats, row-major) and translation (3 floats) read from file
    float *R = new float [9];
    float *t = new float [3];
    loadRTfromFile(R, t, argv[3]);
    Eigen::Affine3f RT;
    RT.matrix() <<
                R[0], R[1], R[2], t[0],
                R[3], R[4], R[5], t[1],
                R[6], R[7], R[8], t[2],
                0,0,0,1;
    delete [] R;
    delete [] t;


    pcl::transformPointCloud ( *cloud_source, *cloud_target, RT );

    pcl::io::savePCDFileASCII ( argv[2], *cloud_target );

    return 0;
}
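
// A minimal sketch of the loadRTfromFile helper used above (hypothetical; the
// original reader may differ). It assumes the file stores nine row-major
// rotation entries followed by three translation entries as plain text.
#include <fstream>

void loadRTfromFile(float* R, float* t, const char* filename)
{
    std::ifstream in(filename);
    for (int i = 0; i < 9; ++i) in >> R[i];   // rotation, row-major
    for (int i = 0; i < 3; ++i) in >> t[i];   // translation
}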
void ndtTransformAndAdd(pcl::PointCloud<pcl::PointXYZRGB>::Ptr& A,pcl::PointCloud<pcl::PointXYZRGB>::Ptr& B)
{
	pcl::NormalDistributionsTransform<pcl::PointXYZRGB, pcl::PointXYZRGB> ndt;
	// Setting scale-dependent NDT parameters.
	// Minimum transformation difference for the termination condition.
	ndt.setTransformationEpsilon (0.01);
	// Maximum step size for the More-Thuente line search.
	ndt.setStepSize (0.1);
	// Resolution of the NDT grid structure (VoxelGridCovariance).
	ndt.setResolution (1.0);

	// Maximum number of registration iterations.
	ndt.setMaximumIterations (35);

	// Point cloud to be aligned.
	ndt.setInputSource (B);
	// Point cloud to be aligned to.
	ndt.setInputTarget (A);

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);
	Eigen::Affine3f estimate;
	estimate.setIdentity();
	ndt.align (*output_cloud, estimate.matrix());

	std::cout << "Normal Distributions Transform has converged: " << ndt.hasConverged ()
	          << " score: " << ndt.getFitnessScore () << std::endl;

	// Transform the unfiltered input cloud using the found transform.
	pcl::transformPointCloud (*B, *output_cloud, ndt.getFinalTransformation ());

	*A = *A + *output_cloud;
}
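
// Usage sketch (hypothetical; the .pcd file names are placeholders): load two
// clouds and merge B into A via the NDT helper above.
#include <pcl/io/pcd_io.h>

void ndtExampleUsage()
{
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr A (new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr B (new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::io::loadPCDFile ("target.pcd", *A);   // placeholder file
	pcl::io::loadPCDFile ("source.pcd", *B);   // placeholder file
	ndtTransformAndAdd (A, B);                 // B is aligned to A, then appended
}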
Example #3
void fuzzyAffines()
{
    std::vector<Eigen::Matrix4f> trans;
    trans.reserve(count/10);
    for( size_t i=0; i<count/10; i++ )
    {
        Eigen::Vector3f x = Eigen::Vector3f::Random();
        Eigen::Vector3f y = Eigen::Vector3f::Random();

        x.normalize();
        y.normalize();

        Eigen::Vector3f z = x.cross(y);
        z.normalize();

        y = z.cross(x);
        y.normalize();

        Eigen::Affine3f t = Eigen::Affine3f::Identity();
        Eigen::Matrix3f r = Eigen::Matrix3f::Identity();

        r.col(0) = x;
        r.col(1) = y;
        r.col(2) = z;

        t.rotate(r);
        t.translate( 0.5f * Eigen::Vector3f::Random() + Eigen::Vector3f(0.5,0.5,0.5) );

        trans.push_back( t.matrix() );
    }

    s_plot.setColor( Eigen::Vector4f(1,0,0,1) );
    s_plot.setLineWidth( 3.0 );
    s_plot( trans, nox::plot<float>::Pos | nox::plot<float>::CS );
}
Example #4
 /**
  * @brief Return the modelview matrix as a GLdouble array.
  *
  * Similar to the old OpenGL way of querying it with glGetDoublev(GL_MODELVIEW_MATRIX, ...).
  * @param matrix A pointer to at least 16 GLdoubles to be filled with the view matrix.
  */
 void getViewMatrix (GLdouble *matrix)
 {
     Eigen::Matrix4f mv = view_matrix.matrix();
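     // Single-index access on an Eigen matrix follows storage order
     // (column-major by default), which matches OpenGL's convention.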
     for (int i = 0; i < 16; ++i)
     {
         matrix[i] = mv(i);
     }
 }
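
 // Usage sketch (assumes a legacy fixed-function OpenGL context):
 //
 //     GLdouble mv[16];
 //     getViewMatrix(mv);            // column-major, ready for OpenGL
 //     glMatrixMode(GL_MODELVIEW);
 //     glLoadMatrixd(mv);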
Example #5
gtsam::Pose3 transformToPose3(const Eigen::Affine3f& tform)
{
  //  return gtsam::Pose3(gtsam::Rot3(tform(0,0),tform(0,1),tform(0,2),
  //				  tform(1,0),tform(1,1),tform(1,2),
  //				  tform(2,0),tform(2,1),tform(2,2),
  //				  gtsam::Point3(tform());
  return gtsam::Pose3(tform.matrix().cast<double>());
}
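
// Round-trip sketch (assumes GTSAM's Pose3::matrix(), which returns an
// Eigen::Matrix4d):
//
//     Eigen::Affine3f tform = Eigen::Affine3f::Identity();
//     gtsam::Pose3 pose = transformToPose3(tform);
//     Eigen::Affine3f back(pose.matrix().cast<float>());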
Example #6
void TrackerICP::determineTransformation(PointCloudConstPtr pointCloud, Eigen::Affine3f &T, bool &converged, float &RMS){

    // Filter
    approximateVoxelFilter->setInputCloud(pointCloud);
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr filteredPointCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
    approximateVoxelFilter->filter(*filteredPointCloud);
    std::cout << "Reduced number of points in source from " << pointCloud->points.size() << " to " << filteredPointCloud->points.size() << std::endl;

    //// pcl::io::savePLYFileBinary(fileName.toStdString(), *pointCloudPCL);
    //pcl::PLYWriter w;
    //// Write to ply in binary without camera
    //w.write<pcl::PointXYZRGB> ("filteredPointCloud.ply", *filteredPointCloud, true, false);

    // Add normal fields to source (due to PCL bug)
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudNormals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    pcl::copyPointCloud(*filteredPointCloud, *pointCloudNormals);

//    correspondenceEstimator->setInputSource(pointCloudNormals);
//    correspondenceRejectorBoundary->setInputSource<pcl::PointXYZRGBNormal>(pointCloudNormals);
    icp->setInputSource(pointCloudNormals);

    // Align
    pcl::PointCloud<pcl::PointXYZRGBNormal> registeredPointCloud;
    icp->align(registeredPointCloud, lastTransformation.matrix());

    //std::cout << "Nr of iterations: " << icp->nr_iterations_ << std::endl;

    // Computes nearest neighbors from scratch
    //std::cout << "Mean error: " << icp->getFitnessScore(100.0) << std::endl;

    pcl::Correspondences correspondences = *(icp->correspondences_);
    int nCorrespondences = correspondences.size();
    std::cout << "Number of correspondences: " << nCorrespondences << std::endl;
    float sumDistances = 0.0f;
    for(int i=0; i<nCorrespondences; i++){
       sumDistances += correspondences[i].distance;
    }
    // Note: this is the mean of the correspondence distances reported by PCL,
    // not a true root-mean-square value.
    RMS = sumDistances/nCorrespondences;
    std::cout << "RMS: " << RMS << std::endl;

    //std::cout << "Median distance: " << correspondenceRejectorMedian->getMedianDistance() << std::endl;

    converged = icp->hasConverged();

    std::cout << "Converged: " << converged << std::endl;

    if(converged){
        Eigen::Affine3f Traw;
        Traw.matrix() = icp->getFinalTransformation();
        poseFilter->filterPoseEstimate(Traw, T);
        //T = Traw;
        lastTransformation = T;
    } else {
        T = lastTransformation;
    }

}
keypoint_map::keypoint_map(const std::string & dir_name) {

	init_feature_detector(fd, de, dm);

	intrinsics << 525.0, 525.0, 319.5, 239.5;
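	// (fx, fy, cx, cy: this layout matches how the intrinsics are indexed in align_z_axis)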

	pcl::io::loadPCDFile(dir_name + "/keypoints3d.pcd", keypoints3d);

	cv::FileStorage fs(dir_name + "/descriptors.yml", cv::FileStorage::READ);

	fs["descriptors"] >> descriptors;
	fs["weights"] >> weights;

	cv::FileNode obs = fs["observations"];
	for (cv::FileNodeIterator it = obs.begin(); it != obs.end(); ++it) {
		observation o;
		*it >> o;
		observations.push_back(o);
	}

	cv::FileNode cam_pos = fs["camera_positions"];
	for (cv::FileNodeIterator it = cam_pos.begin(); it != cam_pos.end(); ++it) {
		Eigen::Affine3f pos;

		int i = 0;
		for (cv::FileNodeIterator it2 = (*it).begin(); it2 != (*it).end();
				++it2) {
			int u = i / 4;
			int v = i % 4;

			*it2 >> pos.matrix().coeffRef(u, v);
			i++;

		}
		camera_positions.push_back(pos);
	}

	fs.release();

	for (size_t i = 0; i < camera_positions.size(); i++) {
		cv::Mat rgb = cv::imread(
				dir_name + "/rgb/" + boost::lexical_cast<std::string>(i)
						+ ".png", CV_LOAD_IMAGE_UNCHANGED);
		cv::Mat depth = cv::imread(
				dir_name + "/depth/" + boost::lexical_cast<std::string>(i)
						+ ".png", CV_LOAD_IMAGE_UNCHANGED);

		rgb_imgs.push_back(rgb);
		depth_imgs.push_back(depth);
	}

}
Example #8
void alignedAffines()
{
    s_plot.setColor( Eigen::Vector4f(0,0,0,1) );
    s_plot.setLineWidth( 1.0 );
    for( size_t i=0; i<count; i++ )
    {
        Eigen::Affine3f t = Eigen::Affine3f::Identity();
        t.rotate(Eigen::Matrix3f::Identity());
        t.translate( 0.5f * Eigen::Vector3f::Random() + Eigen::Vector3f(0.5,0.5,0.5) );

        s_plot( t.matrix(), nox::plot<float>::Pos | nox::plot<float>::CS );
    }
}
Example #9
void alignScaleOnce(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1,
                    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2)
{
  Eigen::Vector4f center1, center2;
  Eigen::Matrix3f covariance1, covariance2;
  pcl::computeMeanAndCovarianceMatrix (*cloud1, covariance1, center1);
  pcl::computeMeanAndCovarianceMatrix (*cloud2, covariance2, center2);

  std::cout << "scale1 = " << covariance1.trace() << std::endl;
  std::cout << "scale2 = " << covariance2.trace() << std::endl;
  
  
  Eigen::Affine3f Scale;
  float s;
  s = 1.0 / sqrt( covariance1.trace() );
  s *= sqrt(0.003); // ad-hoc for numerical issue
  Scale.matrix() <<
  s, 0, 0, 0,
  0, s, 0, 0,
  0, 0, s, 0,
  0, 0, 0, 1;
  pcl::transformPointCloud ( *cloud1, *cloud1, Scale );

  s = 1.0 / sqrt( covariance2.trace() );
  s *= sqrt(0.003); // ad-hoc for numerical issue
  Scale.matrix() <<
  s, 0, 0, 0,
  0, s, 0, 0,
  0, 0, s, 0,
  0, 0, 0, 1;
  pcl::transformPointCloud ( *cloud2, *cloud2, Scale );

  pcl::computeMeanAndCovarianceMatrix (*cloud1, covariance1, center1);
  pcl::computeMeanAndCovarianceMatrix (*cloud2, covariance2, center2);

  std::cout << "scale1 = " << covariance1.trace() << std::endl;
  std::cout << "scale2 = " << covariance2.trace() << std::endl;
}
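
// Why the trace works as a scale proxy: scaling every point by s scales the
// covariance matrix by s^2, so trace(s^2 * Sigma) = s^2 * trace(Sigma).
// Choosing s = sqrt(0.003 / trace(Sigma)) therefore drives both traces to
// approximately 0.003, which the second pair of printouts confirms.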
Example #10
bool update(glh::App* app)
{
    float t = (float) app->time();
    angle += (t - tprev) * radial_speed;
    tprev = t;
    Eigen::Affine3f transform;
    transform = Eigen::AngleAxis<float>(angle, glh::vec3(0.f, 0.f, 1.f));
    obj2world = transform.matrix();

    env.set_mat4(OBJ2WORLD, obj2world);
    env.set_texture2d("Sampler", texture);

    return g_run;
}
Example #11
/**
 * ./collar-lines-odom $(ls *.bin | sort | xargs)
 */
int main(int argc, char** argv) {

    vector<string> clouds_to_process;
    string output_filename;
    if(!parse_arguments(argc, argv, output_filename, clouds_to_process)) {
      return EXIT_FAILURE;
    }
    ofstream output_file(output_filename.c_str());

    LoamImuInput imu(SCAN_PERIOD);
    ScanRegistration scanReg(imu, SCAN_PERIOD);
    LaserOdometry laserOdom(SCAN_PERIOD);
    LaserMapping mapping(SCAN_PERIOD);


    VelodynePointCloud cloud;
    float time = 0;
    for (size_t i = 0; i < clouds_to_process.size(); i++) {
        string filename = clouds_to_process[i];
        cerr << "KITTI file: " << filename << endl << flush;

        if (endsWith(filename, ".bin")) {
          but_velodyne::VelodynePointCloud::fromKitti(filename, cloud);
          pcl::transformPointCloud(cloud, cloud,
              cloud.getAxisCorrection().inverse().matrix());
        } else {
          pcl::io::loadPCDFile(filename, cloud);
        }

        LaserOdometry::Inputs odomInputs;
        scanReg.run(cloud, time, odomInputs);

        LaserMapping::Inputs mappingInputs;
        laserOdom.run(odomInputs, mappingInputs);

        LaserMapping::Outputs mappingOutputs;
        mapping.run(mappingInputs, mappingOutputs, time);

        Eigen::Affine3f transf = pcl::getTransformation(-mappingOutputs.transformToMap[3],
            -mappingOutputs.transformToMap[4], mappingOutputs.transformToMap[5], -mappingOutputs.transformToMap[0],
            -mappingOutputs.transformToMap[1], mappingOutputs.transformToMap[2]);
        but_velodyne::KittiUtils::printPose(output_file, transf.matrix());

        time += SCAN_PERIOD;
    }

    output_file.close();

    return EXIT_SUCCESS;
}
Example #12
int main( )
  {
  Eigen::VectorXf a(3);
  Eigen::VectorXf b(a);
  Eigen::VectorXf c(b);

  a = Eigen::Vector4f(1, 2, 3, 4);
  b = Eigen::Vector4f(3, 2, 1, 0);

  c = a + b;

  Eigen::Matrix<float,Eigen::Dynamic,Eigen::Dynamic> x(1,3);
  Eigen::Matrix<float,Eigen::Dynamic,Eigen::Dynamic> y(3,1);

  x = y;

  x.resize(4,4);
  x.resize(2,3);

  y = x;

  y = a;

  Eigen::Vector3f d = a.block<3, 1>(0, 0);

  c = a;

  Eigen::Vector3f e = a.block<3, 1>(0, 0);

  Eigen::ArrayXf ddd = c;

  ddd = a;

  Eigen::Matrix3f f;

  f.row(0) = e.transpose(); // a column vector must be transposed before assigning to a row

  Eigen::Affine3f m;


  m.matrix().row(0).head<3>() = e.transpose();

  return EXIT_SUCCESS;
  }
Example #13
void SLPointCloudWidget::updateCalibration(){

    CalibrationData calibration;
    bool load_result = calibration.load("calibration.xml");
    if (!load_result)
        return;
    // Camera coordinate system
    visualizer->addCoordinateSystem(50, "camera", 0);

    // Projector coordinate system
    cv::Mat TransformPCV(4, 4, CV_32F, 0.0);
    cv::Mat(calibration.Rp).copyTo(TransformPCV.colRange(0, 3).rowRange(0, 3));
    cv::Mat(calibration.Tp).copyTo(TransformPCV.col(3).rowRange(0, 3));
    TransformPCV.at<float>(3, 3) = 1.0; // make it homogeneous 
    Eigen::Affine3f TransformP;
    cv::cv2eigen(TransformPCV, TransformP.matrix());

    visualizer->addCoordinateSystem(50, TransformP.inverse(), "projector", 0);
}
Example #14
void alignScaleOnce1(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1,
                    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2)
{
  Eigen::Vector4f center1, center2;
  Eigen::Matrix3f covariance1, covariance2;
  pcl::computeMeanAndCovarianceMatrix (*cloud1, covariance1, center1);
  pcl::computeMeanAndCovarianceMatrix (*cloud2, covariance2, center2);

  // symmetric scale estimation (Horn 1987, closed-form absolute orientation)
  float s = sqrt( covariance1.trace() / covariance2.trace() );
  
  Eigen::Affine3f Scale;
  Scale.matrix() <<
  s, 0, 0, 0,
  0, s, 0, 0,
  0, 0, s, 0,
  0, 0, 0, 1;
  
  pcl::transformPointCloud ( *cloud2, *cloud2, Scale );

}
Example #15
void addVelodynePcl(Visualizer3D &vis, const VelodynePointCloud &cloud, const Eigen::Affine3f &pose) {
  PointCloud<PointXYZRGB>::Ptr rgb_cloud(new PointCloud<PointXYZRGB>());

  float min = cloud.getMinValuePt().intensity;
  float max = cloud.getMaxValuePt().intensity;

  for(VelodynePointCloud::const_iterator pt = cloud.begin(); pt < cloud.end(); pt++) {
    uchar r, g, b;
    float normalized = (pt->intensity - min) / (max - min) * 255.0;
    toColor(MIN(normalized*2, 255), r, g, b);
    PointXYZRGB rgb_pt;
    rgb_pt.x = pt->x;
    rgb_pt.y = pt->y;
    rgb_pt.z = pt->z;
    rgb_pt.r = r;
    rgb_pt.g = g;
    rgb_pt.b = b;
    rgb_cloud->push_back(rgb_pt);
  }

  vis.addColorPointCloud(rgb_cloud, pose.matrix());
}
	void CGLUtil::getRTFromWorld2CamCV(Eigen::Matrix3f* pRw_, Eigen::Vector3f* pTw_) {
		// only the matrix at the top of the modelview matrix stack is read
		Eigen::Affine3f M;
		glGetFloatv(GL_MODELVIEW_MATRIX,M.matrix().data());

		Eigen::Matrix3f S;
		*pTw_ = M.translation();
		*pRw_ = M.linear();
		//M.computeRotationScaling(pRw_,&S);
		//*pTw_ = (*pTw_)/S(0,0);
		// negate rows 1 and 2 (the Y and Z axes) to switch from the GL to the CV camera convention
		for (int r = 1; r < 3; r++){
			for	(int c = 0; c < 3; c++){
				(*pRw_)(r,c) = -(*pRw_)(r,c);
			}
			(*pTw_)(r) = -(*pTw_)(r); 
		}
		//PRINT(S);
		//PRINT(*pRw_);
		//PRINT(*pTw_);
		return;
	}
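
	// An equivalent, more compact form of the row negation above (a sketch,
	// not part of the original method): flipping the Y and Z axes equals
	// left-multiplying by diag(1, -1, -1).
	//
	//     Eigen::Matrix3f flip = Eigen::Vector3f(1.f, -1.f, -1.f).asDiagonal();
	//     *pRw_ = flip * (*pRw_);
	//     *pTw_ = flip * (*pTw_);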
Example #17
void IterativeClosestPoint::execute() {
    float error = std::numeric_limits<float>::max(), previousError;
    uint iterations = 0;

    // Get access to the two point sets
    PointSetAccess::pointer accessFixedSet = ((PointSet::pointer)getStaticInputData<PointSet>(0))->getAccess(ACCESS_READ);
    PointSetAccess::pointer accessMovingSet = ((PointSet::pointer)getStaticInputData<PointSet>(1))->getAccess(ACCESS_READ);

    // Get transformations of point sets
    AffineTransformation::pointer fixedPointTransform2 = SceneGraph::getAffineTransformationFromData(getStaticInputData<PointSet>(0));
    Eigen::Affine3f fixedPointTransform;
    fixedPointTransform.matrix() = fixedPointTransform2->matrix();
    AffineTransformation::pointer initialMovingTransform2 = SceneGraph::getAffineTransformationFromData(getStaticInputData<PointSet>(1));
    Eigen::Affine3f initialMovingTransform;
    initialMovingTransform.matrix() = initialMovingTransform2->matrix();

    // These matrices are 3xN (one point per column)
    MatrixXf fixedPoints = accessFixedSet->getPointSetAsMatrix();
    MatrixXf movingPoints = accessMovingSet->getPointSetAsMatrix();

    Eigen::Affine3f currentTransformation = Eigen::Affine3f::Identity();

    // Want to choose the smallest one as moving
    // (this branch is intentionally disabled via the leading false &&)
    bool invertTransform = false;
    if(false && fixedPoints.cols() < movingPoints.cols()) {
        reportInfo() << "switching fixed and moving" << Reporter::end;
        // Switch fixed and moving
        MatrixXf temp = fixedPoints;
        fixedPoints = movingPoints;
        movingPoints = temp;
        invertTransform = true;

        // Apply initial transformations
        //currentTransformation = fixedPointTransform.getTransform();
        movingPoints = fixedPointTransform*movingPoints.colwise().homogeneous();
        fixedPoints = initialMovingTransform*fixedPoints.colwise().homogeneous();
    } else {
        // Apply initial transformations
        //currentTransformation = initialMovingTransform.getTransform();
        movingPoints = initialMovingTransform*movingPoints.colwise().homogeneous();
        fixedPoints = fixedPointTransform*fixedPoints.colwise().homogeneous();
    }
    do {
        previousError = error;
        MatrixXf movedPoints = currentTransformation*(movingPoints.colwise().homogeneous());

        // Match closest points using current transformation
        MatrixXf rearrangedFixedPoints = rearrangeMatrixToClosestPoints(
                fixedPoints, movedPoints);

        // Get centroids
        Vector3f centroidFixed = getCentroid(rearrangedFixedPoints);
        //reportInfo() << "Centroid fixed: " << Reporter::end;
        //reportInfo() << centroidFixed << Reporter::end;
        Vector3f centroidMoving = getCentroid(movedPoints);
        //reportInfo() << "Centroid moving: " << Reporter::end;
        //reportInfo() << centroidMoving << Reporter::end;

        Eigen::Transform<float, 3, Eigen::Affine> updateTransform = Eigen::Transform<float, 3, Eigen::Affine>::Identity();

        if(mTransformationType == IterativeClosestPoint::RIGID) {
            // Create correlation matrix H of the deviations from centroid
            MatrixXf H = (movedPoints.colwise() - centroidMoving)*
                    (rearrangedFixedPoints.colwise() - centroidFixed).transpose();

            // Do SVD on H
            Eigen::JacobiSVD<Eigen::MatrixXf> svd(H, Eigen::ComputeFullU | Eigen::ComputeFullV);

            // Estimate rotation as R=V*U.transpose()
            MatrixXf temp = svd.matrixV()*svd.matrixU().transpose();
            Matrix3f d = Matrix3f::Identity();
            d(2,2) = sign(temp.determinant());
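            // The sign correction flips the last singular direction when
            // det(V*U^T) is negative, guaranteeing a proper rotation (no
            // reflection): the standard Kabsch/Umeyama fix.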
            Matrix3f R = svd.matrixV()*d*svd.matrixU().transpose();

            // Estimate translation
            Vector3f T = centroidFixed - R*centroidMoving;

            updateTransform.linear() = R;
            updateTransform.translation() = T;
        } else {
            // Only translation
            Vector3f T = centroidFixed - centroidMoving;
            updateTransform.translation() = T;
        }

        // Update current transformation
        currentTransformation = updateTransform*currentTransformation;

        // Calculate RMS error
        MatrixXf distance = rearrangedFixedPoints - currentTransformation*(movingPoints.colwise().homogeneous());
        error = 0;
        for(uint i = 0; i < distance.cols(); i++) {
            error += pow(distance.col(i).norm(),2);
        }
        error = sqrt(error / distance.cols());

        iterations++;
        reportInfo() << "Error: " << error << Reporter::end;
        // To continue, change in error has to be above min error change and nr of iterations less than max iterations
    } while(previousError-error > mMinErrorChange && iterations < mMaxIterations);


    if(invertTransform){
        currentTransformation = currentTransformation.inverse();
    }

    mError = error;
    mTransformation->matrix() = currentTransformation.matrix();
}
Example #18
int main(int argc, char *argv[])
{
  std::cout << "Syntax is: " << argv[0] << " [--processor 0|1|2] [model.ply] [--showmodels]\n --processor options 0,1,2 correspond to CPU, OPENCL, and OPENGL respectively\n";
  processor freenectprocessor = OPENGL;
  std::vector<int> ply_file_indices;
  
  /// http://docs.pointclouds.org/trunk/classpcl_1_1recognition_1_1_obj_rec_r_a_n_s_a_c.html#ae1a4249f8278de41a34f74b950996986
  float pair_width = 0.15;
  float voxel_size = 0.01;
  bool showmodels = false;
  
  if(argc>1){
      int fnpInt = freenectprocessor; // keep the default if --processor is not given
      ply_file_indices = parse_file_extension_argument (argc, argv, ".ply");
      parse_argument (argc, argv, "--processor", fnpInt);
      parse_argument (argc, argv, "--pair_width", pair_width);
      parse_argument (argc, argv, "--voxel_size", voxel_size);
      showmodels = find_switch(argc,argv,"--showmodels");
      freenectprocessor = static_cast<processor>(fnpInt);
      
  }
  
  
  // setup ransac
  pcl::recognition::ObjRecRANSAC orransac(pair_width,voxel_size);
  pcl::PLYReader reader;

  //Map from the file name to the point cloud
  std::map<std::string, pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr> pclMap;

  for (int idx : ply_file_indices) {
      pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr model(new pcl::PointCloud<pcl::PointXYZRGBNormal>());
      // these are the point clouds of the ply files read in
      pcl::PointCloud<pcl::Normal>::Ptr modelnormal(new pcl::PointCloud<pcl::Normal>());
      pcl::PointCloud<pcl::PointXYZ>::Ptr modelxyz(new pcl::PointCloud<pcl::PointXYZ>());
      
      std::string modelFile(argv[idx]);

      reader.read(modelFile,*model);
      //loadPLYSimpleCloud(modelFile.c_str(),model);
      pcl::copyPointCloud(*model,*modelnormal);
      pcl::copyPointCloud(*model,*modelxyz);
      
      /// @todo should const_cast really be used?
      orransac.addModel(*modelxyz,*modelnormal,modelFile);
      

      //we need to map these strings, which are the names, to the ply files
      pclMap[modelFile] = model;
      
      if (showmodels) {
    
          boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer (modelFile));
          viewer->setBackgroundColor (0, 0, 0);
          pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBNormal> rgb(model);
          viewer->addPointCloud<pcl::PointXYZRGBNormal> (model, rgb, modelFile);
          viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, modelFile);
          
          /// @todo wasStopped() seems to never be true?!?
          /// @todo the model is missing colors?!?!
          while (!viewer->wasStopped()) {
            viewer->spinOnce ();
            pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBNormal> rgb(model);
            viewer->updatePointCloud<pcl::PointXYZRGBNormal> (model, rgb, modelFile);
          }
      }
  }
  
  // estimate normals http://pointclouds.org/documentation/tutorials/normal_estimation_using_integral_images.php#normal-estimation-using-integral-images
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>());

  // setup kinect
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr kinect_cloud;
  K2G k2g(freenectprocessor);
  std::cout << "getting cloud" << std::endl;
  kinect_cloud = k2g.getCloud();

  kinect_cloud->sensor_orientation_.w() = 0.0;
  kinect_cloud->sensor_orientation_.x() = 1.0;
  kinect_cloud->sensor_orientation_.y() = 0.0;
  kinect_cloud->sensor_orientation_.z() = 0.0;

  cloud->sensor_orientation_.w() = 0.0;
  cloud->sensor_orientation_.x() = 1.0;
  cloud->sensor_orientation_.y() = 0.0;
  cloud->sensor_orientation_.z() = 0.0;
  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer->setBackgroundColor (0, 0, 0);
  pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud);
  viewer->addPointCloud<pcl::PointXYZRGB> (cloud, rgb, "sample cloud");
  viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "sample cloud");
  
  bool done = false;
  while ((!viewer->wasStopped()) && (!done)) {
    viewer->spinOnce ();
    using namespace std::chrono;
    static high_resolution_clock::time_point last;

    auto tnow = high_resolution_clock::now();
    kinect_cloud = k2g.updateCloud(kinect_cloud);
    auto tpost = high_resolution_clock::now();
    std::cout << "delta " << duration_cast<duration<double>>(tpost-tnow).count()*1000 << std::endl;
    
    

    pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
    ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
    /// @todo make magic numbers into params
    ne.setMaxDepthChangeFactor(1.0f);
    ne.setNormalSmoothingSize(10.0f);
    ne.setInputCloud(kinect_cloud);
    ne.compute(*normals);
    
    pcl::copyPointCloud(*kinect_cloud,*cloud);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloudxyz(new pcl::PointCloud<pcl::PointXYZ>());
    pcl::copyPointCloud(*kinect_cloud,*cloudxyz);
    std::list< pcl::recognition::ObjRecRANSAC::Output > recognized_objects;
    /// @todo make magic numbers into params
    double success_probability=0.99;
    orransac.recognize(*cloudxyz,*normals,recognized_objects,success_probability);
    
    // iterate through the recognized objects, obtain names and get pcls to transform onto cloud
    // the final concatenated point cloud to visualize
    
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloudTransform(new pcl::PointCloud<pcl::PointXYZRGB>());

    for (const pcl::recognition::ObjRecRANSAC::Output & object : recognized_objects) {
        //obtain the object string, use it to find the initial point cloud to transpose on cloud scene
        std::string objstring = object.object_name_; //:: ?

        //get the point cloud with normal, need to copy to pcl without normal and add to cloud
        pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pclCloud = pclMap[objstring];
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloudNoNorm(new pcl::PointCloud<pcl::PointXYZRGB>());

        // copy to point cloud without normal, now add to scene cloud
        pcl::copyPointCloud(*pclCloud, *pclCloudNoNorm);

        // Build an Eigen transform from ObjRecRANSAC's pose parameters:
        // rigid_transform_ holds a row-major 3x3 rotation in elements 0..8,
        // followed by the translation in elements 9..11.
        Eigen::Affine3f eMatrix = Eigen::Affine3f::Identity();
        const float* tr = &(object.rigid_transform_[9]);
        const float* rt = &(object.rigid_transform_[0]);
        const Eigen::Map<const Eigen::Vector3f> trans(tr);
        const Eigen::Map<const Eigen::Matrix<float,3,3,Eigen::RowMajor>> rot(rt);
        eMatrix.matrix().block<3,3>(0,0)=rot;
        eMatrix.translation()=trans;
        //outputs pclCloudTransform, now correctly transformed to add to cloud
        pcl::transformPointCloud(*pclCloudNoNorm, *pclCloudTransform, eMatrix);
        //now concatenate with existing point cloud to get new visualization
        *cloud += *pclCloudTransform;
    }

    // now visualize with the template objects superimposed on
    pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud);
    viewer->updatePointCloud<pcl::PointXYZRGB> (cloud, rgb, "sample cloud");

    /// @todo support writing to file again
//    if(ply_file_indices.size() > 0 ){
//        pcl::PCLPointCloud2 cloud2;
//        pcl::toPCLPointCloud2(*cloud,cloud2);
//        saveCloud("cloudname.ply",cloud2,false,false);
//        done = true;
//    }
  }

  k2g.shutDown();
  return 0;
}
void keypoint_map::align_z_axis() {

	pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(
			new pcl::PointCloud<pcl::PointXYZ>);

	for (int v = 0; v < depth_imgs[0].rows; v++) {
		for (int u = 0; u < depth_imgs[0].cols; u++) {
			if (depth_imgs[0].at<unsigned short>(v, u) != 0) {
				pcl::PointXYZ p;

				p.z = depth_imgs[0].at<unsigned short>(v, u) / 1000.0f;
				p.x = (u - intrinsics[2]) * p.z / intrinsics[0];
				p.y = (v - intrinsics[3]) * p.z / intrinsics[1];

				p.getVector4fMap() = camera_positions[0] * p.getVector4fMap();

				point_cloud->push_back(p);

			}
		}
	}

	pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
	pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
	// Create the segmentation object
	pcl::SACSegmentation<pcl::PointXYZ> seg;
	// Optional
	seg.setOptimizeCoefficients(true);
	// Mandatory
	seg.setModelType(pcl::SACMODEL_PLANE);
	seg.setMethodType(pcl::SAC_RANSAC);
	seg.setDistanceThreshold(0.005);
	seg.setProbability(0.99);

	seg.setInputCloud(point_cloud);
	seg.segment(*inliers, *coefficients);

	std::cerr << "Model coefficients: " << coefficients->values[0] << " "
			<< coefficients->values[1] << " " << coefficients->values[2] << " "
			<< coefficients->values[3] << " Num inliers "
			<< inliers->indices.size() << std::endl;

	Eigen::Affine3f transform = Eigen::Affine3f::Identity();
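	// The (sign-corrected) plane normal becomes the third column of the
	// rotation; the first two columns are re-orthogonalized below via cross
	// products so the basis stays right-handed. Inverting the transform then
	// maps the detected plane onto z = 0, with the plane offset d applied as
	// the z translation afterwards.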
	if (coefficients->values[2] > 0) {
		transform.matrix().coeffRef(0, 2) = coefficients->values[0];
		transform.matrix().coeffRef(1, 2) = coefficients->values[1];
		transform.matrix().coeffRef(2, 2) = coefficients->values[2];
	} else {
		transform.matrix().coeffRef(0, 2) = -coefficients->values[0];
		transform.matrix().coeffRef(1, 2) = -coefficients->values[1];
		transform.matrix().coeffRef(2, 2) = -coefficients->values[2];
	}

	transform.matrix().col(0).head<3>() =
			transform.matrix().col(1).head<3>().cross(
					transform.matrix().col(2).head<3>());
	transform.matrix().col(1).head<3>() =
			transform.matrix().col(2).head<3>().cross(
					transform.matrix().col(0).head<3>());

	transform = transform.inverse();

	transform.matrix().coeffRef(2, 3) = coefficients->values[3];

	pcl::transformPointCloud(keypoints3d, keypoints3d, transform);

	for (size_t i = 0; i < camera_positions.size(); i++) {
		camera_positions[i] = transform * camera_positions[i];
	}

}
Example #20
int main(void)
{
	std::cout << "Starting engine" << std::endl;

	pastry::initialize();

	auto dr = std::make_shared<pastry::deferred::DeferredRenderer>();
	pastry::scene_add(dr);

	// scene (need this first!)
	auto scene = std::make_shared<pastry::deferred::Scene>();
	dr->setScene(scene);

	// skybox
	{
		auto go = pastry::deferred::FactorSkyBox();
		go->skybox->setTexture("assets/stormydays_cm.jpg");
		go->skybox->setGeometry("assets/sphere_4.obj");
		scene->setMainSkybox(go);
	}

	// camera
	{
		auto go = pastry::deferred::FactorCamera();
		go->camera->setProjection(90.0f, 1.0f, 100.0f);
		go->camera->setView({4,14,6},{2,3,0},{0,0,-1});
		scene->setMainCamera(go);
	}

	constexpr float SPACE = 3.0f;

	// geometry
	{
		constexpr int R = 3;
		for(int x=-R; x<=+R; x++) {
			for(int y=-R; y<=+R; y++) {
				auto go = pastry::deferred::FactorGeometry();
				go->geometry->load("assets/suzanne.obj");
				Eigen::Affine3f pose = Eigen::Translation3f(Eigen::Vector3f(SPACE*x,SPACE*y,0))
					* Eigen::AngleAxisf(0.0f,Eigen::Vector3f{0,0,1});
				go->geometry->setPose(pose.matrix());
				float p = static_cast<float>(x+R)/static_cast<float>(2*R);
				go->geometry->setRoughness(0.2f + 0.8f*p);
				scene->add(go);				
			}
		}
	}

	// lights
	// {
	// 	auto light = std::make_shared<pastry::deferred::PointLight>();
	// 	light->setLightPosition({+3,3,4});
	// 	light->setLightColor(20.0f*Eigen::Vector3f{1,1,1});
	// 	scene->add(light);
	// }
	// {
	// 	auto light = std::make_shared<pastry::deferred::PointLight>();
	// 	light->setLightPosition({-4,1,4});
	// 	light->setLightColor(20.0f*Eigen::Vector3f{1,0.5,0.5});
	// 	scene->add(light);
	// }
	{
		auto go = pastry::deferred::FactorEnvironmentLight();
		scene->add(go);
	}
	{
		constexpr int R = 3;
		for(int x=-R; x<=+R; x++) {
			for(int y=-R; y<=+R; y++) {
				auto go = pastry::deferred::FactorPointLight();
				auto* light = static_cast<pastry::deferred::PointLight*>(go->light.get());
				light->setLightPosition({SPACE*x,SPACE*y,1.5});
				light->setLightColor(35.0f*HSL(std::atan2(y,x)/6.2831853f,0.5f,0.5f));
				light->setLightFalloff(0.05f);
				scene->add(go);
			}
		}
	}

	std::cout << "Running main loop" << std::endl;

	pastry::run();

	std::cout << "Graceful quit" << std::endl;

	return 0;
}
Example #21
AffineTransformation::AffineTransformation(const Eigen::Affine3f& transform) {
    matrix() = transform.matrix();
}
Example #22
NORI_NAMESPACE_BEGIN

NoriObject *loadFromXML(const std::string &filename) {
    /* Load the XML file using 'pugi' (a tiny self-contained XML parser implemented in C++) */
    pugi::xml_document doc;
    pugi::xml_parse_result result = doc.load_file(filename.c_str());

    /* Helper function: map a position offset in bytes to a more readable row/column value */
    auto offset = [&](ptrdiff_t pos) -> std::string {
        std::fstream is(filename);
        char buffer[1024];
        int line = 0, linestart = 0, offset = 0;
        while (is.good()) {
            is.read(buffer, sizeof(buffer));
            for (int i = 0; i < is.gcount(); ++i) {
                if (buffer[i] == '\n') {
                    if (offset + i >= pos)
                        return tfm::format("row %i, col %i", line + 1, pos - linestart);
                    ++line;
                    linestart = offset + i;
                }
            }
            offset += (int) is.gcount();
        }
        return "byte offset " + std::to_string(pos);
    };

    if (!result) /* There was a parser / file IO error */
        throw NoriException("Error while parsing \"%s\": %s (at %s)", filename, result.description(), offset(result.offset));

    /* Set of supported XML tags */
    enum ETag {
        /* Object classes */
        EScene                = NoriObject::EScene,
        EMesh                 = NoriObject::EMesh,
        EBSDF                 = NoriObject::EBSDF,
        ETEXTURE              = NoriObject::ETEXTURE,
        EPERLIN               = NoriObject::EPERLIN,
        EMIXTEXTURE           = NoriObject::EMIXTEXTURE,
        EMIXBUMPMAP           = NoriObject::EMIXBUMPMAP,
        EBUMPMAP              = NoriObject::EBUMPMAP,
        EPhaseFunction        = NoriObject::EPhaseFunction,
        EEmitter              = NoriObject::EEmitter,
        EMedium               = NoriObject::EMedium,
        EVolume               = NoriObject::EVolume,
        ECamera               = NoriObject::ECamera,
        EIntegrator           = NoriObject::EIntegrator,
        ESampler              = NoriObject::ESampler,
        ETest                 = NoriObject::ETest,
        EReconstructionFilter = NoriObject::EReconstructionFilter,

        /* Properties */
        EBoolean = NoriObject::EClassTypeCount,
        EInteger,
        EFloat,
        EString,
        EPoint,
        EVector,
        EColor,
        ETransform,
        ETranslate,
        EMatrix,
        ERotate,
        EScale,
        ELookAt,

        EInvalid
    };

    /* Create a mapping from tag names to tag IDs */
    std::map<std::string, ETag> tags;
    tags["scene"]      = EScene;
    tags["mesh"]       = EMesh;
    tags["bsdf"]       = EBSDF;
    tags["texture"]    = ETEXTURE;
    tags["bumpmap"]    = EBUMPMAP;
    tags["perlin"]     = EPERLIN;
    tags["mixTexture"] = EMIXTEXTURE;
    tags["mixBumpmap"] = EMIXBUMPMAP;
    tags["bumpmap"]    = EBUMPMAP;
    tags["emitter"]    = EEmitter;
    tags["camera"]     = ECamera;
    tags["medium"]     = EMedium;
    tags["volume"]     = EVolume;
    tags["phase"]      = EPhaseFunction;
    tags["integrator"] = EIntegrator;
    tags["sampler"]    = ESampler;
    tags["rfilter"]    = EReconstructionFilter;
    tags["test"]       = ETest;
    tags["boolean"]    = EBoolean;
    tags["integer"]    = EInteger;
    tags["float"]      = EFloat;
    tags["string"]     = EString;
    tags["point"]      = EPoint;
    tags["vector"]     = EVector;
    tags["color"]      = EColor;
    tags["transform"]  = ETransform;
    tags["translate"]  = ETranslate;
    tags["matrix"]     = EMatrix;
    tags["rotate"]     = ERotate;
    tags["scale"]      = EScale;
    tags["lookat"]     = ELookAt;

    /* Helper function to check if attributes are fully specified */
    auto check_attributes = [&](const pugi::xml_node &node, std::set<std::string> attrs) {
        for (auto attr : node.attributes()) {
            auto it = attrs.find(attr.name());
            if (it == attrs.end())
                throw NoriException("Error while parsing \"%s\": unexpected attribute \"%s\" in \"%s\" at %s",
                                    filename, attr.name(), node.name(), offset(node.offset_debug()));
            attrs.erase(it);
        }
        if (!attrs.empty())
            throw NoriException("Error while parsing \"%s\": missing attribute \"%s\" in \"%s\" at %s",
                                filename, *attrs.begin(), node.name(), offset(node.offset_debug()));
    };

    Eigen::Affine3f transform;

    /* Helper function to parse a Nori XML node (recursive) */
    std::function<NoriObject *(pugi::xml_node &, PropertyList &, int)> parseTag = [&](
    pugi::xml_node &node, PropertyList &list, int parentTag) -> NoriObject * {
        /* Skip over comments */
        if (node.type() == pugi::node_comment || node.type() == pugi::node_declaration)
            return nullptr;

        if (node.type() != pugi::node_element)
            throw NoriException(
                "Error while parsing \"%s\": unexpected content at %s",
                filename, offset(node.offset_debug()));

        /* Look up the name of the current element */
        auto it = tags.find(node.name());
        if (it == tags.end())
            throw NoriException("Error while parsing \"%s\": unexpected tag \"%s\" at %s",
            filename, node.name(), offset(node.offset_debug()));
        int tag = it->second;

        /* Perform some safety checks to make sure that the XML tree really makes sense */
        bool hasParent            = parentTag != EInvalid;
        bool parentIsObject       = hasParent && parentTag < NoriObject::EClassTypeCount;
        bool currentIsObject      = tag < NoriObject::EClassTypeCount;
        bool parentIsTransform    = parentTag == ETransform;
        bool currentIsTransformOp = tag == ETranslate || tag == ERotate || tag == EScale || tag == ELookAt || tag == EMatrix;

        if (!hasParent && !currentIsObject)
            throw NoriException("Error while parsing \"%s\": root element \"%s\" must be a Nori object (at %s)",
            filename, node.name(), offset(node.offset_debug()));

        if (parentIsTransform != currentIsTransformOp)
            throw NoriException("Error while parsing \"%s\": transform nodes "
            "can only contain transform operations (at %s)",
            filename,  offset(node.offset_debug()));

        if (hasParent && !parentIsObject && !(parentIsTransform && currentIsTransformOp))
            throw NoriException("Error while parsing \"%s\": node \"%s\" requires a Nori object as parent (at %s)",
            filename, node.name(), offset(node.offset_debug()));

        if (tag == EScene)
            node.append_attribute("type") = "scene";
        else if (tag == ETransform)
            transform.setIdentity();

        PropertyList propList;
        std::vector<NoriObject *> children;
        for (pugi::xml_node &ch: node.children()) {
            NoriObject *child = parseTag(ch, propList, tag);
            if (child)
                children.push_back(child);
        }

        NoriObject *result = nullptr;
        try {
            if (currentIsObject) {
                check_attributes(node, { "type" });

                /* This is an object, first instantiate it */
                result = NoriObjectFactory::createInstance(
                             node.attribute("type").value(),
                             propList
                         );

                if (result->getClassType() != (int) tag) {
                    throw NoriException(
                        "Unexpectedly constructed an object "
                        "of type <%s> (expected type <%s>): %s",
                        NoriObject::classTypeName(result->getClassType()),
                        NoriObject::classTypeName((NoriObject::EClassType) tag),
                        result->toString());
                }

                /* Add all children */
                for (auto ch: children) {
                    result->addChild(ch);
                    ch->setParent(result);
                }

                /* Activate / configure the object */
                result->activate();
            } else {
                /* This is a property */
                switch (tag) {
                case EString: {
                    check_attributes(node, { "name", "value" });
                    list.setString(node.attribute("name").value(), node.attribute("value").value());
                }
                break;
                case EFloat: {
                    check_attributes(node, { "name", "value" });
                    list.setFloat(node.attribute("name").value(), toFloat(node.attribute("value").value()));
                }
                break;
                case EInteger: {
                    check_attributes(node, { "name", "value" });
                    list.setInteger(node.attribute("name").value(), toInt(node.attribute("value").value()));
                }
                break;
                case EBoolean: {
                    check_attributes(node, { "name", "value" });
                    list.setBoolean(node.attribute("name").value(), toBool(node.attribute("value").value()));
                }
                break;
                case EPoint: {
                    check_attributes(node, { "name", "value" });
                    list.setPoint(node.attribute("name").value(), Point3f(toVector3f(node.attribute("value").value())));
                }
                break;
                case EVector: {
                    check_attributes(node, { "name", "value" });
                    list.setVector(node.attribute("name").value(), Vector3f(toVector3f(node.attribute("value").value())));
                }
                break;
                case EColor: {
                    check_attributes(node, { "name", "value" });
                    list.setColor(node.attribute("name").value(), Color3f(toVector3f(node.attribute("value").value()).array()));
                }
                break;
                case ETransform: {
                    check_attributes(node, { "name" });
                    list.setTransform(node.attribute("name").value(), transform.matrix());
                }
                break;
                case ETranslate: {
                    check_attributes(node, { "value" });
                    Eigen::Vector3f v = toVector3f(node.attribute("value").value());
                    transform = Eigen::Translation<float, 3>(v.x(), v.y(), v.z()) * transform;
                }
                break;
                case EMatrix: {
                    check_attributes(node, { "value" });
                    std::vector<std::string> tokens = tokenize(node.attribute("value").value());
                    if (tokens.size() != 16)
                        throw NoriException("Expected 16 values");
                    Eigen::Matrix4f matrix;
                    for (int i=0; i<4; ++i)
                        for (int j=0; j<4; ++j)
                            matrix(i, j) = toFloat(tokens[i*4+j]);
                    transform = Eigen::Affine3f(matrix) * transform;
                }
                break;
                case EScale: {
                    check_attributes(node, { "value" });
                    Eigen::Vector3f v = toVector3f(node.attribute("value").value());
                    transform = Eigen::DiagonalMatrix<float, 3>(v) * transform;
                }
                break;
                case ERotate: {
                    check_attributes(node, { "angle", "axis" });
                    float angle = degToRad(toFloat(node.attribute("angle").value()));
                    Eigen::Vector3f axis = toVector3f(node.attribute("axis").value());
                    transform = Eigen::AngleAxis<float>(angle, axis) * transform;
                }
                break;
                case ELookAt: {
                    check_attributes(node, { "origin", "target", "up" });
                    Eigen::Vector3f origin = toVector3f(node.attribute("origin").value());
                    Eigen::Vector3f target = toVector3f(node.attribute("target").value());
                    Eigen::Vector3f up = toVector3f(node.attribute("up").value());

                    Vector3f dir = (target - origin).normalized();
                    Vector3f left = up.normalized().cross(dir);
                    Vector3f newUp = dir.cross(left);

                    Eigen::Matrix4f trafo;
                    trafo << left, newUp, dir, origin,
                          0, 0, 0, 1;

                    transform = Eigen::Affine3f(trafo) * transform;
                }
                break;

                default:
                    throw NoriException("Unhandled element \"%s\"", node.name());
                };
            }
        } catch (const NoriException &e) {
            throw NoriException("Error while parsing \"%s\": %s (at %s)", filename,
                                e.what(), offset(node.offset_debug()));
        }

        return result;
    };

    PropertyList list;
    return parseTag(*doc.begin(), list, EInvalid);
}
/**
 * @brief Callback for feedback subscriber for getting the transformation of moved markers
 *
 * @param feedback subscribed from geometry_map/map/feedback
 */
void ShapeVisualization::setShapePosition(const visualization_msgs::InteractiveMarkerFeedbackConstPtr& feedback)//,const cob_3d_mapping_msgs::Shape& shape)
{

  cob_3d_mapping_msgs::ShapeArray map_msg;
  map_msg.header.frame_id="/map";
  map_msg.header.stamp = ros::Time::now();

  int shape_id,index;
  index=-1;
  stringstream name(feedback->marker_name);

  Eigen::Quaternionf quat;

  Eigen::Matrix3f rotationMat;
  Eigen::MatrixXf rotationMatInit;

  Eigen::Vector3f normalVec;
  Eigen::Vector3f normalVecNew;
  Eigen::Vector3f newCentroid;
  Eigen::Matrix4f transSecondStep;


  if (feedback->marker_name != "Text"){
    name >> shape_id ;
    cob_3d_mapping::Polygon p;

    for(int i=0;i<sha.shapes.size();++i)
    {
      if (sha.shapes[i].id == shape_id)
      {
        index = i;
      }
    }


    // Temporary fix: do nothing if the shape's index is not found.
    // This is not supposed to occur, but apparently it does.
    if(index==-1){
      ROS_WARN("shape not in map array");
      return;
    }


    cob_3d_mapping::fromROSMsg (sha.shapes.at(index), p);

    if (feedback->event_type == 2 && feedback->menu_entry_id == 5){ // MENU_SELECT
      quatInit.x() = (float)feedback->pose.orientation.x ;           //normalized
      quatInit.y() = (float)feedback->pose.orientation.y ;
      quatInit.z() = (float)feedback->pose.orientation.z ;
      quatInit.w() = (float)feedback->pose.orientation.w ;

      oldCentroid (0) = (float)feedback->pose.position.x ;
      oldCentroid (1) = (float)feedback->pose.position.y ;
      oldCentroid (2) = (float)feedback->pose.position.z ;

      quatInit.normalize() ;

      rotationMatInit = quatInit.toRotationMatrix() ;

      transInit.block(0,0,3,3) << rotationMatInit ;
      transInit.col(3).head(3) << oldCentroid(0) , oldCentroid(1), oldCentroid(2) ;
      transInit.row(3) << 0,0,0,1 ;

      transInitInv = transInit.inverse() ;
      Eigen::Affine3f affineInitFinal (transInitInv) ;
      affineInit = affineInitFinal ;

      std::cout << "transInit : " << "\n"    << affineInitFinal.matrix() << "\n" ;
    }

    if (feedback->event_type == 5){ // MOUSE_UP
      /* the name of the marker is arrows_shape_.id, we need to erase the "arrows_" part */
      //string strName(feedback->marker_name);
      //strName.erase(strName.begin(),strName.begin()+7);
//      stringstream name(strName);
	stringstream name(feedback->marker_name);

      name >> shape_id ;
      cob_3d_mapping::Polygon p;
      cob_3d_mapping::fromROSMsg (sha.shapes.at(index), p);

      quat.x() = (float)feedback->pose.orientation.x ;           //normalized
      quat.y() = (float)feedback->pose.orientation.y ;
      quat.z() = (float)feedback->pose.orientation.z ;
      quat.w() = (float)feedback->pose.orientation.w ;

      quat.normalize() ;

      rotationMat = quat.toRotationMatrix() ;

      normalVec << sha.shapes.at(index).params[0],                   //normalized
          sha.shapes.at(index).params[1],
          sha.shapes.at(index).params[2];

      newCentroid << (float)feedback->pose.position.x ,
          (float)feedback->pose.position.y ,
          (float)feedback->pose.position.z ;


      transSecondStep.block(0,0,3,3) << rotationMat ;
      transSecondStep.col(3).head(3) << newCentroid(0) , newCentroid(1), newCentroid(2) ;
      transSecondStep.row(3) << 0,0,0,1 ;

      Eigen::Affine3f affineSecondStep(transSecondStep) ;

      std::cout << "transfrom : " << "\n"    << affineSecondStep.matrix() << "\n" ;

      Eigen::Affine3f affineFinal(affineSecondStep*affineInit) ;
      Eigen::Matrix4f matFinal = (transSecondStep*transInitInv) ;

      normalVecNew    = (matFinal.block(0,0,3,3))* normalVec;
      //      newCentroid  = transFinal *OldCentroid ;


      sha.shapes.at(index).centroid.x = newCentroid(0) ;
      sha.shapes.at(index).centroid.y = newCentroid(1) ;
      sha.shapes.at(index).centroid.z = newCentroid(2) ;


      sha.shapes.at(index).params[0] = normalVecNew(0) ;
      sha.shapes.at(index).params[1] = normalVecNew(1) ;
      sha.shapes.at(index).params[2] = normalVecNew(2) ;


      std::cout << "transfromFinal : " << "\n"    << affineFinal.matrix() << "\n" ;

      pcl::PointCloud<pcl::PointXYZ> pc;
      pcl::PointXYZ pt;
      sensor_msgs::PointCloud2 pc2;

      for(unsigned int j=0; j<p.contours.size(); j++)
      {
        for(unsigned int k=0; k<p.contours[j].size(); k++)
        {
          p.contours[j][k] = affineFinal * p.contours[j][k];
          pt.x = p.contours[j][k][0] ;
          pt.y = p.contours[j][k][1] ;
          pt.z = p.contours[j][k][2] ;
          pc.push_back(pt) ;
        }
      }

      pcl::toROSMsg (pc, pc2);
      sha.shapes.at(index).points.clear() ;
      sha.shapes.at(index).points.push_back (pc2);

      // uncomment when using test_shape_array

      //      for(unsigned int i=0;i<sha.shapes.size();i++){
      //        map_msg.header = sha.shapes.at(i).header ;
      //        map_msg.shapes.push_back(sha.shapes.at(i)) ;
      //      }
      //      shape_pub_.publish(map_msg);

      // end uncomment

      modified_shapes_.shapes.push_back(sha.shapes.at(index));
    }
Example #24
      if(row <= col) {
        stream << " " << precision(row, col);
      }
    }
  }
  return stream;
}

std::istream& operator>>(std::istream& is, PoseGraphEdge& edge) {
  string type;
  float x, y, z, roll, pitch, yaw;

  is >> type >> edge.sourceIdx >> edge.targetIdx >> x >> y >> z >> roll >> pitch >> yaw;

  Eigen::Affine3f t = getTransformation(x, y, z, roll, pitch, yaw);
  edge.transformation = t.matrix();

  edge.covariance.create(6, 6, CV_32FC1);
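  // The file stores only the upper triangle; mirror each value so the
  // covariance matrix stays symmetric.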
  for(int r = 0; r < edge.covariance.rows; r++) {
    for(int c = 0; c < edge.covariance.cols; c++) {
      if(r <= c) {
        float cov; is >> cov;
        edge.covariance.at<float>(r, c) = cov;
        edge.covariance.at<float>(c, r) = cov;
      }
    }
  }

  return is;
}
Example #25
bool AirwaysFilter::execute()
{
	std::cout << "EXECUTING AIRWAYS FILTER" << std::endl;
    ImagePtr input = this->getCopiedInputImage();
    	if (!input)
    		return false;
    mInputImage = input;

	std::string filename = (patientService()->getActivePatientFolder()+"/"+input->getFilename()).toStdString();

	try {
		// Import image data from disk
		fast::ImageFileImporter::pointer importer = fast::ImageFileImporter::New();
		importer->setFilename(filename);

	    // Need to know the data type
	    importer->update();
	    fast::Image::pointer image = importer->getOutputData<fast::Image>();
	    std::cout << "IMAGE LOADED" << std::endl;

	    // Set up algorithm
		fast::TubeSegmentationAndCenterlineExtraction::pointer tubeExtraction = fast::TubeSegmentationAndCenterlineExtraction::New();
	    tubeExtraction->setInputConnection(importer->getOutputPort());
	    tubeExtraction->extractDarkTubes();
	    if(getCroppingOption(mOptions)->getValue())
			tubeExtraction->enableAutomaticCropping(true);
	    // Set min and max intensity based on HU unit scale
	    if(image->getDataType() == fast::TYPE_UINT16) {
	        tubeExtraction->setMinimumIntensity(0);
	        tubeExtraction->setMaximumIntensity(1124);
	    } else {
	        tubeExtraction->setMinimumIntensity(-1024);
	        tubeExtraction->setMaximumIntensity(100);
	    }
	    tubeExtraction->setMinimumRadius(0.5);
	    tubeExtraction->setMaximumRadius(50);
	    tubeExtraction->setSensitivity(getSensitivityOption(mOptions)->getValue());
	    // TODO set blur amount..

	    // Convert fast segmentation data to VTK data which CX can use
        vtkSmartPointer<fast::VTKImageExporter> vtkExporter = fast::VTKImageExporter::New();
	    vtkExporter->setInputConnection(tubeExtraction->getSegmentationOutputPort());
	    vtkExporter->Update();
	    mSegmentationOutput = vtkExporter->GetOutput();
	    std::cout << "FINISHED AIRWAY SEGMENTATION" << std::endl;

	    // Get output segmentation data
	    fast::Segmentation::pointer segmentation = tubeExtraction->getOutputData<fast::Segmentation>(0);

	    // Get the transformation of the segmentation
	    Eigen::Affine3f T = fast::SceneGraph::getEigenAffineTransformationFromData(segmentation);
	    mTransformation.matrix() = T.matrix().cast<double>(); // cast to double

	    // Get centerline
	    vtkSmartPointer<fast::VTKLineSetExporter> vtkCenterlineExporter = fast::VTKLineSetExporter::New();
	    vtkCenterlineExporter->setInputConnection(tubeExtraction->getCenterlineOutputPort());
	    mCenterlineOutput = vtkCenterlineExporter->GetOutput();
	    vtkCenterlineExporter->Update();

	} catch(fast::Exception& e) {
		std::string error = e.what();
		reportError("fast::Exception: "+qstring_cast(error));

		return false;
	} catch(cl::Error& e) {
		reportError("cl::Error:"+qstring_cast(e.what()));

		return false;
	} catch (std::exception& e){
		reportError("std::exception:"+qstring_cast(e.what()));

		return false;
	} catch (...){
		reportError("Airway segmentation algorithm threw a unknown exception.");

		return false;
	}
	return true;
}