Example #1
bool transformCloud(double theta, char axis, double x, double y, double z, pcl::PointCloud<pcl::PointXYZRGB>::Ptr source_cloud, pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformed_cloud)
{
    Eigen::Affine3f transform = Eigen::Affine3f::Identity();

    // Define the translation
    transform.translation() << x, y, z;

    switch (axis) {
    case 'x':
        // The same rotation matrix as before; theta radians around the X axis
        transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitX()));
        break;
    case 'y':
        // The same rotation matrix as before; theta radians around the Y axis
        transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitY()));
        break;
    case 'z':
        // The same rotation matrix as before; theta radians around the Z axis
        transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitZ()));
        break;
    }

    // Print the transformation
    //printf ("\nTransformation matrix:\n");
    //std::cout << transform.matrix() << std::endl;

    // Executing the transformation
    pcl::transformPointCloud (*source_cloud, *transformed_cloud, transform);

    return true;
}
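A minimal usage sketch for the helper above; the PCD file name and the transform values are illustrative only:

pcl::PointCloud<pcl::PointXYZRGB>::Ptr source (new pcl::PointCloud<pcl::PointXYZRGB>);
pcl::PointCloud<pcl::PointXYZRGB>::Ptr transformed (new pcl::PointCloud<pcl::PointXYZRGB>);
pcl::io::loadPCDFile<pcl::PointXYZRGB> ("cloud.pcd", *source);  // hypothetical input file
// rotate pi/4 radians about Z and translate 1 m along X
transformCloud (M_PI / 4.0, 'z', 1.0, 0.0, 0.0, source, transformed);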
StereoProperties VelStereoMatcher::vecToStereo(const gsl_vector* vec)
{

  float x = gsl_vector_get(vec, 0);
  float y = gsl_vector_get(vec, 1);
  float z = gsl_vector_get(vec, 2);
  float ax = gsl_vector_get(vec, 3);
  float ay = gsl_vector_get(vec, 4);
  float az = gsl_vector_get(vec, 5);
  float r = std::sqrt(ax * ax + ay * ay + az * az);

  float fx = gsl_vector_get(vec, 6);
  float fy = gsl_vector_get(vec, 7);
  float cx = gsl_vector_get(vec, 8);
  float cy = gsl_vector_get(vec, 9);
  float baseline = gsl_vector_get(vec, 10);

  //normalize axis
  ax /= r;
  ay /= r;
  az /= r;

  //create tform
  Eigen::Affine3f tform = Eigen::Affine3f::Identity();
  tform.translation() << x, y, z;
  tform.rotate(Eigen::AngleAxisf(r, Eigen::Vector3f(ax, ay, az)));

  //create stereo
  return StereoProperties(fx,fy,cx,cy,baseline,tform);
}
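A minimal usage sketch, assuming a VelStereoMatcher instance named matcher and illustrative parameter values; the 11-element layout follows the accessors above (translation, axis-angle rotation encoded by the vector's norm, then fx, fy, cx, cy, baseline):

gsl_vector* v = gsl_vector_alloc(11);
gsl_vector_set(v, 0, 0.10);        // x
gsl_vector_set(v, 1, 0.00);        // y
gsl_vector_set(v, 2, 0.20);        // z
gsl_vector_set(v, 3, 0.0);         // ax
gsl_vector_set(v, 4, 0.0);         // ay
gsl_vector_set(v, 5, M_PI / 2.0);  // az (the norm of (ax, ay, az) is the rotation angle)
gsl_vector_set(v, 6, 700.0);       // fx
gsl_vector_set(v, 7, 700.0);       // fy
gsl_vector_set(v, 8, 320.0);       // cx
gsl_vector_set(v, 9, 240.0);       // cy
gsl_vector_set(v, 10, 0.12);       // baseline
StereoProperties props = matcher.vecToStereo(v);
gsl_vector_free(v);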
Example #3
void fuzzyAffines()
{
    std::vector<Eigen::Matrix4f> trans;
    trans.reserve(count/10);
    for( size_t i=0; i<count/10; i++ )
    {
        Eigen::Vector3f x = Eigen::Vector3f::Random();
        Eigen::Vector3f y = Eigen::Vector3f::Random();

        x.normalize();
        y.normalize();

        Eigen::Vector3f z = x.cross(y);
        z.normalize();

        y = z.cross(x);
        y.normalize();

        Eigen::Affine3f t = Eigen::Affine3f::Identity();
        Eigen::Matrix3f r = Eigen::Matrix3f::Identity();

        r.col(0) = x;
        r.col(1) = y;
        r.col(2) = z;

        t.rotate(r);
        t.translate( 0.5f * Eigen::Vector3f::Random() + Eigen::Vector3f(0.5,0.5,0.5) );

        trans.push_back( t.matrix() );
    }

    s_plot.setColor( Eigen::Vector4f(1,0,0,1) );
    s_plot.setLineWidth( 3.0 );
    s_plot( trans, nox::plot<float>::Pos | nox::plot<float>::CS );
}
bool targetViewpoint(const Eigen::Vector3f& rayo,const Eigen::Vector3f& target,const Eigen::Vector3f& down,
                     Eigen::Affine3f& transf)
{
  // uz: versor pointing toward the destination
  Eigen::Vector3f uz = target - rayo;
  if (std::abs(uz.norm()) < 1e-3) {
    std::cout << __FILE__ << "," << __LINE__ << ": target point on ray origin!" << std::endl;
    return false;
  }
  uz.normalize();
  //std::cout << "uz " << uz.transpose() << ", norm " << uz .norm() << std::endl;
  // ux: versor pointing toward the ground
  Eigen::Vector3f ux = down - down.dot(uz) * uz;  
  if (std::abs(ux.norm()) < 1e-3) {
    std::cout << __FILE__ << "," << __LINE__ << ": ray to target is parallel to the down direction!" << std::endl;
    return false;
  }
  ux.normalize();
  //std::cout << "ux " << ux.transpose() << ", norm " << ux.norm() << std::endl;
  Eigen::Vector3f uy = uz.cross(ux);
  //std::cout << "uy " << uy.transpose() << ", norm " << uy.norm() << std::endl;
  Eigen::Matrix3f rot;
  rot << ux.x(), uy.x(), uz.x(),
         ux.y(), uy.y(), uz.y(),
         ux.z(), uy.z(), uz.z();
  transf.setIdentity();
  transf.translate(rayo);
  transf.rotate(rot);
  //std::cout << __FILE__ << "\nrotation\n" << rot << "\ntranslation\n" << rayo << "\naffine\n" << transf.matrix() << std::endl;
  return true;
}
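A short usage sketch with made-up ray origin, target, and down vectors (not part of the original snippet):

Eigen::Vector3f origin(0.0f, 0.0f, 1.5f);
Eigen::Vector3f target(2.0f, 0.0f, 0.0f);
Eigen::Vector3f down(0.0f, 0.0f, -1.0f);
Eigen::Affine3f pose;
if (targetViewpoint(origin, target, down, pose))
    std::cout << "viewpoint pose:\n" << pose.matrix() << std::endl;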
    void rotatePointCloud(pcl::PointCloud<PointT>::Ptr inputCloud, pcl::PointCloud<PointT>::Ptr outputCloud, double theta)
    {
        // Create rotation matrix.
        Eigen::Affine3f transform = Eigen::Affine3f::Identity();
        transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitX()));

        // Transform the cloud and return it
        pcl::transformPointCloud (*inputCloud, *outputCloud, transform);
    }
PointCloudViewer::PointCloudViewer(QWidget* parent, Qt::WindowFlags f): QVTKWidget(parent, f)
{
    mImpl = new PointCloudViewer::Impl;
    mImpl->Vis.addPointCloud(common::KinectPointCloud::Ptr(new common::KinectPointCloud));
    Eigen::Affine3f trans;
    trans.setIdentity();
    trans.rotate(Eigen::AngleAxisf(3.14159265, Eigen::Vector3f(0, 0, 1)));
    
    mImpl->Vis.addCoordinateSystem(1.0, trans);
    mImpl->Vis.setBackgroundColor(0, 0, 0);
    SetRenderWindow(mImpl->Vis.getRenderWindow().GetPointer());
}
Example #7
 /* rotate an entire pcl::PointXYZ cloud about the Z axis */
 pcl::PointCloud<pcl::PointXYZ>::Ptr rotateCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cloudToRotate, 
                                                 float angleToRotateTo){
    
     //create the rotation transformation matrix
     Eigen::Affine3f rotationMatrix = Eigen::Affine3f::Identity();
     rotationMatrix.rotate (Eigen::AngleAxisf (angleToRotateTo, Eigen::Vector3f::UnitZ()));
 
     //Apply rotation
     pcl::PointCloud<pcl::PointXYZ>::Ptr rotatedCloud (new pcl::PointCloud<pcl::PointXYZ> ());
     pcl::transformPointCloud (*cloudToRotate, *rotatedCloud, rotationMatrix);
 
     return rotatedCloud;
 }
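A short usage sketch (hypothetical PCD file name, error handling omitted):

pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
pcl::io::loadPCDFile<pcl::PointXYZ> ("input.pcd", *cloud);
// rotate the whole cloud 90 degrees about Z
pcl::PointCloud<pcl::PointXYZ>::Ptr rotated = rotateCloud (cloud, static_cast<float> (M_PI / 2.0));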
Example #8
void alignedAffines()
{
    s_plot.setColor( Eigen::Vector4f(0,0,0,1) );
    s_plot.setLineWidth( 1.0 );
    for( size_t i=0; i<count; i++ )
    {
        Eigen::Affine3f t = Eigen::Affine3f::Identity();
        t.rotate(Eigen::Matrix3f::Identity());
        t.translate( 0.5f * Eigen::Vector3f::Random() + Eigen::Vector3f(0.5,0.5,0.5) );

        s_plot( t.matrix(), nox::plot<float>::Pos | nox::plot<float>::CS );
    }
}
pcl::PointCloud<pcl::PointXYZ>::Ptr ObjectDetector::transformCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_transformed (new pcl::PointCloud<pcl::PointXYZ>);

    // Create rotation + translation matrix
    Eigen::Affine3f transform = Eigen::Affine3f::Identity();
    transform.translation() << 0.0, -y_translation, 0.0;
    transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitX()));

    // Perform transformation on point cloud
    pcl::transformPointCloud(*cloud, *cloud_transformed, transform);

    return cloud_transformed;
}
Example #10
int main( int argc, char* argv[] )
{
    QGuiApplication app( argc, argv );

    RenderWindow window;
    window.show();

    Defaults::initialize();

    Eigen::Affine3f mat = Eigen::Affine3f::Identity();

    FileMesh* mesh = new FileMesh( GluonCore::DirectoryProvider::instance()->dataDirectory() + "/gluon/examples/graphics/duck.dae" );
    GluonCore::ResourceManager::instance()->addResource< Mesh >( "duck.dae", mesh );
    mesh->initialize();
    Texture* texture = GluonCore::ResourceManager::instance()->createResource< Texture >( "duck.tga" );
    texture->load( GluonCore::DirectoryProvider::instance()->dataDirectory() + "/gluon/examples/graphics/duck.tga" );

    Material* material = GluonCore::ResourceManager::instance()->createResource< Material>( "duck" );
    material->load( GluonCore::DirectoryProvider::instance()->dataDirectory() + "/gluon/examples/graphics/duck.gluonmaterial" );
    material->build();

    World* world = GluonCore::ResourceManager::instance()->resource< World >( Defaults::World );

    Entity* ent = world->createEntity< Entity >();
    mat.rotate( Eigen::AngleAxis<float>( -M_PI_4 /* -pi/4 */, Eigen::Vector3f(0.f, 1.f, 0.f) ) );
    ent->setTransform( mat );
    ent->setMesh( mesh );
    ent->setMaterialInstance( material->createInstance() );
    ent->materialInstance()->setProperty( "texture0", QVariant::fromValue( texture ) );

    Camera* cam = world->createEntity< Camera >();
    mat = Eigen::Affine3f::Identity();
    mat.translate( Eigen::Vector3f(0.f, 75.f, 100.f) );
    cam->setTransform( mat );

    cam->setVisibleArea( QSizeF( 200.f, 200.f ) );
    cam->setNearPlane( 0.f );
    cam->setFarPlane( 1000.f );

    GluonCore::ResourceManager::instance()->resource< RenderTarget >( Defaults::RenderTarget )->addChild( cam );

    //app.exec();

    return app.exec();
}
Example #11
    /**
     * @brief Renders the trackball representation.
     * @todo setTrackballOrthographicMatrix should be set during viewport resize
     */
    void render (void)
    {
        if(drawTrackball)
        {

            float ratio = (viewport[2] - viewport[0]) / (viewport[3] - viewport[1]);
            setTrackballOrthographicMatrix(-ratio, ratio, -1.0, 1.0, 0.1, 100.0);

            trackball_shader.bind();

            //Using unique viewMatrix for the trackball, considering only the rotation to be visualized.
            Eigen::Affine3f trackballViewMatrix = Eigen::Affine3f::Identity();
            trackballViewMatrix.translate(defaultTranslation);
            trackballViewMatrix.rotate(quaternion);

            trackball_shader.setUniform("viewMatrix", trackballViewMatrix);
            trackball_shader.setUniform("projectionMatrix", trackballProjectionMatrix);
            trackball_shader.setUniform("nearPlane", near_plane);
            trackball_shader.setUniform("farPlane", far_plane);

            bindBuffers();

            //X:
            Eigen::Vector4f colorVector(1.0, 0.0, 0.0, 1.0);
            trackball_shader.setUniform("modelMatrix", Eigen::Affine3f::Identity()*Eigen::AngleAxis<float>(M_PI/2.0,Eigen::Vector3f(0.0,1.0,0.0)));
            trackball_shader.setUniform("in_Color", colorVector);
            glDrawArrays(GL_LINE_LOOP, 0, 200);

            //Y:
            colorVector << 0.0, 1.0, 0.0, 1.0;
            trackball_shader.setUniform("modelMatrix", Eigen::Affine3f::Identity()*Eigen::AngleAxis<float>(M_PI/2.0,Eigen::Vector3f(1.0,0.0,0.0)));
            trackball_shader.setUniform("in_Color", colorVector);
            glDrawArrays(GL_LINE_LOOP, 0, 200);

            //Z:
            colorVector << 0.0, 0.0, 1.0, 1.0;            
            trackball_shader.setUniform("modelMatrix", Eigen::Affine3f::Identity());
            trackball_shader.setUniform("in_Color", colorVector);
            glDrawArrays(GL_LINE_LOOP, 0, 200);

            unbindBuffers();

            trackball_shader.unbind();
        }
    }
Example #12
Eigen::Affine3f interpolateAffine(const Eigen::Affine3f &pose0, 
        const Eigen::Affine3f &pose1, float blend)
{
    /* interpolate translation */
    Eigen::Vector3f t0 = pose0.translation();
    Eigen::Vector3f t1 = pose1.translation();
    Eigen::Vector3f tIP = (t1 - t0)*blend;

    /* interpolate rotation */
    Eigen::Quaternionf r0(pose0.rotation());
    Eigen::Quaternionf r1(pose1.rotation());
    Eigen::Quaternionf rIP(r1.slerp(blend, r0));

    /* compose resulting pose */
    Eigen::Affine3f ipAff = pose0;
    ipAff.rotate(rIP);
    ipAff.translate(tIP);
    return ipAff;
}
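A short usage sketch interpolating halfway between two illustrative poses (values are arbitrary):

Eigen::Affine3f poseA = Eigen::Affine3f::Identity();
Eigen::Affine3f poseB = Eigen::Affine3f::Identity();
poseB.translate(Eigen::Vector3f(1.0f, 0.0f, 0.0f));
poseB.rotate(Eigen::AngleAxisf(static_cast<float>(M_PI / 2.0), Eigen::Vector3f::UnitZ()));
Eigen::Affine3f halfway = interpolateAffine(poseA, poseB, 0.5f);
std::cout << halfway.matrix() << std::endl;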
Example #13
Eigen::Affine3f transRotVecToAffine3f(
        const cv::Mat &translationVec, 
        const cv::Mat &rotationVec)
{

    /* Copies the axis angle rotation
     * and the translation to an
     * Affine3f transformation matrix
     */

    /* axis-angle rotation */
#if 1
    Eigen::Vector3f axis(
            rotationVec.at<float>(0,0),
            rotationVec.at<float>(1,0),
            rotationVec.at<float>(2,0));
    float angle = axis.norm(); // length of the vector 
    axis.normalize();
    Eigen::AngleAxisf rot(angle, axis);
#endif


#if 0
    /* do euler angle rotation */
    Eigen::Affine3f rot;
    rot = Eigen::AngleAxisf(rotationVec.at<float>(0,0), Eigen::Vector3f::UnitX())
      * Eigen::AngleAxisf(rotationVec.at<float>(1,0), Eigen::Vector3f::UnitY())
      * Eigen::AngleAxisf(rotationVec.at<float>(2,0), Eigen::Vector3f::UnitZ());
#endif

    /* compose new pose */
    Eigen::Affine3f pose;
    pose = Eigen::Affine3f (Eigen::Translation3f (
                translationVec.at<float>(0, 0),
                translationVec.at<float>(1, 0),
                translationVec.at<float>(2, 0)));
    pose.rotate(rot);

    return pose;
}
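A minimal usage sketch with hand-built single-channel float matrices, matching the CV_32F layout the at<float>() accesses above assume (e.g. an rvec/tvec pair from cv::solvePnP converted to float):

cv::Mat tvec = (cv::Mat_<float>(3, 1) << 0.1f, 0.0f, 0.5f);
cv::Mat rvec = (cv::Mat_<float>(3, 1) << 0.0f, 0.0f, static_cast<float>(M_PI));  // pi about Z
Eigen::Affine3f pose = transRotVecToAffine3f(tvec, rvec);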
bool normalizeOrientationAndTranslation(std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > &points, std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > &normals, Eigen::Affine3f &invTransform)
{
    if (points.empty())
        return false;

    // Perform PCA on input to determine a canonical coordinate frame for the given point cloud.
    Eigen::Matrix3Xf::MapType pointsInMatrix(points.at(0).data(), 3, static_cast<int>(points.size()));
    const Eigen::Vector3f centroid = pointsInMatrix.rowwise().mean();
    pointsInMatrix = pointsInMatrix.colwise() - centroid;

    const Eigen::Matrix3f cov = pointsInMatrix * pointsInMatrix.transpose();
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eig(cov);
    const Eigen::Matrix3f rot = eig.eigenvectors().transpose();
    for (size_t i = 0; i < points.size(); ++i) {
        points[i] = rot * points[i];
        normals[i] = rot * normals[i];
    }

    invTransform = Eigen::Affine3f::Identity();
    invTransform = invTransform.rotate(rot).translate(-centroid); // applied in right to left order.
    invTransform = invTransform.inverse();

    return true;
}
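A brief usage sketch on a toy point set; the normals here are placeholders, a real cloud would supply proper per-point normals:

std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > pts, nrms;
for (int i = 0; i < 100; ++i) {
    pts.push_back(Eigen::Vector3f::Random() + Eigen::Vector3f(5.0f, 0.0f, 0.0f));
    nrms.push_back(Eigen::Vector3f::UnitZ());
}
Eigen::Affine3f invTransform;
if (normalizeOrientationAndTranslation(pts, nrms, invTransform)) {
    // pts/nrms are now expressed in the PCA-aligned frame;
    // invTransform maps them back to the original frame
}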
void ObjectDetector::performSegmentation(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_plane (new pcl::PointCloud<pcl::PointXYZ>), cloud_plane_rotated (new pcl::PointCloud<pcl::PointXYZ>);

    ROS_INFO("PointCloud before planar segmentation: %d data points.", cloud->width * cloud->height);

    pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
    pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
    // Create the segmentation object
    pcl::SACSegmentation<pcl::PointXYZ> seg;
    // Optional
    seg.setOptimizeCoefficients (true);
    // Fit plane
    seg.setModelType (pcl::SACMODEL_PLANE);
    // Use RANSAC
    seg.setMethodType (pcl::SAC_RANSAC);
    // Set maximum number of iterations
    seg.setMaxIterations (max_it_calibration);
    // Set distance to the model threshold
    seg.setDistanceThreshold (floor_threshold);

    // Segment the largest planar component from the cloud
    seg.setInputCloud (cloud);
    seg.segment (*inliers, *coefficients);

    // Extract the inliers of the plane
    pcl::ExtractIndices<pcl::PointXYZ> extract;
    extract.setInputCloud (cloud);
    extract.setIndices (inliers);
    extract.setNegative (false);
    extract.filter (*cloud_plane);
    ROS_INFO("PointCloud representing the planar component: %d data points.", cloud_plane->width * cloud_plane->height);

    // Create normal vector of the plane
    Eigen::Matrix<float, 1, 3> normal_plane, normal_floor, r_axis;
    normal_plane[0] = coefficients->values[0];
    normal_plane[1] = coefficients->values[1];
    normal_plane[2] = coefficients->values[2];
    ROS_INFO("Plane normal: %f %f %f", normal_plane[0], normal_plane[1], normal_plane[2]);

    // Create normal vector of the floor
    normal_floor[0] = 0.0f;
    normal_floor[1] = 1.0f;
    normal_floor[2] = 0.0f;
    ROS_INFO("Floor normal: %f %f %f", normal_floor[0], normal_floor[1], normal_floor[2]);

    // Determine rotation axis
    r_axis = normal_plane.cross(normal_floor);
    ROS_INFO("Rotation axis: %f %f %f", r_axis[0], r_axis[1], r_axis[2]);

    // Determine rotation angle theta
    theta = acos(normal_plane.dot(normal_floor));
    ROS_INFO("Rotation angle theta: %f", theta);

    // Create rotation matrix
    Eigen::Affine3f transform = Eigen::Affine3f::Identity();
    transform.rotate (Eigen::AngleAxisf (theta, Eigen::Vector3f::UnitX()));

    // Perform rotation on extracted plane
    pcl::transformPointCloud(*cloud_plane, *cloud_plane_rotated,transform);

    // Compute y translation by taking the average y values of the plane points
    int num_of_points = cloud_plane_rotated->width * cloud_plane_rotated->height;
    for (size_t i = 0; i < num_of_points; ++i)
    {
        y_translation += cloud_plane_rotated->points[i].y;
    }
    y_translation = y_translation / num_of_points;
}
int main(int argc, char** argv)
{
    if( argc < 2 )
    {
        printPrompt( argv[0] );
        return -1;
    }

    initModule_nonfree();

    // Get Input Data
    ifstream file(argv[1]);
    if ( !file.is_open() )
        return -1;
    
    string str;
    
        // Image Name
    getline( file, str ); getline( file, str );
    string image_name = str;
        // Cloud Name
    getline( file, str ); getline( file, str );
    string cloud_name = str;
        // width of images to be created.
    getline( file, str ); getline( file, str );
    int w = atoi(str.c_str());
        // height of images to be created
    getline( file, str ); getline( file, str );
    int h = atoi(str.c_str());
        // resolution of voxel grids
    getline( file, str ); getline( file, str );
    float r = atof(str.c_str());
        // f (distance from pinhole)
    getline( file, str ); getline( file, str );
    float f = atof(str.c_str());
        // thetax (initial rotation about X Axis of map)
    getline( file, str ); getline( file, str );
    float thetaX = atof(str.c_str());
        // thetay (initial rotation about Y Axis of map)
    getline( file, str ); getline( file, str );
    float thetaY = atof(str.c_str());
        // number of points to go to
    getline( file, str ); getline( file, str );
    float nop = atoi(str.c_str());
        // Number of divisions
    getline( file, str ); getline( file, str );
    float divs = atoi(str.c_str());
        // Number of images to return
    getline( file, str ); getline( file, str );
    int numtoreturn = atoi(str.c_str());    
        // Should we load or create photos?
    getline( file, str ); getline( file, str );
    string lorc =str.c_str();
        // Directory to look for photos
    getline( file, str ); getline( file, str );
    string dir =str.c_str();
        // Directory to look for kp and descriptors
    getline( file, str ); getline( file, str );
    string kdir =str.c_str();
        // save photos?
    getline( file, str ); getline( file, str );
    string savePhotos =str.c_str();
    
    file.close();
    // Done Getting Input Data
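    // For reference, a hypothetical input file matching the parsing above
    // (each parameter is a label line followed by a value line; only the
    // second line of each pair is used):
    //
    //   image name
    //   query.jpg
    //   cloud name
    //   map.pcd
    //   image width
    //   640
    //   image height
    //   480
    //   ... (and so on for voxel resolution, focal length, thetaX, thetaY,
    //        point count, divisions, images to return, load-or-create flag,
    //        photo directory, keypoint directory, and the save flag)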

    map<vector<float>, Mat> imagemap;
    map<vector<float>, Mat> surfmap;
    map<vector<float>, Mat> siftmap;
    map<vector<float>, Mat> orbmap;
    map<vector<float>, Mat> fastmap;
    imagemap.clear();

    vector<KeyPoint> SurfKeypoints;
    vector<KeyPoint> SiftKeypoints;
    vector<KeyPoint> OrbKeypoints;
    vector<KeyPoint> FastKeypoints;
    Mat SurfDescriptors;
    Mat SiftDescriptors;
    Mat OrbDescriptors;
    Mat FastDescriptors;

    int minHessian = 300;

    SurfFeatureDetector SurfDetector (minHessian);
    SiftFeatureDetector SiftDetector (minHessian);
    OrbFeatureDetector OrbDetector (minHessian);
    FastFeatureDetector FastDetector (minHessian);


    SurfDescriptorExtractor SurfExtractor;
    SiftDescriptorExtractor SiftExtractor;
    OrbDescriptorExtractor OrbExtractor;

    if ( !fs::exists( dir ) || lorc == "c" )
    { // Load Point Cloud and render images
        PointCloud<PT>::Ptr cloud (new pcl::PointCloud<PT>);
        io::loadPCDFile<PT>(cloud_name, *cloud);

        Eigen::Affine3f tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaX, Eigen::Vector3f::UnitX()));
        pcl::transformPointCloud (*cloud, *cloud, tf);
        tf = Eigen::Affine3f::Identity();
        tf.rotate (Eigen::AngleAxisf (thetaY, Eigen::Vector3f::UnitY()));
        pcl::transformPointCloud (*cloud, *cloud, tf);

        // Create images from point cloud
        imagemap = render::createImages(cloud, nop, w, h, r, f);

        if (savePhotos == "y")
        {
            for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
            {
                // Create image name and storagename
                string imfn = dir + "/";
                string kpfn = kdir + "/";
                for (int j = 0; j < i->first.size(); j++)
                {
                    imfn += boost::to_string(i->first[j]) + " ";
                    kpfn += boost::to_string(i->first[j]) + " ";
                }
                imfn += ".jpg";
                imwrite(imfn, i->second);

                // Detect keypoints, add to keypoint map. Same with descriptors

                SurfDetector.detect(i->second, SurfKeypoints);
                SiftDetector.detect(i->second, SiftKeypoints);
                OrbDetector.detect(i->second, OrbKeypoints);
                FastDetector.detect(i->second, FastKeypoints);

                SurfExtractor.compute(i->second, SurfKeypoints, SurfDescriptors);
                SiftExtractor.compute(i->second, SiftKeypoints, SiftDescriptors);
                OrbExtractor.compute(i->second, OrbKeypoints, OrbDescriptors);
                SiftExtractor.compute(i->second, FastKeypoints, FastDescriptors);

                // Store KP and Descriptors in yaml file.

                kpfn += ".yml";
                FileStorage store(kpfn, cv::FileStorage::WRITE);
                write(store,"SurfKeypoints",SurfKeypoints);
                write(store,"SiftKeypoints",SiftKeypoints);
                write(store,"OrbKeypoints", OrbKeypoints);
                write(store,"FastKeypoints",FastKeypoints);
                write(store,"SurfDescriptors",SurfDescriptors);
                write(store,"SiftDescriptors",SiftDescriptors);
                write(store,"OrbDescriptors", OrbDescriptors);
                write(store,"FastDescriptors",FastDescriptors);
                store.release();

                surfmap[i->first] = SurfDescriptors;
                siftmap[i->first] = SiftDescriptors;
                orbmap[i->first]  = OrbDescriptors;
                fastmap[i->first] = FastDescriptors;
            }
        }
    } 
    else 
    { // load images from the folder dir
        // First look into the folder to get a list of filenames
        vector<fs::path> ret;
        const char * pstr = dir.c_str();
        fs::path p(pstr);
        get_all(pstr, ret);

        for (int i = 0; i < ret.size(); i++)
        {
            // Load Image via filename
            string fn = ret[i].string();
            istringstream iss(fn);
            vector<string> tokens;
            copy(istream_iterator<string>(iss), istream_iterator<string>(), back_inserter<vector<string> >(tokens));

            // Construct ID from filename
            vector<float> ID;
            for (int i = 0; i < 6; i++) // 6 because there are three location floats and three direction floats
                ID.push_back(::atof(tokens[i].c_str()));
            string imfn = dir + "/" + fn;

            // Read image and add to imagemap.
            Mat m = imread(imfn);
            imagemap[ID] = m;

            // Create Filename for loading Keypoints and descriptors
            string kpfn = kdir + "/";
            for (int j = 0; j < ID.size(); j++)
            {
                kpfn += boost::to_string(ID[j]) + " ";
            }

            kpfn = kpfn+ ".yml";
            
            // Create filestorage item to read from and add to map.
            FileStorage store(kpfn, cv::FileStorage::READ);

            FileNode n1 = store["SurfKeypoints"];
            read(n1,SurfKeypoints);
            FileNode n2 = store["SiftKeypoints"];
            read(n2,SiftKeypoints);
            FileNode n3 = store["OrbKeypoints"];
            read(n3,OrbKeypoints);
            FileNode n4 = store["FastKeypoints"];
            read(n4,FastKeypoints);
            FileNode n5 = store["SurfDescriptors"];
            read(n5,SurfDescriptors);
            FileNode n6 = store["SiftDescriptors"];
            read(n6,SiftDescriptors);
            FileNode n7 = store["OrbDescriptors"];
            read(n7,OrbDescriptors);
            FileNode n8 = store["FastDescriptors"];
            read(n8,FastDescriptors);

            store.release();

            surfmap[ID] = SurfDescriptors;
            siftmap[ID] = SiftDescriptors;
            orbmap[ID]  = OrbDescriptors;
            fastmap[ID] = FastDescriptors;
        }
    }

    TickMeter tm;
    tm.reset();
    cout << "<\n  Analyzing Images ..." << endl;

    // We have a bunch of images, now we compute their grayscale and black and white.
    map<vector<float>, Mat> gsmap;
    map<vector<float>, Mat> bwmap;
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;
        Mat Image = i-> second;
        GaussianBlur( Image, Image, Size(5,5), 0, 0, BORDER_DEFAULT );


        gsmap[ID] = averageImage::getPixSumFromImage(Image, divs);
        bwmap[ID] = averageImage::aboveBelow(gsmap[ID]);
    }
    Mat image = imread(image_name);
    Mat gsimage = averageImage::getPixSumFromImage(image, divs);
    Mat bwimage = averageImage::aboveBelow(gsimage);

    // cout << gsimage <<endl;
    imwrite("GS.png", gsimage);
    namedWindow("GSIMAGE (Line 319)");
    imshow("GSIMAGE (Line 319)", gsimage);
    waitKey(0);

    vector<KeyPoint> imgSurfKeypoints;
    vector<KeyPoint> imgSiftKeypoints;
    vector<KeyPoint> imgOrbKeypoints;
    vector<KeyPoint> imgFastKeypoints;
    Mat imgSurfDescriptors;
    Mat imgSiftDescriptors;
    Mat imgOrbDescriptors;
    Mat imgFastDescriptors;

    SurfDetector.detect(image, imgSurfKeypoints);
    SiftDetector.detect(image, imgSiftKeypoints);
    OrbDetector.detect(image, imgOrbKeypoints);
    FastDetector.detect(image, imgFastKeypoints);

    SurfExtractor.compute(image, imgSurfKeypoints, imgSurfDescriptors);
    SiftExtractor.compute(image, imgSiftKeypoints, imgSiftDescriptors);
    OrbExtractor.compute(image, imgOrbKeypoints, imgOrbDescriptors);
    SiftExtractor.compute(image, imgFastKeypoints, imgFastDescriptors);


    tm.start();

    cout << ">\n<\n  Comparing Images ..." << endl;

    // We have their features, now compare them!
    map<vector<float>, float> gssim; // Gray Scale Similarity
    map<vector<float>, float> bwsim; // Above Below Similarity
    map<vector<float>, float> surfsim;
    map<vector<float>, float> siftsim;
    map<vector<float>, float> orbsim;
    map<vector<float>, float> fastsim;

    for (map<vector<float>, Mat>::iterator i = gsmap.begin(); i != gsmap.end(); ++i)
    {
        vector<float> ID = i->first;
        gssim[ID] = similarities::getSimilarity(i->second, gsimage);
        bwsim[ID] = similarities::getSimilarity(bwmap[ID], bwimage); 
        surfsim[ID] = similarities::compareDescriptors(surfmap[ID], imgSurfDescriptors);
        siftsim[ID] = similarities::compareDescriptors(siftmap[ID], imgSiftDescriptors);
        orbsim[ID] = 0;//similarities::compareDescriptors(orbmap[ID], imgOrbDescriptors);
        fastsim[ID] = 0;//similarities::compareDescriptors(fastmap[ID], imgFastDescriptors);
    }

    map<vector<float>, int> top;

    bool gotone = false;
    typedef map<vector<float>, int>::iterator iter;

    // Choose the best ones!
    for (map<vector<float>, Mat>::iterator i = imagemap.begin(); i != imagemap.end(); ++i)
    {
        vector<float> ID = i->first;

        int sim = /* gssim[ID] + 0.5*bwsim[ID] + */ 5*surfsim[ID] + 0.3*siftsim[ID] + orbsim[ID] + fastsim[ID];

        // cout << surfsim[ID] << "\t";
        // cout << siftsim[ID] << "\t";
        // cout << orbsim[ID] << "\t";
        // cout << fastsim[ID] << endl;

        if (!gotone)
        {
            top[ID] = sim;
            gotone = true;
        }

        iter it = top.begin();
        iter end = top.end();
        int max_value = it->second;
        vector<float> max_ID = it->first;
        for( ; it != end; ++it) 
        {
            int current = it->second;
            if(current > max_value) 
            {
                max_value = it->second;
                max_ID = it->first;
            }
        }
        // cout << "Sim: " << sim << "\tmax_value: " << max_value << endl;
        if (top.size() < numtoreturn)
            top[ID] = sim;
        else
        {
            if (sim < max_value)
            {
                top[ID] = sim;
                top.erase(max_ID);
            }
        }
    }
    tm.stop();
    double s = tm.getTimeSec();


    cout << ">\n<\n  Writing top " << numtoreturn << " images ..." << endl;

    int count = 1;
    namedWindow("Image");
    namedWindow("Match");
    namedWindow("ImageBW");
    namedWindow("MatchBW");
    namedWindow("ImageGS");
    namedWindow("MatchGS");

    imshow("Image", image);
    imshow("ImageBW", bwimage);
    imshow("ImageGS", gsimage);


    vector<KeyPoint> currentPoints;

    for (iter i = top.begin(); i != top.end(); ++i)
    {
        vector<float> ID = i->first;

        cout << "  Score: "<< i->second << "\tGrayScale: " << gssim[ID] << "\tBW: " << bwsim[ID] << "  \tSURF: " << surfsim[ID] << "\tSIFT: " << siftsim[ID] << endl;
        string fn = "Sim_" + boost::to_string(count) + "_" + boost::to_string(i->second) + ".png";
        imwrite(fn, imagemap[ID]);
        count++;

        normalize(bwmap[ID], bwmap[ID], 0, 255, NORM_MINMAX, CV_64F);
        normalize(gsmap[ID], gsmap[ID], 0, 255, NORM_MINMAX, CV_64F);

        imshow("Match", imagemap[ID]);
        imshow("MatchBW", bwmap[ID]);
        imshow("MatchGS", gsmap[ID]);


        waitKey(0);

    }

    cout << ">\nComparisons took " << s << " seconds for " << imagemap.size() << " images (" 
        << (int) imagemap.size()/s << " images per second)." << endl;

    return 0;
}
Example #17
int
main (int argc, char** argv)
{
    // Get input object and scene
    if (argc < 2)
    {
        pcl::console::print_error ("Syntax is: %s cloud1.pcd (cloud2.pcd)\n", argv[0]);
        return (1);
    }
    
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud_in (new pcl::PointCloud<pcl::PointXYZRGBA>);
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud_out (new pcl::PointCloud<pcl::PointXYZRGBA>);

    // Load object and scene
    pcl::console::print_highlight ("Loading point clouds...\n");
    if(argc<3)
    {
        if (pcl::io::loadPCDFile<pcl::PointXYZRGBA> (argv[1], *cloud_in) < 0)
        {
            pcl::console::print_error ("Error loading first file!\n");
            return (1);
        }
        *cloud_out = *cloud_in;
        
        //transform cloud
        Eigen::Affine3f transformation;
        transformation.setIdentity();
        transformation.translate(Eigen::Vector3f(0.3,0.02,-0.1));
        float roll, pitch, yaw;
        roll = 0.02; pitch = 1.2; yaw = 0;
        Eigen::AngleAxisf rollAngle(roll, Eigen::Vector3f::UnitX());
        Eigen::AngleAxisf pitchAngle(pitch, Eigen::Vector3f::UnitY());
        Eigen::AngleAxisf yawAngle(yaw, Eigen::Vector3f::UnitZ());
        Eigen::Quaternion<float> q = rollAngle*pitchAngle*yawAngle;
        transformation.rotate(q);
        
        pcl::transformPointCloud<pcl::PointXYZRGBA>(*cloud_in, *cloud_out, transformation);
        std::cout << "Transformed " << cloud_in->points.size () << " data points:"
            << std::endl;
    }else{
       if (pcl::io::loadPCDFile<pcl::PointXYZRGBA> (argv[1], *cloud_in) < 0 ||
        pcl::io::loadPCDFile<pcl::PointXYZRGBA> (argv[2], *cloud_out) < 0)
        {
            pcl::console::print_error ("Error loading files!\n");
            return (1);
        } 
    }
    
    // Fill in the CloudIn data
//     cloud_in->width    = 100;
//     cloud_in->height   = 1;
//     cloud_in->is_dense = false;
//     cloud_in->points.resize (cloud_in->width * cloud_in->height);
//     for (size_t i = 0; i < cloud_in->points.size (); ++i)
//     {
//         cloud_in->points[i].x = 1024 * rand () / (RAND_MAX + 1.0f);
//         cloud_in->points[i].y = 1024 * rand () / (RAND_MAX + 1.0f);
//         cloud_in->points[i].z = 1024 * rand () / (RAND_MAX + 1.0f);
//     }

    std::cout << "size:" << cloud_out->points.size() << std::endl;
      
    {
        pcl::ScopeTime("icp proces");
        
        pcl::IterativeClosestPoint<pcl::PointXYZRGBA, pcl::PointXYZRGBA> icp;
        icp.setInputSource(cloud_in);
        icp.setInputTarget(cloud_out);
        pcl::PointCloud<pcl::PointXYZRGBA> Final;
        icp.setMaximumIterations(1000000);
        icp.setRANSACOutlierRejectionThreshold(0.01);
        icp.align(Final);
        std::cout << "has converged:" << icp.hasConverged() << " score: " <<
        icp.getFitnessScore() << std::endl;
        std::cout << icp.getFinalTransformation() << std::endl;
        
        //translation, rotation
        Eigen::Matrix4f icp_transformation=icp.getFinalTransformation();
        Eigen::Matrix3f icp_rotation = icp_transformation.block<3,3>(0,0);
        Eigen::Vector3f euler = icp_rotation.eulerAngles(0,1,2);
        std::cout << "rotation: " << euler.transpose() << std::endl;
        std::cout << "translation:" << icp_transformation.block<3,1>(0,3).transpose() << std::endl;
    }
  

 return (0);
}
void cloud_cb (const sensor_msgs::PointCloud2Ptr& input)
{
	bool is_save = false;
	bool is_predict = true;

	// Create a container for the data.
	pcl::PointCloud<pcl::PointXYZI>::Ptr conv_input(new pcl::PointCloud<pcl::PointXYZI>());
	
	input->fields[3].name = "intensity";
	pcl::fromROSMsg(*input, *conv_input);

	// Size of humansize cloud
	int cloud_size = conv_input->points.size();
	std::cout << "cloud_size: " << cloud_size << std::endl;

	if( cloud_size <= 1 ){
		return;
	}

	pcl::PCA<pcl::PointXYZI> pca;
	pcl::PointCloud<pcl::PointXYZI>::Ptr transform_cloud_translate(new pcl::PointCloud<pcl::PointXYZI>());
	pcl::PointCloud<pcl::PointXYZI>::Ptr transform_cloud_rotate(new pcl::PointCloud<pcl::PointXYZI>());
	sensor_msgs::PointCloud2 output;
	geometry_msgs::PointStamped::Ptr output_point(new geometry_msgs::PointStamped());
	geometry_msgs::PointStamped output_point_;
	Eigen::Vector3f eigen_values;
	Eigen::Vector4f centroid;
	Eigen::Matrix3f eigen_vectors;
	Eigen::Affine3f translate = Eigen::Affine3f::Identity();
	Eigen::Affine3f rotate = Eigen::Affine3f::Identity();
	double theta;
	std::vector<double> description;
	description.push_back(cloud_size);

	pcl::PointCloud<pcl::PointNormal>::Ptr normals(new pcl::PointCloud<pcl::PointNormal>());
	pcl::NormalEstimation<pcl::PointXYZI, pcl::PointNormal> ne;
	ne.setInputCloud(conv_input);
	ne.setKSearch(24);
	ne.compute(*normals);

	pcl::FPFHEstimation<pcl::PointXYZI, pcl::PointNormal, pcl::FPFHSignature33> fpfh;
	fpfh.setInputCloud(conv_input);
	fpfh.setInputNormals(normals);

	pcl::search::KdTree<pcl::PointXYZI>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZI>());
	fpfh.setSearchMethod(tree);

	pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs(new pcl::PointCloud<pcl::FPFHSignature33>());
	fpfh.setRadiusSearch(0.05);
	fpfh.compute(*fpfhs);

	// for(int i=0;i<fpfhs.histogram.size();i++){
		std::cout << fpfhs->points.size();
	// }
	std::cout << std::endl;

	//PCA
	pca.setInputCloud(conv_input);
	centroid = pca.getMean();
	std::cout << "Centroid of pointcloud: " << centroid[0] << ", " << centroid[1] << ", " << centroid[2] << std::endl;
	eigen_values = pca.getEigenValues();
	std::cout << "Eigen values of PCAed pointcloud: " << eigen_values[0] << ", "  << eigen_values[1] << ", " << eigen_values[2] << std::endl;
	eigen_vectors << pca.getEigenVectors();
	std::cout << "Eigen Vectors of PCAed pointcloud:" << std::endl;
	for(int i=0;i<eigen_vectors.rows()*eigen_vectors.cols();i++){
		std::cout << eigen_vectors(i) << " ";
		if(!((i+1)%3)){
			std::cout << std::endl;
		}
	}
	std::cout << std::endl;

	//Rotation cloud from PCA data
	theta = atan2(eigen_vectors(1,0), eigen_vectors(0,0));
	std::cout << "Theta: " << theta << std::endl;
	translate.translation() << -centroid[0], -centroid[1], -centroid[2];
	pcl::transformPointCloud(*conv_input, *transform_cloud_translate, translate);

	rotate.rotate(Eigen::AngleAxisf(-theta, Eigen::Vector3f::UnitZ()));
	pcl::transformPointCloud(*transform_cloud_translate, *transform_cloud_rotate, rotate);

	pcl::toROSMsg(*transform_cloud_rotate, output);
	output.header = input -> header;
	output.header.frame_id = "map";

	pub.publish(output);

	pcl::PointXYZI searchPoint;
	searchPoint.x = 0.0;
	searchPoint.y = 0.0;
	searchPoint.z = 0.0;

	float min_distance,min_distance_tmp;
	min_distance = sqrt(
		powf( conv_input->points[0].x - searchPoint.x, 2.0f ) +
		powf( conv_input->points[0].y - searchPoint.y, 2.0f ) +
		powf( conv_input->points[0].z - searchPoint.z, 2.0f )
	);

	//brute force nearest neighbor search
	for(int i=1; i<conv_input->size(); i++){
		min_distance_tmp = sqrt(
			powf( conv_input->points[i].x - searchPoint.x, 2.0f ) +
			powf( conv_input->points[i].y - searchPoint.y, 2.0f ) +
			powf( conv_input->points[i].z - searchPoint.z, 2.0f )
		);
		if( min_distance_tmp < min_distance ){
			min_distance = min_distance_tmp;
		}
	}

	std::cout << "min_distance: " << min_distance << std::endl;
	std::cout << std::endl;

	description.push_back(min_distance);

	// Calculate center of mass of humansize cloud
	Vector4f center_of_mass;
	pcl::compute3DCentroid(*transform_cloud_rotate, center_of_mass);
	MatrixXf convariance_matrix = MatrixXf::Zero(3,3);
	MatrixXf convariance_matrix_tmp = MatrixXf::Zero(3,3);
	MatrixXf moment_of_inertia_matrix_tmp = MatrixXf::Zero(3,3);
	MatrixXf moment_of_inertia_matrix = MatrixXf::Zero(3,3);
	Vector3f point_tmp;
	Vector3f point_from_center_of_mass;

	// intensity buffer
	double intensity_sum=0, intensity_ave=0, intensity_pow_sum=0, intensity_std_dev=0, max_intensity = 5000;
	std::vector<double> intensity_histgram(25);

	for(int i=0; i<cloud_size; i++){
		point_tmp << transform_cloud_rotate->points[i].x, transform_cloud_rotate->points[i].y, transform_cloud_rotate->points[i].z;
		//Calculate three-dimensional covariance matrix
		point_from_center_of_mass <<
		center_of_mass[1] - point_tmp[1],
		center_of_mass[0] - point_tmp[0],
		center_of_mass[2] - point_tmp[2];

		convariance_matrix_tmp += point_tmp * point_tmp.transpose();

		//Calculate three-dimensional moment of inertia matrix
		moment_of_inertia_matrix_tmp << 
		powf(point_tmp[1],2.0f)+powf(point_tmp[2],2.0f),	-point_tmp[0]*point_tmp[1],							-point_tmp[0]*point_tmp[2],
		-point_tmp[0]*point_tmp[1],							powf(point_tmp[0],2.0f)+powf(point_tmp[2],2.0f),	-point_tmp[1]*point_tmp[2],
		-point_tmp[0]*point_tmp[2],							-point_tmp[1]*point_tmp[2],							powf(point_tmp[0],2.0f)+powf(point_tmp[1],2.0f);

		moment_of_inertia_matrix = moment_of_inertia_matrix + moment_of_inertia_matrix_tmp;

		// Calculate intensity distribution
		intensity_sum += transform_cloud_rotate->points[i].intensity;
		intensity_pow_sum += powf(transform_cloud_rotate->points[i].intensity,2);
		intensity_histgram[transform_cloud_rotate->points[i].intensity / (max_intensity / intensity_histgram.size())] += 1;
		if( g_max_inten < transform_cloud_rotate->points[i].intensity ){
			g_max_inten = transform_cloud_rotate->points[i].intensity;
		}
	}
	//Calculate covariance matrix
	convariance_matrix = (1.0f/cloud_size) * convariance_matrix_tmp.array();	

	// Calculate intensity distribution
	intensity_ave = intensity_sum / cloud_size;
	std::cout << "max_intensity: " << g_max_inten << std::endl;
	std::cout << "intensity_ave: " << intensity_ave << std::endl;
	intensity_std_dev = sqrt(fabs(intensity_pow_sum / cloud_size - powf(intensity_ave,2)));
	std::cout << "intensity_std_dev: " << intensity_std_dev <<std::endl;
	std::cout << "intensity_histgram: ";
	description.push_back(intensity_ave);
	description.push_back(intensity_std_dev);

	for(int i=0; i<intensity_histgram.size(); i++){
		intensity_histgram[i] = intensity_histgram[i] / cloud_size;
		std::cout << intensity_histgram[i] << " ";
		description.push_back(intensity_histgram[i]);
	}
	std::cout << std::endl;
	std::cout << std::endl;

	std::cout << "3D convariance matrix:" << std::endl;
	for(int i=0;i<convariance_matrix.rows()*convariance_matrix.cols();i++){
		if(i<5 || i==6){
			description.push_back(convariance_matrix(i));
		}
		std::cout << convariance_matrix(i) << " ";
		if(!((i+1)%3)){
			std::cout << std::endl;
		}
	}
	std::cout << std::endl;

	std::cout << "moment of inertia:" << std::endl;
	for(int i=0;i<moment_of_inertia_matrix.rows()*moment_of_inertia_matrix.cols();i++){
		if(i<5 || i==6){
			description.push_back(moment_of_inertia_matrix(i));
		}
		std::cout << moment_of_inertia_matrix(i) << " ";
		if(!((i+1)%3)){
			std::cout << std::endl;
		}
	}
	std::cout << std::endl;

	//Calculate Slice distribution
	pcl::PointXYZI min_pt,max_pt,sliced_min_pt,sliced_max_pt;
	pcl::getMinMax3D(*transform_cloud_rotate, min_pt, max_pt);
	int sectors = 10;
	double sector_height = (max_pt.z - min_pt.z)/sectors;

	pcl::PassThrough<pcl::PointXYZI> pass;
	std::vector<std::vector<double> > slice_dist(sectors,std::vector<double> (2));
	
	for(double sec_h = min_pt.z,i = 0; sec_h < max_pt.z - sector_height; sec_h += sector_height, i++){
		pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_sliced(new pcl::PointCloud<pcl::PointXYZI>());
		pass.setInputCloud(transform_cloud_rotate);
		pass.setFilterFieldName("z");
		pass.setFilterLimits(sec_h, sec_h + sector_height);
		pass.filter(*cloud_sliced);

		pcl::getMinMax3D(*cloud_sliced,sliced_min_pt,sliced_max_pt);

		slice_dist[i][0] = sliced_max_pt.x - sliced_min_pt.x;
		slice_dist[i][1] = sliced_max_pt.y - sliced_min_pt.y;
	}

	std::cout << "slice distribution:" << std::endl;
	for(int i=0;i<2;i++){
		for(int j=0;j<10;j++){
			description.push_back(slice_dist[j][i]);
			std::cout << slice_dist[j][i] << " ";
		}
		std::cout << std::endl;
	}
	std::cout << std::endl;

	if(is_save){
		std::ofstream ofs;
		ofs.open("description.txt", std::ios::ate | std::ios::app);
		ofs << "1 ";
		for(int i=0;i<description.size();i++){
			ofs << i+1 << ":" << description[i] << " ";
		}
		ofs << std::endl;
		ofs.close();
	}

	if(is_predict){
		FILE *fp_scale, *fp_model;
		int idx,max_index;
		std::vector<double> scale_min;
		std::vector<double> scale_max;
		double scale_min_tmp;
		double scale_max_tmp;
		double lower,upper;
		double predict;
		struct svm_model* model = svm_load_model(
			model_filename.c_str());
		struct svm_node *nodes;
		nodes = (struct svm_node *)malloc((description.size()+1)*sizeof(struct svm_node));
		fp_scale = fopen(scale_filename.c_str(),"r");
		if(fp_scale == NULL || model == NULL){
			ROS_ERROR_STREAM("Can't find model or scale data");
			return;
		}
		if(fgetc(fp_scale) != 'x'){
			return;
		}
		fscanf(fp_scale,"%lf %lf",&lower,&upper);
		
		// ROS_INFO_STREAM("lower: " << lower << "upper: "<< upper);
		while(fscanf(fp_scale,"%d %lf %lf\n",&idx,&scale_min_tmp,&scale_max_tmp) == 3){
			scale_min.push_back(scale_min_tmp);
			scale_max.push_back(scale_max_tmp);
		}
		std::cout << "scaled description :";
		int index = 0;
		for(int i=0;i<description.size();i++){
			if(i<24 || i>28){
				if(description[i] == DBL_INF || description[i] == DBL_MINF){
					description[i] = 0;
				}
				if(description[i] < scale_min[index]){
					nodes[index].index = i+1;
					nodes[index].value = lower;
				}
				else if(description[i] > scale_max[index]){
					nodes[index].index = i+1;
					nodes[index].value = upper;
				}
				else{
					nodes[index].index = i+1;
					nodes[index].value = lower + (upper-lower) * 
							(description[i]-scale_min[index])/
							(scale_max[index]-scale_min[index]);
				}
				std::cout << nodes[index].index << ":" << nodes[index].value << ",";
				index++;
			}
		}
		nodes[index].index = -1;
		std::cout << std::endl;
		predict = svm_predict(model,nodes);
		ROS_INFO_STREAM("Predict" << predict);

		if(predict == 1){
			Vector4f center_of_mass_base;
			pcl::compute3DCentroid(*conv_input, center_of_mass_base);
			output_point->point.x = center_of_mass_base[0];
			output_point->point.y = center_of_mass_base[1];
			if(std::abs(center_of_mass_base[1]) > 2.0){
				if(center_of_mass_base[1] < 0){
					output_point->point.y = center_of_mass_base[1] + 1.0;
				}
				else{
					output_point->point.y = center_of_mass_base[1] - 1.0;
				}
			}
			else{
				output_point->point.x = center_of_mass_base[0] - 2.0;
			}
			output_point->point.z = 0.0;
			ROS_INFO_STREAM("target_center :" << center_of_mass_base[1] << "," << center_of_mass_base[0] << "," << center_of_mass_base[2]);
			output_point->header.frame_id = "base_link";
			output_point->header.stamp = input->header.stamp;
			try{
				listener->transformPoint("/map", *output_point, output_point_);
			}
			catch(tf::TransformException &e){
				ROS_WARN_STREAM("tf::TransformException: " << e.what());
			}
			pub_point.publish(output_point_);
		}
	}
}
Example #19
int main(int argc, char** argv)
{

  if (argc < 5)
  {
    PCL17_INFO ("Usage %s -input_file /in_file -output_file /out_file [options]\n", argv[0]);
    PCL17_INFO (" * where options are:\n"
        "         -tilt <X>  : tilt. Default : 30\n"
        "");
    return -1;
  }

  int tilt = 30;
  std::string input_file;
  std::string output_file;

  pcl17::console::parse_argument(argc, argv, "-input_file", input_file);
  pcl17::console::parse_argument(argc, argv, "-output_file", output_file);
  pcl17::console::parse_argument(argc, argv, "-tilt", tilt);

  pcl17::PointCloud<pcl17::PointXYZRGB> cloud, cloud_transformed, cloud_aligned, cloud_filtered;

  pcl17::io::loadPCDFile(input_file, cloud);

  Eigen::Affine3f view_transform;
  view_transform.matrix() << 0, 0, 1, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 1;

  Eigen::AngleAxis<float> rot(tilt * M_PI / 180, Eigen::Vector3f(0, 1, 0));

  view_transform.prerotate(rot);

  pcl17::transformPointCloud(cloud, cloud_transformed, view_transform);

  pcl17::ModelCoefficients::Ptr coefficients(new pcl17::ModelCoefficients);
  pcl17::PointIndices::Ptr inliers(new pcl17::PointIndices);

  pcl17::SACSegmentation<pcl17::PointXYZRGB> seg;

  seg.setOptimizeCoefficients(true);

  seg.setModelType(pcl17::SACMODEL_PLANE);
  seg.setMethodType(pcl17::SAC_RANSAC);
  seg.setDistanceThreshold(0.05);
  seg.setProbability(0.99);

  seg.setInputCloud(cloud_transformed.makeShared());
  seg.segment(*inliers, *coefficients);

  pcl17::ExtractIndices<pcl17::PointXYZRGB> extract;

  extract.setInputCloud(cloud_transformed.makeShared());
  extract.setIndices(inliers);
  extract.setNegative(true);
  extract.filter(cloud_transformed);

  std::cout << "Z vector: " << coefficients->values[0] << " " << coefficients->values[1] << " "
      << coefficients->values[2] << " " << coefficients->values[3] << std::endl;

  Eigen::Vector3f z_current(coefficients->values[0], coefficients->values[1], coefficients->values[2]);
  Eigen::Vector3f y(0, 1, 0);

  Eigen::Affine3f rotation;
  rotation = pcl17::getTransFromUnitVectorsZY(z_current, y);
  rotation.translate(Eigen::Vector3f(0, 0, coefficients->values[3]));

  pcl17::transformPointCloud(cloud_transformed, cloud_aligned, rotation);

  Eigen::Affine3f res = (rotation * view_transform);

  cloud_aligned.sensor_origin_ = res * Eigen::Vector4f(0, 0, 0, 1);
  cloud_aligned.sensor_orientation_ = res.rotate(Eigen::AngleAxisf(M_PI, Eigen::Vector3f(0, 0, 1))).rotation();

  seg.setInputCloud(cloud_aligned.makeShared());
  seg.segment(*inliers, *coefficients);

  std::cout << "Z vector: " << coefficients->values[0] << " " << coefficients->values[1] << " "
      << coefficients->values[2] << " " << coefficients->values[3] << std::endl;

  pcl17::io::savePCDFile(output_file, cloud_aligned);

  return 0;
}
Example #20
 /**
  * @brief Rotate the view matrix by a given quaternion.
  * @param rotation Rotation to apply to view matrix.
  */
 void rotate (const Eigen::Quaternion<float>& rotation)
 {
     view_matrix.rotate(rotation);
 }
void rawCloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloud)
{
	//double timeScanCur = laserCloud->header.stamp.toSec();
	ros::Time timestamp = laserCloud->header.stamp;
	//bool returnBool;

	pcl::fromROSMsg(*laserCloud, *laserCloudIn);

	if (visualizationFlag)
	{
	viewer->removeAllPointClouds(0);
	viewer->removeAllShapes(0);
	}

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudRGB(new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudLabeled(new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::PointCloud<pcl::PointXYZI>::Ptr cloudRaw(new pcl::PointCloud<pcl::PointXYZI>);
	//	std::vector<std::string> labelsC1;
	//	std::vector<std::string> labelsC2;
	//	pcl::PointCloud<SAFEFeatures>::Ptr featureCloudC1Acc(new pcl::PointCloud<SAFEFeatures>);
	//	pcl::PointCloud<SAFEFeatures>::Ptr featureCloudC2Acc(new pcl::PointCloud<SAFEFeatures>);
	std::vector<std::string> labelsC1;
	std::vector<std::string> labelsC2;
	std::vector<std::string> labelsC3;
	std::vector<std::string> labels;
	pcl::PointCloud<AllFeatures>::Ptr featureCloudAcc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC1Acc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC2Acc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC3Acc(new pcl::PointCloud<AllFeatures>);
	std::vector<Eigen::Matrix3f> confMats;
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr classificationCloud(new pcl::PointCloud<pcl::PointXYZRGB>);

	std::vector<DatasetContainer> dataset;

	std::string folder = "/home/mikkel/catkin_ws/src/trainingSet/";

	pcl::copyPointCloud(*laserCloudIn, *cloudRGB);
	pcl::copyPointCloud(*laserCloudIn, *cloudRaw);

	// Single colored
	if (visualizationFlag)
		{
		pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> color1(cloudRGB, 0, 0, 255);
	viewer->addPointCloud<pcl::PointXYZRGB>(cloudRGB,color1,"cloudRGB",viewP(RawCloud));
		}
	//viewer->addText ("RawCloud", 10, 10, "vPP text", viewP(RawCloud));

	// Ground modeling
	//	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudStat(new pcl::PointCloud<pcl::PointXYZRGB>);
	//	pcl::PointCloud<pcl::PointXYZ>::Ptr planeDistCloud(new pcl::PointCloud<pcl::PointXYZ>);
	//	pcl::copyPointCloud(*cloudRGB,*cloudStat);
	//	pcl::copyPointCloud(*cloudRGB,*planeDistCloud);


	// Remove everything outside the two lines
	pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_filtered(new pcl::PointCloud<pcl::PointXYZI>);
	//pcl::copyPointCloud(*cloudRaw, *cloud_filtered);

	// Rotation
	float thetaZ = theta*PI/180; // The angle of rotation in radians
	Eigen::Affine3f transform_2 = Eigen::Affine3f::Identity();
	transform_2.rotate (Eigen::AngleAxisf (thetaZ, Eigen::Vector3f::UnitZ()));
	pcl::PointCloud<pcl::PointXYZ>::Ptr transformed_cloud (new pcl::PointCloud<pcl::PointXYZ> ());
	pcl::transformPointCloud (*cloudRaw, *cloud_filtered, transform_2);

	// Passthrough
	pcl::PassThrough<pcl::PointXYZI> passX;
	passX.setInputCloud (cloud_filtered);
	passX.setFilterFieldName ("x");
	passX.setFilterLimits (-0.5, 100.0);
	//pass.setFilterLimitsNegative (true);
	passX.filter (*cloud_filtered);

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filteredRGB(new pcl::PointCloud<pcl::PointXYZRGB>);
	if (receivedHough)
	{
		pcl::PassThrough<pcl::PointXYZI> pass;
		pass.setInputCloud (cloud_filtered);
		pass.setFilterFieldName ("y");
		pass.setFilterLimits (r1+WALL_CLEARANCE, r2-WALL_CLEARANCE);
		//pass.setFilterLimitsNegative (true);
		pass.filter (*cloud_filtered);
	}

	pcl::copyPointCloud(*cloud_filtered, *cloud_filteredRGB);
	if (visualizationFlag)
		{
	pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> color9(cloudRGB, 0, 0, 255);
	viewer->addPointCloud<pcl::PointXYZRGB>(cloud_filteredRGB,color9,"LinesFiltered",viewP(LinesFiltered));
	viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "LinesFiltered");
	viewer->addText ("LinesFiltered", 10, 10, fontsize, 1, 1, 1, "LinesFiltered text", viewP(LinesFiltered));
		}


	// Grid Minimum
//	pcl::PointCloud<pcl::PointXYZI>::Ptr gridCloud(new pcl::PointCloud<pcl::PointXYZI>);
//	pcl::GridMinimum<pcl::PointXYZI> gridm(1.0); // Set grid resolution
//	gridm.setInputCloud(cloud_filtered);
//	gridm.filter(*gridCloud);

	//*** Transform point cloud to adjust for a Ground Plane ***//
	pcl::ModelCoefficients ground_coefficients;
	pcl::PointIndices ground_indices;
	pcl::SACSegmentation<pcl::PointXYZI> ground_finder;
	ground_finder.setOptimizeCoefficients(true);
	ground_finder.setModelType(pcl::SACMODEL_PLANE);
	ground_finder.setMethodType(pcl::SAC_RANSAC);
	ground_finder.setDistanceThreshold(0.15);
	ground_finder.setInputCloud(cloud_filtered);
	ground_finder.segment(ground_indices, ground_coefficients);

	// Convert plane normal vector from type ModelCoefficients to Vector4f
	Eigen::Vector3f np = Eigen::Vector3f(ground_coefficients.values[0],ground_coefficients.values[1],ground_coefficients.values[2]);
	// Find rotation vector, u, and angle, theta
	Eigen::Vector3f u = np.cross(Eigen::Vector3f(0,0,1));
	u.normalize();
	float theta = acos(np.dot(Eigen::Vector3f(0,0,1)));
	// Construct transformation matrix (rotation matrix from axis and angle)
	Eigen::Affine3f tf = Eigen::Affine3f::Identity();
	float d = ground_coefficients.values[3];
	tf.translation() << d*np[0], d*np[1], d*np[2];
	tf.rotate (Eigen::AngleAxisf (theta, u));
	// Execute the transformation
	pcl::PointCloud<pcl::PointXYZI>::Ptr transformedCloud (new pcl::PointCloud<pcl::PointXYZI> ());
	pcl::transformPointCloud(*cloud_filtered, *transformedCloud, tf);


	// Compute statistical moments for least significant direction in neighborhood
#if (OLD_METHOD)
	pcl::NeighborhoodFeatures<pcl::PointXYZI, AllFeatures> features;
	pcl::PointCloud<AllFeatures>::Ptr featureCloud(new pcl::PointCloud<AllFeatures>);
	features.setInputCloud(transformedCloud);
	pcl::search::KdTree<pcl::PointXYZI>::Ptr search_tree5( new pcl::search::KdTree<pcl::PointXYZI>());
	features.setSearchMethod(search_tree5);
	//principalComponentsAnalysis.setRadiusSearch(0.6);
	//features.setKSearch(30);
	features.setRadiusSearch(0.01); // This value does not do anything! Look inside "NeighborhoodFeatures.h" to see the adaptive radius calculation.
	features.compute(*featureCloud);

	//	ros::Time tFeatureCalculation2 = ros::Time::now();
	//	ros::Duration tFeatureCalculation = tFeatureCalculation2-tPreprocessing2;
	//	if (executionTimes==true)
	//		ROS_INFO("Feature calculation time = %i",tFeatureCalculation.nsec);



	visualizeFeature(*cloudRGB, *featureCloud, 0, viewP(GPDistMean), "GPDistMean");
	visualizeFeature(*cloudRGB, *featureCloud, 1, viewP(GPDistMin), "GPDistMin");
	visualizeFeature(*cloudRGB, *featureCloud, 2, viewP(GPDistPoint), "GPDistPoint");
	visualizeFeature(*cloudRGB, *featureCloud, 3, viewP(GPDistVar), "GPDistVar");
	visualizeFeature(*cloudRGB, *featureCloud, 4, viewP(PCA1), "PCA1"); // 4 = PCA1
	visualizeFeature(*cloudRGB, *featureCloud, 5, viewP(PCA2), "PCA2"); // 3 = GPVar
	visualizeFeature(*cloudRGB, *featureCloud, 6, viewP(PCA3), "PCA3"); // 0 = GPMean
	visualizeFeature(*cloudRGB, *featureCloud, 7, viewP(PCANX), "PCANX");
	visualizeFeature(*cloudRGB, *featureCloud, 8, viewP(PCANY), "PCANY");
	visualizeFeature(*cloudRGB, *featureCloud, 9, viewP(PCANZ), "PCANZ"); // 9 = PCANZ
	visualizeFeature(*cloudRGB, *featureCloud, 10, viewP(PointDist), "PointDist");
	visualizeFeature(*cloudRGB, *featureCloud, 11, viewP(RSS), "RSS");
	visualizeFeature(*cloudRGB, *featureCloud, 12, viewP(Reflectance), "Reflectance");

	ROS_INFO("Computed all neighborhood features");

	std::vector<float>* centroid = new std::vector<float>();
	std::vector<float>* stds = new std::vector<float>();

	//*** Testing ***//
	if (training == false)
	{
		Eigen::Matrix3f confMat = Eigen::Matrix3f::Zero();
		if (testdata==true)
		{
			*featureCloud = *featureCloudAcc;
		}

		pcl::copyPointCloud(*cloudRGB,*classificationCloud);

		// Load feature normalization parameters
		std::ifstream input_file((folder + "centroid").c_str());
		std::istream_iterator<float> start(input_file), end;
		std::vector<float> numbers(start, end);
		std::copy(numbers.begin(), numbers.end(), std::back_inserter(*centroid));
		std::ifstream input_file2((folder + "stds").c_str());
		std::istream_iterator<float> start2(input_file2), end2;
		std::vector<float> numbers2(start2, end2);
		std::copy(numbers2.begin(), numbers2.end(), std::back_inserter(*stds));


		// Normalize features
		//		if (pcl::io::savePCDFile ("testFeaturesNonNormalized.pcd", *featureCloud) != 0)
		//			return (false);

		ros::Time tNormalization1 = ros::Time::now();
		normalizeFeatures(*featureCloud,*featureCloud, &centroid, &stds);
		ros::Time tNormalization2 = ros::Time::now();
		ros::Duration tNormalization = tNormalization2-tNormalization1;
		if (executionTimes==true)
			ROS_INFO("Normalization time = %f",(float)(tNormalization.nsec)/1000000);

		pcl::PointIndices::Ptr unclassifiedIndices (new pcl::PointIndices ());
		//		if (pcl::io::savePCDFile ("testFeatures.pcd", *featureCloud) != 0)
		//			return (false);

		CvSVM SVM;
		//int dim = sizeof(featureCloud->points[0])/sizeof(float);
		int dim = useFeatures.size(); // number of feature dimensions fed to the classifier

		SVM.load((folder + "svm").c_str());
		//SVM.load((folder + "svm2014-11-06-11-22-59").c_str());
		//SVM.load((folder + "svm2014-11-07-14-00-31").c_str());
		//SVM.load((folder + "svm2014-11-07-13-16-28").c_str());

		ros::Time tClassification1 = ros::Time::now();
		//int nMissingLabels = 0;
		for (size_t i=0;i<classificationCloud->points.size();i++)
		{
			// Pack the selected feature dimensions for this point into a 1 x dim sample
			std::vector<float> dataSVM(dim);
			for (int d=0;d<dim;d++)
			{
				dataSVM[d] = featureCloud->points[i].data[useFeatures[d]];
			}
			Mat sampleMat(1, dim, CV_32FC1, dataSVM.data());

			float response = SVM.predict(sampleMat);

			if (response == 1.0)
				classificationCloud->points[i].b = 255;
			else if (response == 2.0)
				classificationCloud->points[i].g = 255;
			else if (response == 3.0)
				classificationCloud->points[i].r = 255;
			else
				ROS_INFO("Another class label = %f",response);

			if (testdata==true)
			{
				int groundTruth = 0;
				if (labels[i].compare("ground") == 0)
					groundTruth = 1;
				else if (labels[i].compare("vegetation") == 0)
					groundTruth = 2;
				else if (labels[i].compare("object") == 0)
					groundTruth = 3;
				if (groundTruth > 0) // skip points with unknown ground truth
					confMat(groundTruth-1,(int)response-1)++;
			}
		}
		ros::Time tClassification2 = ros::Time::now();
		ros::Duration tClassification = tClassification2-tClassification1;
		if (executionTimes==true)
			ROS_INFO("Classification time = %f",(float)(tClassification.nsec)/100000);

		//ROS_INFO("Number of missing class labels = %i", nMissingLabels);

		pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC(classificationCloud);
		viewer->addPointCloud<pcl::PointXYZRGB>(classificationCloud,colorC,"Classification",viewP(Classification));
		viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Classification");
		viewer->addText ("Classification", 10, 10, fontsize, 1, 1, 1, "Classification text", viewP(Classification));

		// Test set - calculate performance statistics
		if (testdata == true)
		{
			std::cout << confMat << std::endl;
			confMats.push_back(confMat);

			if (true)//k==files.size()-1)
			{
				ofstream myfile;
				string resultsFolder = "/home/mikkel/catkin_ws/src/Results/";
				myfile.open ((resultsFolder + "run_number_here.txt").c_str());

				// Distance threshold
				myfile << "DistanceThreshold:" << distThr << ";\n";

				// Neighborhood parameters
				myfile << "Neighborhood:" << rmin << ";" << rmindist << ";" << rmax << ";" << rmaxdist << ";\n";

				// Features
				myfile << "Features:";
				for (int d=0;d<dim;d++)
					myfile << useFeatures[d] << ";";
				myfile << "\n";

				myfile << "ConfusionMatrix:";
				for (size_t i=0;i<confMats.size();i++)
				{
					for (int r=0;r<confMats[i].rows();r++)
						for (int c=0;c<confMats[i].cols();c++)
							myfile << confMats[i](r,c) << ";";
					myfile << "\n";
				}

				myfile << "Accuracy:" << confMat.diagonal().sum()/confMat.sum() << ";\n";


				myfile.close();
			}

		}
		featureCloudAcc->clear();
		labels.clear();

#else
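		// Without OLD_METHOD the feature/SVM pipeline is skipped entirely: after the
		// ground-leveling transform above, any point higher than 0.1 m is treated as an
		// obstacle and coloured yellow (r = g = 255) for the clustering step below.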

		for (size_t i=0;i<transformedCloud->size();i++)
		{
			pcl::PointXYZI pTmp2 = transformedCloud->points[i];
			pcl::PointXYZRGB pTmp;
			pTmp.x = pTmp2.x;
			pTmp.y = pTmp2.y;
			pTmp.z = pTmp2.z;
			pTmp.r = 255;
			pTmp.g = 255;
			if (pTmp.z > 0.1) // Object
			{
				classificationCloud->push_back(pTmp);
			}
		}

		if (visualizationFlag)
		{
			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC(classificationCloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(classificationCloud,colorC,"Classification",viewP(Classification));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Classification");
			viewer->addText ("Classification", 10, 10, fontsize, 1, 1, 1, "Classification text", viewP(Classification));
		}


#endif





		// REGION GROWING
		//ROS_INFO("region growing 5");
		pcl::PointCloud <pcl::PointXYZRGB>::Ptr objectCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
		//pcl::PointCloud <pcl::PointXYZRGB>::Ptr cloud (new pcl::PointCloud <pcl::PointXYZRGB>);

		for (size_t i=0;i<classificationCloud->size();i++)
		{
			pcl::PointXYZRGB pTmp = classificationCloud->points[i];
			if ((pTmp.r == 255) || (pTmp.g == 255)) // Object
			{
				objectCloud->push_back(pTmp);
			}
		}
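		// Cluster the obstacle points with PCL region growing. The curvature test is
		// disabled and the residual threshold is set to min_cluster_distance, so points
		// are essentially grouped by spatial proximity (Euclidean-style clustering).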

		pcl::RegionGrowing<pcl::PointXYZRGB, pcl::Normal> reg;

		reg.setInputCloud (objectCloud);
		//reg.setIndices (indices);
		pcl::search::Search<pcl::PointXYZRGB>::Ptr tree = boost::shared_ptr<pcl::search::Search<pcl::PointXYZRGB> > (new pcl::search::KdTree<pcl::PointXYZRGB>);
		reg.setSearchMethod (tree);
		//reg.setDistanceThreshold (0.0001f);
		//reg.setResidualThreshold(0.01f);
		//ROS_INFO("res = %f",reg.getSmoothnessThreshold());
		//reg.setPointColorThreshold (6);
		//reg.setRegionColorThreshold (5);
		reg.setMinClusterSize (1);
		reg.setResidualThreshold(min_cluster_distance);
		reg.setCurvatureTestFlag(false);
		//		reg.setResidualTestFlag(true);
		//		reg.setNormalTestFlag(false);
		//		reg.setSmoothModeFlag(false);
		std::vector <pcl::PointIndices> clusters;
		reg.extract (clusters);

		//ROS_INFO("clusters = %d", clusters.size());

		//std::cout << "testefter" << std::endl;

		pcl::PointCloud <pcl::PointXYZRGB>::Ptr colored_cloud = reg.getColoredCloud ();

		pcl::PointCloud <pcl::PointXYZRGB>::Ptr clustersFilteredCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
		std::vector <pcl::PointIndices> clustersFiltered;
		//pcl::PointXYZRGB min_pt, max_pt;
		Eigen::Vector4f min_pt, max_pt;
		obstacle_detection::boundingbox bbox;
		obstacle_detection::boundingboxes bboxes;
		bboxes.header.stamp = timestamp;
		bboxes.header.frame_id = "/velodyne";

		if (visualizationFlag)
			viewer->removeAllShapes(viewP(SegmentsFiltered));
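		// For each sufficiently large cluster (> 100 points), compute an axis-aligned
		// bounding box from its min/max coordinates, publish it on the boundingboxes
		// message and, if visualization is on, draw it as a wireframe cube.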


		for (size_t i=0;i<clusters.size();i++)
		{
			//ROS_INFO("cluster size = %d",clusters[i].indices.size());
			if (clusters[i].indices.size() > 100)
			{
				clustersFiltered.push_back(clusters[i]);

				for (size_t pp=0;pp<clusters[i].indices.size();pp++)
				{
					clustersFilteredCloud->push_back(colored_cloud->points[clusters[i].indices[pp]]);
				}

				// Calculate bounding box
				pcl::getMinMax3D(*colored_cloud, clusters[i], min_pt, max_pt);
				bbox.start.x = min_pt[0];
				bbox.start.y = min_pt[1];
				bbox.start.z = min_pt[2];
				bbox.width.x = max_pt[0]-min_pt[0];
				bbox.width.y = max_pt[1]-min_pt[1];
				bbox.width.z = max_pt[2]-min_pt[2];
				bbox.header.stamp = timestamp;
				bbox.header.frame_id = "/velodyne";

				bboxes.boundingboxes.push_back(bbox);

				if (visualizationFlag)
					viewer->addCube (min_pt[0], max_pt[0], min_pt[1], max_pt[1], min_pt[2], max_pt[2], 1.0f, 1.0f, 1.0f, (boost::format("%04d") % i).str().c_str(), viewP(SegmentsFiltered));
			}


		}

		pubBBoxesPointer->publish(bboxes);
		//
		//		//pcl::visualization::CloudViewer viewer ("Cluster viewer");
		//		//viewer.showCloud (colored_cloud);
		//
		if (visualizationFlag)
		{
			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC2(colored_cloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(colored_cloud,colorC2,"Segments",viewP(Segments));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Segments");
			viewer->addText ("Segments", 10, 10, fontsize, 1, 1, 1, "Segments text", viewP(Segments));

			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC3(clustersFilteredCloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(clustersFilteredCloud,colorC3,"SegmentsFiltered",viewP(SegmentsFiltered));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "SegmentsFiltered");
			viewer->addText ("SegmentsFiltered", 10, 10, fontsize, 1, 1, 1, "SegmentsFiltered text", viewP(SegmentsFiltered));
		}
		// BOUNDING BOXES
		//		viewer->addCube (min_pt.x, max_pt.x, min_pt.y, max_pt.y, min_pt.z, max_pt.z);

		//		std_msgs::Float64 testF;
		//		testF.data = 10;

		// Compute principal directions
		//		Eigen::Vector4f pcaCentroid;
		//		pcl::compute3DCentroid(*clustersFilteredCloud, pcaCentroid);
		//		Eigen::Matrix3f covariance;
		//		computeCovarianceMatrixNormalized(*clustersFilteredCloud, pcaCentroid, covariance);
		//		Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigen_solver(covariance, Eigen::ComputeEigenvectors);
		//		Eigen::Matrix3f eigenVectorsPCA = eigen_solver.eigenvectors();
		//		eigenVectorsPCA.col(2) = eigenVectorsPCA.col(0).cross(eigenVectorsPCA.col(1));
		//
		//		// Transform the original cloud to the origin where the principal components correspond to the axes.
		//		Eigen::Matrix4f projectionTransform(Eigen::Matrix4f::Identity());
		//		projectionTransform.block<3,3>(0,0) = eigenVectorsPCA.transpose();
		//		projectionTransform.block<3,1>(0,3) = -1.f * (projectionTransform.block<3,3>(0,0) * pcaCentroid.head<3>());
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudPointsProjected (new pcl::PointCloud<pcl::PointXYZRGB>);
		//		pcl::transformPointCloud(*clustersFilteredCloud, *cloudPointsProjected, projectionTransform);
		//		// Get the minimum and maximum points of the transformed cloud.
		//		pcl::PointXYZRGB minPoint, maxPoint;
		//		pcl::getMinMax3D(*cloudPointsProjected, minPoint, maxPoint);
		//		const Eigen::Vector3f meanDiagonal = 0.5f*(maxPoint.getVector3fMap() + minPoint.getVector3fMap());
		//
		//		// Final transform
		//		const Eigen::Quaternionf bboxQuaternion(eigenVectorsPCA); //Quaternions are a way to do rotations https://www.youtube.com/watch?v=mHVwd8gYLnI
		//		const Eigen::Vector3f bboxTransform = eigenVectorsPCA * meanDiagonal + pcaCentroid.head<3>();
		//		viewer->addCube(bboxTransform, bboxQuaternion, maxPoint.x - minPoint.x, maxPoint.y - minPoint.y, maxPoint.z - minPoint.z, "bbox");

		//pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> colorTTF1(featureCloudTest, 0, 255, 0);
		//	pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorTTF1(featureCloudTest);
		//	PCL_INFO("featureCloudTest size = %i",featureCloudTest->points.size());
		//	pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> colorTTF2(featureCloudTrain, 255, 0, 0);
		//	viewer->addPointCloud<pcl::PointXYZRGB>(featureCloudTest,colorTTF1,"TestTrainFeatures1",viewP(TestTrainFeatures));
		//	viewer->addPointCloud<pcl::PointXYZRGB>(featureCloudTrain,colorTTF2,"TestTrainFeatures2",viewP(TestTrainFeatures));
		//	viewer->addText ("TestTrainFeatures", 10, 10, "TestTrainFeatures text", viewP(TestTrainFeatures));

		//		sensor_msgs::PointCloud2 processedCloud;
		//		pcl::toROSMsg(*classificationCloud, processedCloud);
		//		processedCloud.header.stamp = ros::Time::now();//processedCloud->header.stamp;
		//		processedCloud.header.frame_id = "/velodyne";
		//		pubProcessedPointCloudPointer->publish(processedCloud);
		//
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr groundCloud2D(new pcl::PointCloud<pcl::PointXYZRGB>);
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr objectCloud2D(new pcl::PointCloud<pcl::PointXYZRGB>);
		//
		//		for (size_t i=0;i<classificationCloud->size();i++)
		//		{
		//			pcl::PointXYZRGB pTmp = classificationCloud->points[i];
		//			pTmp.z = 0; // 3D -> 2D
		//			if ((pTmp.r == 255) || (pTmp.g == 255)) // Object
		//			{
		//				objectCloud2D->push_back(pTmp);
		//			}
		//			else if (pTmp.b == 255) // Object
		//			{
		//				groundCloud2D->push_back(pTmp);
		//			}
		//		}
		//
		//		//std::vector<int> indexVector;
		//		unsigned int leafNodeCounter = 0;
		//		unsigned int voxelDensity = 0;
		//		unsigned int totalPoints = 0;
		//		//int occupancyW = OCCUPANCY_WIDTH/OCCUPANCY_GRID_RESOLUTION;
		//		//int occupancyH = OCCUPANCY_DEPTH/OCCUPANCY_GRID_RESOLUTION;
		//		std::vector<unsigned int> ocGroundGrid(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH, 0);
		//		std::vector<unsigned int> ocObjectGrid(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH, 0);
		//		std::vector<signed char> ocGridFinal(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH,-1);
		//
		//		// Ground grid
		//		pcl::octree::OctreePointCloudDensity< pcl::PointXYZRGB > octreeGround (OCCUPANCY_GRID_RESOLUTION);
		//		octreeGround.setInputCloud (groundCloud2D);
		//		pcl::PointXYZ bot(-OCCUPANCY_DEPTH_M/2,-OCCUPANCY_WIDTH_M/2,-0.5);
		//		pcl::PointXYZ top(OCCUPANCY_DEPTH_M/2,OCCUPANCY_WIDTH_M/2,0.5);
		//		octreeGround.defineBoundingBox(bot.x, bot.y, bot.z, top.x, top.y, top.z);    // I have already calculated the BBox of my point cloud
		//		octreeGround.addPointsFromInputCloud ();
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it1;
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it1_end = octreeGround.leaf_end();
		//		int minx = 1000;
		//		int miny = 1000;
		//		int maxx = -1000;
		//		int maxy = -1000;
		//		for (it1 = octreeGround.leaf_begin(); it1 != it1_end; ++it1)
		//		{
		//			pcl::octree::OctreePointCloudDensityContainer& container = it1.getLeafContainer();
		//			voxelDensity = container.getPointCounter();
		//			Eigen::Vector3f min_pt, max_pt;
		//			octreeGround.getVoxelBounds (it1, min_pt, max_pt);
		//			//ROS_INFO("x = %f, y = %f, z = %f, x2 = %f, y2 = %f, z2 = %f",min_pt[0],min_pt[1],min_pt[2],max_pt[0],max_pt[1],max_pt[2]);
		//			totalPoints += voxelDensity;
		//			if (min_pt[0] < minx)
		//				minx = min_pt[0];
		//			if (min_pt[0] > maxx)
		//				maxx = min_pt[0];
		//			if (min_pt[1] < miny)
		//				miny = min_pt[1];
		//			if (min_pt[1] > maxy)
		//				maxy = min_pt[1];
		//
		//			int r = OCCUPANCY_DEPTH-(min_pt[0]+OCCUPANCY_DEPTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			int c = OCCUPANCY_WIDTH-(min_pt[1]+OCCUPANCY_WIDTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//
		//			//			if ((r == 51) || (c==51))
		//			//				ROS_INFO("r = %i,c = %i",r,c);
		//
		//			//if (((int)min_pt[0] == -1) || ((int)min_pt[1]==-1))
		//			//ROS_INFO("min_pt[0] = %f, min_pt[1]=%f",min_pt[0],min_pt[1]);
		//			if (!((r < 0) || (c < 0) || (r > OCCUPANCY_DEPTH-1) || (c > OCCUPANCY_WIDTH-1)))
		//				ocGroundGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("density = %i, r = %i, c=%i",voxelDensity, r, c);
		//			//ROS_INFO("x (%f) -> r (%i), y (%f) -> c (%i)",min_pt[0],r,min_pt[1],c);
		//			leafNodeCounter++;
		//		}
		//
		//
		//		// Object grid
		//		pcl::octree::OctreePointCloudDensity< pcl::PointXYZRGB > octreeObject (OCCUPANCY_GRID_RESOLUTION);
		//		octreeObject.setInputCloud (objectCloud2D);
		//		octreeObject.defineBoundingBox(bot.x, bot.y, bot.z, top.x, top.y, top.z);    // I have already calculated the BBox of my point cloud
		//		octreeObject.addPointsFromInputCloud ();
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it2;
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it2_end = octreeObject.leaf_end();
		//		minx = 1000;
		//		miny = 1000;
		//		maxx = -1000;
		//		maxy = -1000;
		//		leafNodeCounter = 0;
		//		voxelDensity = 0;
		//		totalPoints = 0;
		//		int t1 = 0;
		//		int t2 = 0;
		//		int t3 = 0;
		//		for (it2 = octreeObject.leaf_begin(); it2 != it2_end; ++it2)
		//		{
		//			pcl::octree::OctreePointCloudDensityContainer& container = it2.getLeafContainer();
		//			voxelDensity = container.getPointCounter();
		//			Eigen::Vector3f min_pt, max_pt;
		//			octreeObject.getVoxelBounds (it2, min_pt, max_pt);
		//			//ROS_INFO("x = %f, y = %f, z = %f, x2 = %f, y2 = %f, z2 = %f",min_pt[0],min_pt[1],min_pt[2],max_pt[0],max_pt[1],max_pt[2]);
		//			totalPoints += voxelDensity;
		//			if (min_pt[0] < minx)
		//				minx = min_pt[0];
		//			if (min_pt[0] > maxx)
		//				maxx = min_pt[0];
		//			if (min_pt[1] < miny)
		//				miny = min_pt[1];
		//			if (min_pt[1] > maxy)
		//				maxy = min_pt[1];
		//
		//			int r = OCCUPANCY_DEPTH-(min_pt[0]+OCCUPANCY_DEPTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			int c = OCCUPANCY_WIDTH-(min_pt[1]+OCCUPANCY_WIDTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			if (!((r < 0) || (c < 0) || (r > OCCUPANCY_DEPTH-1) || (c > OCCUPANCY_WIDTH-1)))
		//				ocObjectGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("object points = %i -> %i: x (%f) -> r (%i), y (%f) -> c (%i)",voxelDensity,ocGridFinal[r+OCCUPANCY_DEPTH*c],min_pt[0],r,min_pt[1],c);
		//			//ocGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("density = %i, r = %i, c=%i",voxelDensity, r, c);
		//
		//			leafNodeCounter++;
		//		}
		//
		//		for (int r=0;r<OCCUPANCY_DEPTH;r++)
		//		{
		//			for (int c=0;c<OCCUPANCY_WIDTH;c++)
		//			{
		//				if ((ocGroundGrid[r+OCCUPANCY_DEPTH*c] == 0) && (ocObjectGrid[r+OCCUPANCY_DEPTH*c] <= 1))
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = -1.0; // 0.5 default likelihood
		//					t1++;
		//				}
		//				else if ((ocObjectGrid[r+OCCUPANCY_DEPTH*c] == 0))// && (ocGroundGrid[r+OCCUPANCY_DEPTH*c] > 0))
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = OCCUPANCY_MIN_PROB;
		//					t2++;
		//				}
		//				else
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = 50+(OCCUPANCY_MAX_PROB-50)*ocObjectGrid[r+OCCUPANCY_DEPTH*c]/(ocObjectGrid[r+OCCUPANCY_DEPTH*c]+ocGroundGrid[r+OCCUPANCY_DEPTH*c]);
		//					t3++;
		//				}
		//			}
		//		}
		//
		//		nav_msgs::OccupancyGrid occupancyGrid;
		//		occupancyGrid.info.resolution = OCCUPANCY_GRID_RESOLUTION;
		//		occupancyGrid.header.stamp = timestamp;//ros::Time::now();
		//		occupancyGrid.header.frame_id = "/velodyne";
		//		occupancyGrid.info.width = OCCUPANCY_WIDTH;
		//		occupancyGrid.info.height = OCCUPANCY_DEPTH;
		//		//geometry_msgs::Pose lidarPose;
		//		geometry_msgs::Point lidarPoint;
		//		lidarPoint.x = 0;
		//		lidarPoint.y = 0;
		//		lidarPoint.z = 0;
		//		geometry_msgs::Quaternion lidarQuaternion;
		//		lidarQuaternion.w = 1;
		//		lidarQuaternion.x = 0;
		//		lidarQuaternion.y = 0;
		//		lidarQuaternion.z = 0;
		//		occupancyGrid.info.origin.position = lidarPoint;
		//		occupancyGrid.info.origin.orientation = lidarQuaternion;
		//		occupancyGrid.data = ocGridFinal;
		//
		//		//ROS_INFO("total points = %i, total leaves = %i",totalPoints,leafNodeCounter);
		//		//ROS_INFO("minx = %i, maxx = %i, miny = %i, maxy = %i",minx,maxx,miny,maxy);
		//		//ROS_INFO("t1 = %i, t2 = %i, t3 = %i",t1,t2,t3);
		//
		//
		//		pubOccupancyGridPointer->publish(occupancyGrid);


		//		int l = 0;
		//		while (*++itLeafs)
		//		{
		//		    //Iteratively explore only the leaf nodes..
		//		    std::vector<PointXYZ> points;
		//		    itLeafs->getData(points);
		//		    l++;
		//		}
		//
		//		ROS_INFO("l = %i",l);

		//pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (OCCUPANCY_GRID_RESOLUTION);

		//		sensor_msgs::PointCloud2::Ptr cloud_filtered (new sensor_msgs::PointCloud2 ());
		//		pcl::VoxelGrid<sensor_msgs::PointCloud2> sor;
		//		sor.setInputCloud (processedCloud);
		//		sor.setLeafSize (0.01f, 0.01f, 0.01f);
		//		sor.filter (*cloud_filtered);
		if (n==0)
		{
			//viewer->setCameraPosition(-20,0,10,1,0,2,1,0,2,v1);
			//viewer->setCameraPosition(-20,0,10,1,0,2,1,0,2,v2);
			//viewer->loadCameraParameters("pcl_video.cam");
		}
#if (OLD_METHOD)
	}
#endif

	if (visualizationFlag)
		viewer->spinOnce();

	// Save screenshot
//	std::stringstream tmp;
//	tmp << n;
//	viewer->saveScreenshot("/home/mikkel/images/" + (boost::format("%04d") % n).str() + ".png");
//	n++;
}
Exemple #22
0
	void cloud_cb(
			const typename pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud) {

		iter++;
		if(iter != skip) return;
		iter = 0;

		pcl::PointCloud<pcl::PointXYZRGB> cloud_transformed,
				cloud_aligned, cloud_filtered;

		Eigen::Affine3f view_transform;
		view_transform.matrix() << 0, 0, 1, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 1;

		Eigen::AngleAxis<float> rot(tilt * M_PI / 180,
				Eigen::Vector3f(0, 1, 0));

		view_transform.prerotate(rot);

		pcl::transformPointCloud(*cloud, cloud_transformed, view_transform);
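		// view_transform maps the camera's optical frame (assumed z forward, x right,
		// y down) into a robot-style frame (x forward, y left, z up) and then prerotates
		// by the sensor tilt about the y axis, so the cloud is roughly upright before the
		// plane fit below.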

		pcl::ModelCoefficients::Ptr coefficients(
				new pcl::ModelCoefficients);
		pcl::PointIndices::Ptr inliers(new pcl::PointIndices);

		pcl::SACSegmentation<pcl::PointXYZRGB> seg;

		seg.setOptimizeCoefficients(true);

		seg.setModelType(pcl::SACMODEL_PLANE);
		seg.setMethodType(pcl::SAC_RANSAC);
		seg.setDistanceThreshold(0.05);
		seg.setProbability(0.99);

		seg.setInputCloud(cloud_transformed.makeShared());
		seg.segment(*inliers, *coefficients);
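		// RANSAC fit of the single dominant plane (e.g. floor or table top);
		// "coefficients" holds the plane as ax + by + cz + d = 0 and "inliers" indexes
		// the points that lie on it.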

		pcl::ExtractIndices<pcl::PointXYZRGB> extract;

		extract.setInputCloud(cloud_transformed.makeShared());
		extract.setIndices(inliers);
		extract.setNegative(true);
		extract.filter(cloud_transformed);

		std::cout << "Z vector: " << coefficients->values[0] << " "
				<< coefficients->values[1] << " " << coefficients->values[2]
				<< " " << coefficients->values[3] << std::endl;

		Eigen::Vector3f z_current(coefficients->values[0],
				coefficients->values[1], coefficients->values[2]);
		Eigen::Vector3f y(0, 1, 0);

		Eigen::Affine3f rotation;
		rotation = pcl::getTransFromUnitVectorsZY(z_current, y);
		rotation.translate(Eigen::Vector3f(0, 0, coefficients->values[3]));

		pcl::transformPointCloud(cloud_transformed, cloud_aligned, rotation);
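		// getTransFromUnitVectorsZY builds a rotation that maps the fitted plane normal
		// onto the z axis (using y as the reference direction); shifting by the plane
		// offset d then places the plane near z = 0, so cloud_aligned is ground-aligned.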

		Eigen::Affine3f res = (rotation * view_transform);

		cloud_aligned.sensor_origin_ = res * Eigen::Vector4f(0, 0, 0, 1);
		cloud_aligned.sensor_orientation_ = res.rotate(
				Eigen::AngleAxisf(M_PI, Eigen::Vector3f(0, 0, 1))).rotation();
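		// Store the combined transform in the cloud's sensor pose so downstream viewers
		// can recover the original viewpoint; the extra 180 degree rotation about z
		// presumably flips the orientation to the convention expected by the consumer.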

		seg.setInputCloud(cloud_aligned.makeShared());
		seg.segment(*inliers, *coefficients);

		std::cout << "Z vector: " << coefficients->values[0] << " "
				<< coefficients->values[1] << " " << coefficients->values[2]
				<< " " << coefficients->values[3] << std::endl;

		pub.publish(cloud_aligned);



	}