Example #1
Quaterniond
ArrowVisualizer::orientation(const Entity* parent, double t) const
{
    // The subclass computes the direction
    Vector3d targetDirection = direction(parent, t);

    Quaterniond rotation = Quaterniond::Identity();

    // The arrow geometry points in the +x direction, so calculate the rotation
    // required to make the arrow point in the target direction.
    rotation.setFromTwoVectors(Vector3d::UnitX(), targetDirection);

    return rotation;
}
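A quick standalone check (my own sketch, independent of the visualizer classes above): Eigen's setFromTwoVectors computes the shortest-arc rotation taking the first argument onto the second, and it accepts non-unit inputs, so the returned quaternion maps UnitX() onto the normalized target direction.

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    // Arbitrary, deliberately non-normalized direction (hypothetical value).
    Eigen::Vector3d targetDirection(3.0, -1.0, 2.0);

    Eigen::Quaterniond rotation;
    rotation.setFromTwoVectors(Eigen::Vector3d::UnitX(), targetDirection);

    // Applying the quaternion to +x should reproduce the normalized direction.
    Eigen::Vector3d rotated = rotation * Eigen::Vector3d::UnitX();
    std::cout << "alignment error = "
              << (rotated - targetDirection.normalized()).norm() << "\n";
    return 0;
}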
Example #2
ArrowReferenceMark::ArrowReferenceMark(const Body& _body) :
    body(_body),
    size(1.0),
    color(1.0f, 1.0f, 1.0f),
#ifdef USE_HDR
    opacity(0.0f)
#else
    opacity(1.0f)
#endif
{
}


void
ArrowReferenceMark::setSize(float _size)
{
    size = _size;
}


void
ArrowReferenceMark::setColor(Color _color)
{
    color = _color;
}


void
ArrowReferenceMark::render(Renderer* /* renderer */,
                           const Vector3f& /* position */,
                           float /* discSize */,
                           double tdb) const
{
    Vector3d v = getDirection(tdb);
    if (v.norm() < 1.0e-12)
    {
        // Skip rendering of zero-length vectors
        return;
    }

    v.normalize();
    Quaterniond q;
    q.setFromTwoVectors(Vector3d::UnitZ(), v);

    if (opacity == 1.0f)
    {
        // Enable depth buffering
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_TRUE);
        glDisable(GL_BLEND);
    }
    else
    {
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        glEnable(GL_BLEND);
#ifdef USE_HDR
        glBlendFunc(GL_ONE_MINUS_SRC_ALPHA, GL_SRC_ALPHA);
#else
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
#endif
    }

    glPushMatrix();
    glRotate(q.cast<float>());
    glScalef(size, size, size);
    glDisable(GL_LIGHTING);
    glDisable(GL_TEXTURE_2D);

    float shaftLength = 0.85f;
    float headLength = 0.10f;
    float shaftRadius = 0.010f;
    float headRadius = 0.025f;
    unsigned int nSections = 30;

    glColor4f(color.red(), color.green(), color.blue(), opacity);
    RenderArrow(shaftLength, headLength, shaftRadius, headRadius, nSections);

    glPopMatrix();

    glDisable(GL_DEPTH_TEST);
    glDepthMask(GL_FALSE);
    glEnable(GL_TEXTURE_2D);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE);
}
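glRotate(q.cast<float>()) above is a helper from the surrounding codebase, not a core OpenGL call. As a hedged sketch (my assumption, not the original helper), the same rotation can be applied with fixed-function OpenGL by converting the quaternion to axis-angle form:

#include <Eigen/Geometry>
#include <GL/gl.h>

// Hypothetical stand-in for the glRotate(Quaternionf) helper used above.
static void applyQuaternionRotation(const Eigen::Quaternionf& q)
{
    Eigen::AngleAxisf aa(q);                // axis-angle form of q
    const float radToDeg = 57.2957795f;     // 180 / pi
    glRotatef(aa.angle() * radToDeg,
              aa.axis().x(), aa.axis().y(), aa.axis().z());
}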
Example #3
    void PointcloudProc::match(const Frame& frame0, const Frame& frame1, 
          const Eigen::Vector3d& trans, const Eigen::Quaterniond& rot, 
          std::vector<cv::DMatch>& matches) const
    {
      PointCloud<PointXYZRGBNormal> transformed_cloud;
      
      // First, transform the current frame. (Is this inverse?) (Or just transform the other cloud?)
      //transformPointCloudWithNormals<PointXYZRGBNormal>(frame1.cloud, transformed_cloud, -trans.cast<float>(), rot.cast<float>().conjugate());
      
      //transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, -trans.cast<float>(), rot.cast<float>().conjugate());
      transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, Vector3f(0,0,0), rot.cast<float>().conjugate());
      transformPointCloudWithNormals<PointXYZRGBNormal>(transformed_cloud, transformed_cloud, -trans.cast<float>(), Quaternionf(1, 0, 0, 0));
      //pcl::io::savePCDFileASCII ("cloud0.pcd", transformed_cloud);
      //pcl::io::savePCDFileASCII ("cloud1.pcd", frame1.pointcloud);
      
      // Optional/TODO: Perform ICP to further refine estimate.
      /*PointCloud<PointXYZRGBNormal> cloud_reg;

      IterativeClosestPointNonLinear<PointXYZRGBNormal, PointXYZRGBNormal> reg;
      reg.setInputCloud (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (transformed_cloud));
      reg.setInputTarget (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (frame1.pointcloud));
      reg.setMaximumIterations(50);
      reg.setTransformationEpsilon (1e-8);

      reg.align(cloud_reg); */
            
      // Find matches between pointclouds in frames. (TODO: also compare normals)
      std::vector<int> f0_indices, f1_indices;
      getMatchingIndices(transformed_cloud, frame1.pointcloud, f0_indices, f1_indices);
      
      // Fill in keypoints and projections of relevant features.
      // Currently just done when setting the pointcloud.
      
      // Convert matches into the correct format.
      matches.clear();
      // Start at i=1 as a hack to skip (0,0,0) matches (why are these in the point cloud?)
      
      #pragma omp parallel for shared( transformed_cloud, f0_indices, f1_indices )
      for (unsigned int i=1; i < f0_indices.size(); i++)
      {
        const PointXYZRGBNormal &pt0 = transformed_cloud.points[f0_indices[i]];
        const PointXYZRGBNormal &pt1 = frame1.pointcloud.points[f1_indices[i]];
        
        // Figure out distance and angle between normals
        Quaterniond normalquat;
        Vector3d norm0(pt0.normal[0], pt0.normal[1], pt0.normal[2]), norm1(pt1.normal[0], pt1.normal[1], pt1.normal[2]);
        normalquat.setFromTwoVectors(norm0, norm1);
        //double angledist = normalquat.angularDistance(normalquat.Identity());
        double dist = (Vector3d(pt0.x, pt0.y, pt0.z)-Vector3d(pt1.x, pt1.y, pt1.z)).norm();
        
        /* Vector4d p0_pt = Vector4d(pt0.x, pt0.y, pt0.z, 1.0);
        Vector3d expected_proj = projectPoint(p0_pt, frame0.cam);
        
        Vector3d diff = expected_proj - frame1.pl_kpts[f1_indices[i]].head<3>();
        diff(2) = diff(2) - diff(0);
        
        printf("[Proj difference] %f %f %f\n", diff(0), diff(1), diff(2)); */
        
        if ((norm0 - norm1).norm() < 0.5 && dist < 0.2)
        {
          #pragma omp critical
          matches.push_back(cv::DMatch(f0_indices[i], f1_indices[i], dist));
        }
      }
      
      printf("[Frame] Found %d matches, then converted %d matches.\n", (int)f0_indices.size(), (int)matches.size());
    }
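A side note on the thresholds in the loop above (a standalone sketch of my own, assuming unit-length normals): ||norm0 - norm1|| equals 2*sin(theta/2), so the 0.5 cutoff is roughly a 29-degree limit on the angle between the normals, which is the same quantity the commented-out angularDistance() call would measure.

#include <Eigen/Geometry>
#include <cmath>
#include <cstdio>

int main()
{
    // Two unit normals 0.5 rad apart (hypothetical values).
    Eigen::Vector3d norm0(1.0, 0.0, 0.0);
    Eigen::Vector3d norm1(std::cos(0.5), std::sin(0.5), 0.0);

    Eigen::Quaterniond normalquat;
    normalquat.setFromTwoVectors(norm0, norm1);

    double chord = (norm0 - norm1).norm();                                     // 2*sin(theta/2)
    double angle = normalquat.angularDistance(Eigen::Quaterniond::Identity()); // theta

    std::printf("chord = %f, 2*sin(angle/2) = %f, angle = %f rad\n",
                chord, 2.0 * std::sin(angle / 2.0), angle);
    return 0;
}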
void setupSBA(SysSBA &sys)
{
    // Create camera parameters.
    frame_common::CamParams cam_params;
    cam_params.fx = 430; // Focal length in x
    cam_params.fy = 430; // Focal length in y
    cam_params.cx = 320; // X position of principal point
    cam_params.cy = 240; // Y position of principal point
    cam_params.tx = -30; // Stereo baseline (used below when forming the stereo projections)

    // Define dimensions of the image.
    int maxx = 640;
    int maxy = 480;

    // Create a plane containing a wall of points.
    Plane middleplane;
    middleplane.resize(3, 2, 10, 5);
    middleplane.translate(0.0, 0.0, 7.0);
    
    Plane leftplane;
    leftplane.resize(1, 2, 6, 12);
    //    leftplane.rotate(-PI/4.0, 0, 1, 0);
    leftplane.translate(0, 0, 7.0);
    
    Plane rightplane;
    rightplane.resize(1, 2, 6, 12);
    //    rightplane.rotate(PI/4.0, 0, 1, 0);
    rightplane.translate(2, 0, 7.0);
    
    Plane topplane;
    topplane.resize(1, 1.5, 6, 12);
    //    topplane.rotate(PI/4.0, 1, 0, 0);
    topplane.translate(2, 0, 7.0);

    // Vectors holding the true point positions and the per-point normals.
    vector<Point, Eigen::aligned_allocator<Point> > points;
    vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > normals;
    
    points.insert(points.end(), middleplane.points.begin(), middleplane.points.end());
    normals.insert(normals.end(), middleplane.points.size(), middleplane.normal);
    
    points.insert(points.end(), leftplane.points.begin(), leftplane.points.end());
    normals.insert(normals.end(), leftplane.points.size(), leftplane.normal);
    
    points.insert(points.end(), rightplane.points.begin(), rightplane.points.end());
    normals.insert(normals.end(), rightplane.points.size(), rightplane.normal);
    
    points.insert(points.end(), topplane.points.begin(), topplane.points.end());
    normals.insert(normals.end(), topplane.points.size(), topplane.normal);
    
    // Create nodes and add them to the system.
    unsigned int nnodes = 2; // Number of nodes.
    double path_length = 0.5; // Length of the path the nodes traverse.

    // Set the random seed.
    unsigned short seed[3];   // seed48() expects an array of three unsigned shorts
    seed[0] = seed[1] = seed[2] = (unsigned short)time(NULL);
    seed48(seed);
    
    unsigned int i = 0, j = 0;
    
    for (i = 0; i < nnodes; i++)
    { 
      // Translate in the x direction over the node path.
      Vector4d trans(i/(nnodes-1.0)*path_length, 0, 0, 1);
            
#if 1
      if (i >= 0)
      {
        // Perturb the translation a little.
        double tnoise = 0.5;  // meters
        trans.x() += tnoise*(drand48()-0.5);
        trans.y() += tnoise*(drand48()-0.5);
        trans.z() += tnoise*(drand48()-0.5);
      }
#endif

      // Don't rotate.
      Quaterniond rot(1, 0, 0, 0);
#if 1
      if (i >= 0)
      {
        // Perturb the rotation a little.
        double qnoise = 0.1;  // quaternion component noise (dimensionless)
        rot.x() += qnoise*(drand48()-0.5);
        rot.y() += qnoise*(drand48()-0.5);
        rot.z() += qnoise*(drand48()-0.5);
      }
#endif
      rot.normalize();
      
      // Add a new node to the system.
      sys.addNode(trans, rot, cam_params, false);
    }
        
    double pointnoise = 1.0;
    
    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
      // Add up to ±0.5 units of noise to each point coordinate.
      Vector4d temppoint = points[i];
      temppoint.x() += pointnoise*(drand48() - 0.5);
      temppoint.y() += pointnoise*(drand48() - 0.5);
      temppoint.z() += pointnoise*(drand48() - 0.5);
      sys.addPoint(temppoint);
    }
    
    Vector2d proj2d;
    Vector3d proj, pc, baseline;
    
    Vector3d imagenormal(0, 0, 1);
    
    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0,
              0, sqrt(imagenormal(1)), 0,
              0, 0, sqrt(imagenormal(2));
    Matrix3d covar;
    
    Quaterniond rotation;
    Matrix3d rotmat;
    
    unsigned int midindex = middleplane.points.size();
    unsigned int leftindex = midindex + leftplane.points.size();
    unsigned int rightindex = leftindex + rightplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    printf("Normal for Left Plane:   [%f %f %f], index %d -> %d\n", leftplane.normal.x(), leftplane.normal.y(), leftplane.normal.z(), midindex, leftindex-1);
    printf("Normal for Right Plane:  [%f %f %f], index %d -> %d\n", rightplane.normal.x(), rightplane.normal.y(), rightplane.normal.z(), leftindex, rightindex-1);
    
    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
      for (j = 0; j < sys.nodes.size(); j++)
      {
        // Project the point into the node's image coordinate system.
        sys.nodes[j].setProjection();
        sys.nodes[j].project2im(proj2d, points[i]);
        
        
        // Camera coords for right camera
        baseline << sys.nodes[j].baseline, 0, 0;
        pc = sys.nodes[j].Kcam * (sys.nodes[j].w2n*points[i] - baseline); 
        proj.head<2>() = proj2d;
        proj(2) = pc(0)/pc(2);
        
        // If valid (within the range of the image size), add the stereo 
        // projection to SBA.
        if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
        {
          sys.addStereoProj(j, i, proj);
          
          // Create the covariance matrix: 
          // image plane normal = [0 0 1]
          // wall normal = [0 0 -1]
          // covar = (R)T*[0 0 0;0 0 0;0 0 1]*R
          
          rotation.setFromTwoVectors(imagenormal, normals[i]);
          rotmat = rotation.toRotationMatrix();
          covar = rotmat.transpose()*covar0*rotmat;
          
          //          if (!(i % sys.nodes.size() == j))
          //            sys.setProjCovariance(j, i, covar);
        }
      }
    }
    
    // Add noise to node position.
    
    double transscale = 2.0;
    double rotscale = 0.2;
    
    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
      Vector4d temptrans = sys.nodes[i].trans;
      Quaterniond tempqrot = sys.nodes[i].qrot;
      
      // Add error to both translation and rotation.
      temptrans.x() += transscale*(drand48() - 0.5);
      temptrans.y() += transscale*(drand48() - 0.5);
      temptrans.z() += transscale*(drand48() - 0.5);
      tempqrot.x() += rotscale*(drand48() - 0.5);
      tempqrot.y() += rotscale*(drand48() - 0.5);
      tempqrot.z() += rotscale*(drand48() - 0.5);
      tempqrot.normalize();
      
      sys.nodes[i].trans = temptrans;
      sys.nodes[i].qrot = tempqrot;
      
      // These methods should be called to update the node.
      sys.nodes[i].normRot();
      sys.nodes[i].setTransform();
      sys.nodes[i].setProjection();
      sys.nodes[i].setDr(true);
    }
        
}
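A side note on the covariance built in the projection loop (a standalone sketch of my own, using only Eigen): covar0 is diag(sqrt(0), sqrt(0), sqrt(1)) = diag(0, 0, 1), which is the outer product e_z*e_z^T, so rotmat.transpose()*covar0*rotmat collapses to the rank-one matrix m*m^T with m = rotmat.transpose()*imagenormal.

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    Eigen::Vector3d imagenormal(0, 0, 1);
    // Hypothetical point normal; any unit vector works here.
    Eigen::Vector3d pointnormal = Eigen::Vector3d(0.3, -0.2, 0.9).normalized();

    Eigen::Quaterniond rotation;
    rotation.setFromTwoVectors(imagenormal, pointnormal);
    Eigen::Matrix3d rotmat = rotation.toRotationMatrix();

    // Same matrix as covar0 in setupSBA: diag(0, 0, 1).
    Eigen::Matrix3d covar0 = Eigen::Matrix3d::Zero();
    covar0(2, 2) = 1.0;

    Eigen::Matrix3d covar = rotmat.transpose() * covar0 * rotmat;
    Eigen::Vector3d m = rotmat.transpose() * imagenormal;

    // Should print a value near zero: covar is the rank-one outer product m*m^T.
    std::cout << "max |covar - m*m^T| = "
              << (covar - m * m.transpose()).cwiseAbs().maxCoeff() << "\n";
    return 0;
}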