Example #1
  void MeshRenderer :: estimateOptimalPlanes(const Pose3D& pose, float* near_plane, float* far_plane)
  {
    float min_z = std::numeric_limits<float>::max();
    float max_z = 0.01;

    for (int i = 0; i < m_mesh->faces.size(); ++i)
    {
      const Point3f& v1 = m_mesh->vertices[m_mesh->faces[i].indices[0]];
      const Point3f& v2 = m_mesh->vertices[m_mesh->faces[i].indices[1]];
      const Point3f& v3 = m_mesh->vertices[m_mesh->faces[i].indices[2]];

      Point3f pv1 = pose.cameraTransform(v1);
      Point3f pv2 = pose.cameraTransform(v2);
      Point3f pv3 = pose.cameraTransform(v3);

      min_z = ntk::math::min(min_z,-pv1.z);
      min_z = ntk::math::min(min_z,-pv2.z);
      min_z = ntk::math::min(min_z,-pv3.z);

      max_z = ntk::math::max(max_z,-pv1.z);
      max_z = ntk::math::max(max_z,-pv2.z);
      max_z = ntk::math::max(max_z,-pv3.z);
    }

    ntk_dbg_print(min_z, 2);
    ntk_dbg_print(max_z, 2);

    if (min_z < 0)
      min_z = 0.01;

    if (max_z < min_z)
      max_z = (min_z*2);

    *near_plane = min_z*0.9;
    *far_plane = max_z*1.1;
  }
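A hedged sketch of how these planes typically feed the projection setup, mirroring the setPose listing further down (assumes a current GL context and the same pose and m_pbuffer members; not part of the original function):

    float near_plane = 0.f, far_plane = 0.f;
    estimateOptimalPlanes(pose, &near_plane, &far_plane);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // Vertical FOV in degrees from the pixel focal length, as in setPose.
    double fov = (180.0/M_PI) * 2.0*atan(m_pbuffer->height()/(2.0*pose.focalY()));
    gluPerspective(fov, double(m_pbuffer->width())/m_pbuffer->height(), near_plane, far_plane);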
Example #2
void MeshViewer :: addMeshToVertexBufferObject(const ntk::Mesh& mesh, const Pose3D& pose, MeshViewerMode mode)
{
#if defined(NESTK_USE_GLEW) || defined(USE_GLEW)
    GLuint vbo_id = -1, vbo_faces_id = -1;
    glGenBuffersARB(1, &vbo_id);
    if (mesh.hasFaces())
        glGenBuffersARB(1, &vbo_faces_id);

    VertexBufferObject vbo;
    pose.cvCameraTransform().copyTo(vbo.model_view_matrix);
    // Transpose the matrix for OpenGL column-major.
    vbo.model_view_matrix = vbo.model_view_matrix.t();
    vbo.nb_faces = 0;
    vbo.vertex_id = vbo_id;
    vbo.faces_id = vbo_faces_id;
    vbo.has_faces = mesh.hasFaces();
    vbo.has_color = mesh.hasColors();
    vbo.has_texcoords = mesh.hasTexcoords();
    vbo.color_offset = mesh.vertices.size()*sizeof(Vec3f);
    vbo.texture_offset = mesh.vertices.size()*sizeof(Vec3f) + mesh.colors.size() * sizeof(Vec3b);
    vbo.nb_vertices = mesh.vertices.size();

    glBindBufferARB(GL_ARRAY_BUFFER_ARB, vbo.vertex_id);
    glBufferDataARB(GL_ARRAY_BUFFER_ARB,
                    mesh.colors.size()*sizeof(Vec3b)
                    + mesh.vertices.size()*sizeof(Vec3f)
                    + mesh.texcoords.size()*sizeof(Point2f), // size
                    0, // null pointer: just allocate memory
                    GL_STATIC_DRAW_ARB);
    glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, 0, mesh.vertices.size()*sizeof(Vec3f), &mesh.vertices[0]);
    if (vbo.has_color)
        glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, vbo.color_offset, mesh.colors.size()*sizeof(Vec3b), &mesh.colors[0]);
    if (vbo.has_texcoords)
        glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, vbo.texture_offset, mesh.texcoords.size()*sizeof(Point2f), &mesh.texcoords[0]);

    if (vbo.has_faces)
    {
        vbo.nb_faces = mesh.faces.size();
        glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, vbo.faces_id);
        glBufferDataARB(GL_ELEMENT_ARRAY_BUFFER_ARB,
                        mesh.faces.size() * 3 * sizeof(GLuint), // size
                        (GLuint*)&mesh.faces[0],
                        GL_STATIC_DRAW_ARB);
    }
    m_upcoming_vertex_buffer_objects.push_back(vbo);
#endif
}
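For context, a sketch of how such a VBO might be drawn later with the classic fixed-function pointers; the offsets follow the packed layout filled above (vertices, then colors, then texcoords). This drawing side is assumed, not part of the listing:

    glBindBufferARB(GL_ARRAY_BUFFER_ARB, vbo.vertex_id);
    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*)0); // Vec3f vertices start at offset 0
    if (vbo.has_color)
    {
        glEnableClientState(GL_COLOR_ARRAY);
        glColorPointer(3, GL_UNSIGNED_BYTE, 0, (const GLvoid*)(size_t)vbo.color_offset); // packed Vec3b colors
    }
    if (vbo.has_texcoords)
    {
        glEnableClientState(GL_TEXTURE_COORD_ARRAY);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*)(size_t)vbo.texture_offset); // packed Point2f texcoords
    }
    if (vbo.has_faces)
    {
        glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, vbo.faces_id);
        glDrawElements(GL_TRIANGLES, vbo.nb_faces * 3, GL_UNSIGNED_INT, (const GLvoid*)0);
    }
    else
    {
        glDrawArrays(GL_POINTS, 0, vbo.nb_vertices);
    }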
Example #3
Pose3D ViewBikeMath::calculateHandPos(const JointData& jointData, const JointData::Joint& joint, const RobotDimensions& robotDimensions)
{
  Pose3D handPos;
  float sign = joint == JointData::LShoulderPitch ? 1.f : -1.f;

  handPos.translate(robotDimensions.armOffset.x, sign *  robotDimensions.armOffset.y, robotDimensions.armOffset.z);

  handPos.rotateY(-jointData.angles[joint]);
  handPos.rotateZ(sign * jointData.angles[joint + 1]);
  handPos.translate(robotDimensions.upperArmLength, 0, 0);

  handPos.rotateX(jointData.angles[joint + 2] * sign);
  handPos.rotateZ(sign * jointData.angles[joint + 3]);


  return handPos;
}
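A minimal usage sketch: the joint argument selects the arm chain, with the following joints addressed by offset (joint + 1 .. joint + 3), so passing the left or right shoulder pitch yields the corresponding hand pose (RShoulderPitch appears in Example #14 below; the variables are assumed to be in scope):

    Pose3D left_hand  = ViewBikeMath::calculateHandPos(jointData, JointData::LShoulderPitch, robotDimensions);
    Pose3D right_hand = ViewBikeMath::calculateHandPos(jointData, JointData::RShoulderPitch, robotDimensions);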
Example #4
// Root mean square (RMS) pose optimization from matched 3D point correspondences.
double rms_optimize_3d(Pose3D& pose3d,
                       const std::vector<Point3f>& ref_points,
                       const std::vector<Point3f>& img_points)
{
  std::vector<double> fx;
  std::vector<double> initial(6);
  reprojection_error_3d f(pose3d, ref_points, img_points);
  LevenbergMarquartMinimizer optimizer;
  std::fill(stl_bounds(initial), 0);
  fx.resize(ref_points.size()*3);
  optimizer.minimize(f, initial);
  optimizer.diagnoseOutcome();
  f.evaluate(initial, fx);

  double error = f.outputNorm(initial);

  pose3d.applyTransformAfter(Vec3f(initial[3],initial[4],initial[5]), cv::Vec3f(initial[0], initial[1], initial[2]));
  return error;
}
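A hedged usage sketch: given matched 3D correspondences, the initial pose is refined in place and the residual error returned (filling the correspondences is elided here):

    std::vector<Point3f> ref_points;   // model points
    std::vector<Point3f> img_points;   // observed points, same order
    // ... fill both vectors with matched pairs ...
    Pose3D pose;                       // initial estimate, refined in place
    double error = rms_optimize_3d(pose, ref_points, img_points);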
Example #5
template <class PointT>
bool RelativePoseEstimatorICP<PointT> ::
computeRegistration(Pose3D& relative_pose,
                    PointCloudConstPtr source_cloud,
                    PointCloudConstPtr target_cloud,
                    PointCloudType& aligned_cloud)
{
    pcl::IterativeClosestPoint<PointT, PointT> reg;
    reg.setMaximumIterations (m_max_iterations);
    reg.setTransformationEpsilon (1e-10);
    reg.setRANSACOutlierRejectionThreshold(m_ransac_outlier_threshold);
    reg.setMaxCorrespondenceDistance (m_distance_threshold);
    reg.setInputCloud (source_cloud);
    reg.setInputTarget (target_cloud);
    reg.align (aligned_cloud);

    if (0) // debug: dump the aligned and target clouds as PLY meshes (disabled)
    {
        ntk::Mesh mesh1, mesh2;
        pointCloudToMesh(mesh1, aligned_cloud);
        pointCloudToMesh(mesh2, *target_cloud);
        mesh1.saveToPlyFile("debug_icp_1.ply");
        mesh2.saveToPlyFile("debug_icp_2.ply");
    }

    if (!reg.hasConverged())
    {
      ntk_dbg(1) << "ICP did not converge, ignoring.";
      return false;
    }

    ntk_dbg_print(reg.getFitnessScore(), 1);

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    relative_pose.setCameraTransform(T);
    return true;
}
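A minimal calling sketch for the estimator above, instantiated for pcl::PointXYZ (assumes computeRegistration is accessible as declared and that both clouds are already filled):

    RelativePoseEstimatorICP<pcl::PointXYZ> estimator;
    pcl::PointCloud<pcl::PointXYZ>::Ptr source (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr target (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ> aligned;
    Pose3D relative_pose;
    if (estimator.computeRegistration(relative_pose, source, target, aligned))
        ntk_dbg(1) << "ICP converged.";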
Example #6
  void MeshGenerator :: generatePointCloudMesh(const RGBDImage& image,
                                               const Pose3D& depth_pose,
                                               const Pose3D& rgb_pose)
  {
    m_mesh.clear();
    m_mesh.vertices.reserve(image.depth().rows*image.depth().cols);
    m_mesh.colors.reserve(image.depth().rows*image.depth().cols);
    m_mesh.normals.reserve(image.depth().rows*image.depth().cols);

    const cv::Mat1f& depth_im = image.depth();
    const cv::Mat1b& mask_im = image.depthMask();
    cv::Mat3f voxels (depth_im.size());
    cv::Mat3f rgb_points (depth_im.size());

    cv::Mat1b subsample_mask(mask_im.size());
    subsample_mask = 0;
    for (float r = 0; r < subsample_mask.rows-1; r += 1.0/m_resolution_factor)
      for (float c = 0; c < subsample_mask.cols-1; c += 1.0/m_resolution_factor)
        subsample_mask(ntk::math::rnd(r),ntk::math::rnd(c)) = 1;
    subsample_mask = mask_im & subsample_mask;

    depth_pose.unprojectFromImage(depth_im, subsample_mask, voxels);
    if (m_use_color && image.hasRgb())
      rgb_pose.projectToImage(voxels, subsample_mask, rgb_points);

    for (int r = 0; r < voxels.rows; ++r)
    {
      Vec3f* voxels_data = voxels.ptr<Vec3f>(r);
      const uchar* mask_data = subsample_mask.ptr<uchar>(r);
      for (int c = 0; c < voxels.cols; ++c)
      {
        if (!mask_data[c])
          continue;

        Vec3b color (0,0,0);
        if (m_use_color)
        {
          Point3f prgb = rgb_points(r,c);
          int i_y = ntk::math::rnd(prgb.y);
          int i_x = ntk::math::rnd(prgb.x);
          if (is_yx_in_range(image.rgb(), i_y, i_x))
          {
            Vec3b bgr = image.rgb()(i_y, i_x);
            color = Vec3b(bgr[2], bgr[1], bgr[0]);
          }
        }
        else
        {
          int g = 0;
          if (image.intensity().data)
            g = image.intensity()(r,c);
          else
            g = 255 * voxels_data[c][2] / 10.0;
          color = Vec3b(g,g,g);
        }

        m_mesh.vertices.push_back(voxels_data[c]);
        m_mesh.colors.push_back(color);
      }
    }
  }
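Usage sketch; the setters and accessor are hypothetical names implied by the m_use_color, m_resolution_factor and m_mesh members:

    MeshGenerator generator;
    generator.setUseColor(true);              // hypothetical setter for m_use_color
    generator.setResolutionFactor(0.5);       // hypothetical setter for m_resolution_factor
    generator.generatePointCloudMesh(image, depth_pose, rgb_pose);
    const ntk::Mesh& mesh = generator.mesh(); // hypothetical accessor for m_mesh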
Example #7
  void MeshGenerator :: generateTriangleMesh(const RGBDImage& image,
                                             const Pose3D& depth_pose,
                                             const Pose3D& rgb_pose)
  {
    const Mat1f& depth_im = image.depth();
    const Mat1b& mask_im = image.depthMask();
    m_mesh.clear();
    if (m_use_color)
      image.rgb().copyTo(m_mesh.texture);
    else if (image.intensity().data)
      toMat3b(normalize_toMat1b(image.intensity())).copyTo(m_mesh.texture);
    else
    {
      m_mesh.texture.create(depth_im.size());
      m_mesh.texture = Vec3b(255,255,255);
    }
    m_mesh.vertices.reserve(depth_im.cols*depth_im.rows);
    m_mesh.texcoords.reserve(depth_im.cols*depth_im.rows);
    m_mesh.colors.reserve(depth_im.cols*depth_im.rows);
    Mat1i vertice_map(depth_im.size());
    vertice_map = -1;
    for_all_rc(depth_im)
    {
      if (!mask_im(r,c))
        continue;
      double depth = depth_im(r,c);
      Point3f p3d = depth_pose.unprojectFromImage(Point3f(c,r,depth));
      Point3f p2d_rgb;
      Point2f texcoords;
      if (m_use_color)
      {
        p2d_rgb = rgb_pose.projectToImage(p3d);
        texcoords = Point2f(p2d_rgb.x/image.rgb().cols, p2d_rgb.y/image.rgb().rows);
      }
      else
      {
        p2d_rgb = Point3f(c,r,depth);
        texcoords = Point2f(p2d_rgb.x/image.intensity().cols, p2d_rgb.y/image.intensity().rows);
      }
      vertice_map(r,c) = m_mesh.vertices.size();
      m_mesh.vertices.push_back(p3d);
      // m_mesh.colors.push_back(bgr_to_rgb(im.rgb()(p2d_rgb.y, p2d_rgb.x)));
      m_mesh.texcoords.push_back(texcoords);
    }

    for_all_rc(vertice_map)
    {
      if (vertice_map(r,c) < 0)
        continue;

      if ((c < vertice_map.cols - 1) &&  (r < vertice_map.rows - 1) &&
          (vertice_map(r+1,c)>=0) && (vertice_map(r,c+1) >= 0) &&
          (std::abs(depth_im(r,c) - depth_im(r+1, c)) < m_max_delta_depth) &&
          (std::abs(depth_im(r,c) - depth_im(r, c+1)) < m_max_delta_depth))
      {
        Face f;
        f.indices[2] = vertice_map(r,c);
        f.indices[1] = vertice_map(r,c+1);
        f.indices[0] = vertice_map(r+1,c);
        m_mesh.faces.push_back(f);
      }

      if ((c > 0) &&  (r < vertice_map.rows - 1) &&
          (vertice_map(r+1,c)>=0) && (vertice_map(r+1,c-1) >= 0) &&
          (std::abs(depth_im(r,c) - depth_im(r+1, c)) < m_max_delta_depth) &&
          (std::abs(depth_im(r,c) - depth_im(r+1, c-1)) < m_max_delta_depth))
      {
        Face f;
        f.indices[2] = vertice_map(r,c);
        f.indices[1] = vertice_map(r+1,c);
        f.indices[0] = vertice_map(r+1,c-1);
        m_mesh.faces.push_back(f);
      }
    }
    m_mesh.computeNormalsFromFaces();
  }
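The depth-continuity test used twice above, factored out as a sketch (the helper is not in the original listing): two neighboring pixels are joined by a triangle edge only when both have valid vertices and their depths differ by less than m_max_delta_depth.

    inline bool depth_continuous(const cv::Mat1f& depth, int r1, int c1, int r2, int c2,
                                 float max_delta_depth)
    {
        return std::abs(depth(r1, c1) - depth(r2, c2)) < max_delta_depth;
    }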
Example #8
  void MeshGenerator :: generateSurfelsMesh(const RGBDImage& image,
                                            const Pose3D& depth_pose,
                                            const Pose3D& rgb_pose)
  {
    double min_val = 0, max_val = 0;
    if (image.amplitude().data)
      minMaxLoc(image.amplitude(), &min_val, &max_val);

    m_mesh.clear();

    const cv::Mat1f& depth_im = image.depth();
    const cv::Mat1b& mask_im = image.depthMask();

    for_all_rc(depth_im)
    {
      int i_r = r;
      int i_c = c;
      if (!is_yx_in_range(depth_im, i_r, i_c))
        continue;

      if (!mask_im(r,c))
        continue;

      double depth = depth_im(i_r,i_c);
      cv::Point3f p = depth_pose.unprojectFromImage(Point2f(c,r), depth);

      Point3f normal = image.normal().data ? image.normal()(i_r, i_c) : Vec3f(0,0,1);

      Vec3b color (0,0,0);
      if (m_use_color)
      {
        cv::Point3f prgb = rgb_pose.projectToImage(p);
        int i_y = ntk::math::rnd(prgb.y);
        int i_x = ntk::math::rnd(prgb.x);
        if (is_yx_in_range(image.rgb(), i_y, i_x))
        {
          Vec3b bgr = image.rgb()(i_y, i_x);
          color = Vec3b(bgr[2], bgr[1], bgr[0]);
        }
      }
      else
      {
        int g = 0;
        if (image.amplitude().data)
          g = 255.0 * (image.amplitude()(i_r,i_c) - min_val) / (max_val-min_val);
        else
          g = 255 * depth / 10.0;
        color = Vec3b(g,g,g);
      }

      Surfel s;
      s.color = color;
      s.confidence = 0;
      s.location = p;
      s.normal = normal;
      s.n_views = 1;
      double normal_z = std::max(normal.z, 0.5f);
      s.radius = m_resolution_factor * ntk::math::sqrt1_2 * depth
          / (depth_pose.focalX() * normal_z);
      m_mesh.addSurfel(s);
    }
  }
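The radius formula above, as a standalone sketch: surfels are sized so neighboring samples overlap, growing with depth and shrinking with focal length, and clamping normal.z at 0.5 bounds the radius on steep surfaces. The helper name is illustrative:

    inline float surfel_radius(float depth, float focal_x, float normal_z, float resolution_factor)
    {
        const float sqrt1_2 = 0.707106781f; // ntk::math::sqrt1_2
        return resolution_factor * sqrt1_2 * depth / (focal_x * std::max(normal_z, 0.5f));
    }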
Example #9
void calibrate_kinect_depth(std::vector< std::vector<Point2f> >& stereo_corners,
                            std::vector< DepthCalibrationPoint >& depth_values)
{
  std::vector< std::vector<Point2f> > good_corners;
  stereo_corners.resize(global::images_list.size());
  for (int i_image = 0; i_image < global::images_list.size(); ++i_image)
  {
    QString filename = global::images_list[i_image];
    QDir cur_image_dir (global::images_dir.absoluteFilePath(filename));

    std::string full_filename;
    cv::Mat3b image;
    load_intensity_file(cur_image_dir.path().toStdString(), full_filename, image);
    ntk_ensure(image.data, "Could not load color image");

    kinect_shift_ir_to_depth(image);

    std::vector<Point2f> current_view_corners;
    calibrationCorners(full_filename, "corners",
                       global::opt_pattern_width(), global::opt_pattern_height(),
                       current_view_corners, image, 1);

    if (current_view_corners.size() == global::opt_pattern_height()*global::opt_pattern_width())
    {
      good_corners.push_back(current_view_corners);
      stereo_corners[i_image] = current_view_corners;
      showCheckerboardCorners(image, current_view_corners, 1);
    }
    else
    {
      ntk_dbg(0) << "Warning: corners not detected";
      stereo_corners[i_image].resize(0);
    }
  }

  std::vector< std::vector<Point3f> > pattern_points;
  calibrationPattern(pattern_points,
                     global::opt_pattern_width(),  global::opt_pattern_height(), global::opt_square_size(),
                     good_corners.size());

  ntk_assert(pattern_points.size() == good_corners.size(), "Invalid points size");

  std::vector<Mat> rvecs, tvecs;

  int flags = 0;
  if (global::opt_ignore_distortions())
    flags = CV_CALIB_ZERO_TANGENT_DIST;

  double error = calibrateCamera(pattern_points, good_corners, global::image_size,
                                 global::depth_intrinsics, global::depth_distortion,
                                 rvecs, tvecs, flags);

  if (global::opt_ignore_distortions())
    global::depth_distortion = 0.f;

  ntk_dbg_print(error, 1);
  int good_i = 0;
  foreach_idx(stereo_i, stereo_corners)
  {
    if (stereo_corners[stereo_i].size() > 0)
    {
      QString filename = global::images_list[stereo_i];
      QDir cur_image_dir (global::images_dir.absoluteFilePath(filename));
      std::string full_filename;
      cv::Mat3b image;
      load_intensity_file(cur_image_dir.path().toStdString(), full_filename, image);
      ntk_ensure(image.data, "Could not load intensity image");
      kinect_shift_ir_to_depth(image);

      cv::Mat1f depth_image;

      if (is_file(cur_image_dir.absoluteFilePath("raw/depth.yml").toStdString()))
      {
        full_filename = cur_image_dir.absoluteFilePath("raw/depth.yml").toStdString();
        depth_image = imread_yml(full_filename);
      }
      else if (is_file(cur_image_dir.absoluteFilePath("raw/depth.raw").toStdString()))
      {
        full_filename = cur_image_dir.absoluteFilePath("raw/depth.raw").toStdString();
        depth_image = imread_Mat1f_raw(full_filename);
      }
      ntk_ensure(depth_image.data, "Could not load depth image");

      cv::Mat3b undistorted_image;
      if (global::opt_ignore_distortions())
        image.copyTo(undistorted_image);
      else
        undistort(image, undistorted_image, global::depth_intrinsics, global::depth_distortion);

      std::vector<Point2f> current_view_corners;
      calibrationCorners(full_filename, "corners",
                         global::opt_pattern_width(), global::opt_pattern_height(),
                         current_view_corners, undistorted_image, 1);

      if (current_view_corners.size() == (global::opt_pattern_width()*global::opt_pattern_height()))
      {
        stereo_corners[stereo_i] = current_view_corners;
        showCheckerboardCorners(undistorted_image, stereo_corners[stereo_i], 200);
      }
      else
      {
        stereo_corners[stereo_i].resize(0);
        continue;
      }

      // Generate calibration points
      {
        // FIXME: why do rvecs and tvecs from calibrateCamera seem to be nonsense?
        // Call findExtrinsics another time to get good chessboard estimations.

        cv::Mat1f H;
        estimate_checkerboard_pose(pattern_points[0],
                                   current_view_corners,
                                   global::depth_intrinsics,
                                   H);
        Pose3D pose;
        pose.setCameraParametersFromOpencv(global::depth_intrinsics);
        ntk_dbg_print(pose, 1);
        pose.setCameraTransform(H);

        foreach_idx(pattern_i, pattern_points[0])
        {
          ntk_dbg_print(pattern_points[0][pattern_i], 1);
          Point3f p = pose.projectToImage(pattern_points[0][pattern_i]);
          ntk_dbg_print(p, 1);
          double kinect_raw = depth_image(p.y, p.x);
          if (!(kinect_raw < 2047)) continue;
          ntk_dbg_print(kinect_raw, 1);
          double linear_depth = 1.0 / (kinect_raw * -0.0030711016 + 3.3309495161);
          const float k1 = 1.1863;
          const float k2 = 2842.5;
          const float k3 = 0.1236;
          double tan_depth = k3 * tanf(kinect_raw/k2 + k1);
          ntk_dbg_print(linear_depth, 1);
          ntk_dbg_print(tan_depth, 1);
          depth_values.push_back(DepthCalibrationPoint(kinect_raw, p.z));
        }
      }

      ++good_i;
    }
  }
}
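The two raw-to-metric depth models evaluated inside the loop, factored out as a sketch (constants as in the listing; kinect_raw is the 11-bit raw reading, only valid below 2047; helper names are illustrative):

    inline double kinect_linear_depth(double kinect_raw)
    {
        return 1.0 / (kinect_raw * -0.0030711016 + 3.3309495161);
    }

    inline double kinect_tan_depth(double kinect_raw)
    {
        const double k1 = 1.1863, k2 = 2842.5, k3 = 0.1236;
        return k3 * std::tan(kinect_raw / k2 + k1);
    }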
Example #10
void MeshRenderer :: setPose(const Pose3D& pose, float* arg_near_plane, float* arg_far_plane)
{
    VertexBufferObject& vbo = m_vertex_buffer_object;
    pose.cvCameraTransform().copyTo(vbo.model_view_matrix);
    // Transpose the matrix for OpenGL column-major.
    vbo.model_view_matrix = vbo.model_view_matrix.t();

    if (!(arg_near_plane && arg_far_plane))
    {
        estimateOptimalPlanes(pose, &m_last_near_plane, &m_last_far_plane);
    }
    else
    {
        m_last_near_plane = *arg_near_plane;
        m_last_far_plane = *arg_far_plane;
    }

    m_pbuffer->makeCurrent();
    glMatrixMode (GL_MODELVIEW);
    glLoadIdentity ();
    cv::Vec3f euler_angles = pose.cvEulerRotation();
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    glRotatef(euler_angles[2]*180.0/M_PI, 0, 0, 1);
    glRotatef(euler_angles[1]*180.0/M_PI, 0, 1, 0);
    glRotatef(euler_angles[0]*180.0/M_PI, 1, 0, 0);

    glMatrixMode (GL_PROJECTION);
    glLoadIdentity ();
    double dx = pose.imageCenterX() - (m_pbuffer->width() / 2.0);
    double dy = pose.imageCenterY() - (m_pbuffer->height() / 2.0);
    glViewport(dx, -dy, m_pbuffer->width(), m_pbuffer->height());
    if (pose.isOrthographic())
    {
        ntk_dbg_print(pose.focalX()/2, 0);
        ntk_dbg_print(pose.focalY()/2, 0);
        glOrtho(-pose.focalX()/2, pose.focalX()/2, -pose.focalY()/2, pose.focalY()/2, m_last_near_plane, m_last_far_plane);
    }
    else
    {
        double fov = (180.0/M_PI) * 2.0*atan(m_pbuffer->height()/(2.0*pose.focalY()));
        // double fov2 = (180.0/M_PI) * 2.0*atan(image.cols/(2.0*pose.focalX()));
        // ntk_dbg_print(fov2, 2);
        // gluPerspective(fov2,  double(image.rows)/image.cols, near_plane, far_plane);
        gluPerspective(fov, double(m_pbuffer->width())/m_pbuffer->height(), m_last_near_plane, m_last_far_plane);
    }

    glMatrixMode (GL_MODELVIEW);
}
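The field-of-view computation used above, as a small sketch: once glViewport absorbs the principal-point offset, only the vertical FOV and the aspect ratio remain. The helper name is illustrative:

    inline double vertical_fov_degrees(double focal_y, double height_pixels)
    {
        return (180.0/M_PI) * 2.0 * std::atan(height_pixels / (2.0 * focal_y));
    }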
Example #11
bool SurfelsRGBDModeler :: addNewView(const RGBDImage& image_, Pose3D& depth_pose)
{
    ntk::TimeCount tc("SurfelsRGBDModeler::addNewView", 1);
    const float max_camera_normal_angle = ntk::deg_to_rad(90);

    RGBDImage image;
    image_.copyTo(image);
    if (!image_.normal().data)
    {
        OpenniRGBDProcessor processor;
        processor.computeNormalsPCL(image);
    }

    Pose3D rgb_pose = depth_pose;
    rgb_pose.toRightCamera(image.calibration()->rgb_intrinsics, image.calibration()->R, image.calibration()->T);

    Pose3D world_to_camera_normal_pose;
    world_to_camera_normal_pose.applyTransformBefore(cv::Vec3f(0,0,0), depth_pose.cvEulerRotation());
    Pose3D camera_to_world_normal_pose = world_to_camera_normal_pose;
    camera_to_world_normal_pose.invert();

    const Mat1f& depth_im = image.depth();
    Mat1b covered_pixels (depth_im.size());
    covered_pixels = 0;

    std::list<Surfel> surfels_to_reinsert;

    // Surfel updating.
    for (SurfelMap::iterator next_it = m_surfels.begin(); next_it != m_surfels.end(); )
    {
        SurfelMap::iterator surfel_it = next_it;
        ++next_it;

        Surfel& surfel = surfel_it->second;
        if (!surfel.enabled())
            continue;

        Point3f surfel_2d = depth_pose.projectToImage(surfel.location);
        bool surfel_deleted = false;
        int r = ntk::math::rnd(surfel_2d.y);
        int c = ntk::math::rnd(surfel_2d.x);
        int d = ntk::math::rnd(surfel_2d.z);
        if (!is_yx_in_range(depth_im, r, c)
                || !image.depthMask()(r, c)
                || !image.isValidNormal(r,c))
            continue;

        const float update_max_dist = getCompatibilityDistance(depth_im(r,c));

        Vec3f camera_normal = image.normal()(r, c);
        normalize(camera_normal);

        Vec3f world_normal = camera_to_world_normal_pose.cameraTransform(camera_normal);
        normalize(world_normal);

        Vec3f eyev = camera_eye_vector(depth_pose, r, c);
        double camera_angle = acos(camera_normal.dot(-eyev));

        if (camera_angle > max_camera_normal_angle)
            continue;

        float normal_angle = acos(world_normal.dot(surfel.normal));
        // Surfels have different normals, maybe two different faces of the same object.
        if (normal_angle > (m_update_max_normal_angle*M_PI/180.0))
        {
            // Removal check. If a surfel has a different normal and is closer to the camera
            // than the new scan, remove it.
            if ((-surfel_2d.z) < depth_im(r,c) && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            continue;
        }

        // If existing surfel is far from new depth value:
        // - If existing one had a worst point of view, and was seen only once, remove it.
        // - Otherwise do not include the new one.
        if (std::abs(surfel_2d.z - depth_im(r,c)) > update_max_dist)
        {
            if (surfel.min_camera_angle > camera_angle && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            else
                covered_pixels(r,c) = 1;
            continue;
        }

        // Compatible surfel found.
        const float depth = depth_im(r,c) + m_global_depth_offset;

        Point3f p3d = depth_pose.unprojectFromImage(Point2f(c,r), depth);
        cv::Vec3b rgb_color = bgr_to_rgb(image.mappedRgb()(r, c));

        Surfel image_surfel;
        image_surfel.location = p3d;
        image_surfel.normal = world_normal;
        image_surfel.color = rgb_color;
        image_surfel.min_camera_angle = camera_angle;
        image_surfel.n_views = 1;
        image_surfel.radius = computeSurfelRadius(depth, camera_normal[2], depth_pose.meanFocal());
        mergeToLeftSurfel(surfel, image_surfel);

        covered_pixels(r,c) = 1;
        // needs to change the cell?
        Cell new_cell = worldToCell(surfel.location);
        if (new_cell != surfel_it->first)
        {
            surfels_to_reinsert.push_back(surfel);
            m_surfels.erase(surfel_it);
        }
    }

    foreach_const_it(it, surfels_to_reinsert, std::list<Surfel>)
    {
        Cell new_cell = worldToCell(it->location);
        m_surfels.insert(std::make_pair(new_cell, *it));
    }

    return true; // assumed terminal result; the listing is truncated here
}
Example #12
void MeshViewer :: addMeshToDisplayList(const ntk::Mesh& mesh, const Pose3D& pose, MeshViewerMode mode)
{
    int new_list_index = glGenLists(1);
    glNewList(new_list_index, GL_COMPILE);
    if (mesh.texture.data)
    {
        // Last texture id was just created
        GLuint texture = m_upcoming_textures[m_upcoming_textures.size()-1];
        glEnable(GL_TEXTURE_2D);
        glBindTexture( GL_TEXTURE_2D, texture );
    }
    else
    {
        glDisable(GL_TEXTURE_2D);
    }
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    Vec3f euler_angles = pose.cvEulerRotation();
    glRotatef(rad_to_deg(euler_angles[0]), 1, 0, 0);
    glRotatef(rad_to_deg(euler_angles[1]), 0, 1, 0);
    glRotatef(rad_to_deg(euler_angles[2]), 0, 0, 1);

    if (mode & WIREFRAME)
    {
        glPolygonMode( GL_FRONT_AND_BACK, GL_LINE );
    }
    else
    {
        glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
    }

    int64 point_start_time = ntk::Time::getMillisecondCounter();
    if (mesh.faces.size() == 0)
    {
        glBegin(GL_POINTS);
        for (int i = 0; i < mesh.vertices.size(); ++i)
        {
            const Point3f& v = mesh.vertices[i];
            // if (i % 1000 == 0)
            // ntk_dbg_print(v, 1);
            if (mesh.hasColors())
                glColor3f(mesh.colors[i][0]/255.0, mesh.colors[i][1]/255.0, mesh.colors[i][2]/255.0);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
    }
    int64 point_end_time = ntk::Time::getMillisecondCounter();
    ntk_dbg_print(point_end_time-point_start_time, 1);

    {
        glBegin(GL_TRIANGLES);
        for (int i = 0; i < mesh.faces.size(); ++i)
        {
            int i1 = mesh.faces[i].indices[0];
            int i2 = mesh.faces[i].indices[1];
            int i3 = mesh.faces[i].indices[2];

            const Point3f& v1 = mesh.vertices[i1];
            const Point3f& v2 = mesh.vertices[i2];
            const Point3f& v3 = mesh.vertices[i3];

            // Face normal from two edges. glNormal applies to the vertices
            // issued after it, so set it before the glVertex calls.
            Vec3f nm = (Vec3f(v2-v1).cross(v3-v2));
            normalize(nm);
            glNormal3f(nm[0], nm[1], nm[2]);

            if (!mesh.hasColors())
                glColor3f(1.0f, 0.0f, 0.0f);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i1][0]/255.0, mesh.colors[i1][1]/255.0, mesh.colors[i1][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i1].x, mesh.texcoords[i1].y);
            glVertex3f(v1.x, v1.y, v1.z);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i2][0]/255.0, mesh.colors[i2][1]/255.0, mesh.colors[i2][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i2].x, mesh.texcoords[i2].y);
            glVertex3f(v2.x, v2.y, v2.z);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i3][0]/255.0, mesh.colors[i3][1]/255.0, mesh.colors[i3][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i3].x, mesh.texcoords[i3].y);
            glVertex3f(v3.x, v3.y, v3.z);
        }
        glEnd();
    }
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
    glEndList();

    m_upcoming_display_lists.push_back(new_list_index);
}
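The compiled lists are presumably swapped in and replayed each frame; a hedged sketch of that drawing side (m_display_lists is an assumed member holding the ready lists):

    for (size_t i = 0; i < m_display_lists.size(); ++i) // assumed member
        glCallList(m_display_lists[i]);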
Example #13
int main(void)
{

    Pose3D pz;
    pz.setAxisAngle(0,1,0, M_PI/3);
    pz.setPosition(0,2,0);

    Pose3D pz2;
    pz2.setAxisAngle(0,1,0, M_PI/2 - M_PI/3);
    pz2.setPosition(0,1,0);

    pz.multiplyPose(pz2);

    cout << pz;
    NEWMAT::Matrix m = pz.asMatrix();
    cout << m;

    Position p;
    p.x = 0;
    p.y = 0;
    p.z = 1;

    pz.applyToPosition(p);
    printf("After rotate: %f %f %f\n", p.x, p.y, p.z);

    cout << endl;

    for (int ind = 0; ind < 2; ind++)
    {
        bool caching;
        if (ind == 0) {
            caching = true;
            std::cout << "Caching Mode"<<std::endl;
        }
        else {
            caching = false;
            std::cout << std::endl<<std::endl<<std::endl<<"Not Caching Mode" <<std::endl;
        }



        double dx,dy,dz,dyaw,dp,dr;
        std::cout <<"Creating TransformReference" << std::endl;
        TransformReference mTR(caching);



        dx = dy= dz = 0;
        dyaw = dp = dr = 0.1;

        uint64_t atime;


        timeval temp_time_struct;
        gettimeofday(&temp_time_struct,NULL);
        atime =  temp_time_struct.tv_sec * 1000000000ULL + (uint64_t)temp_time_struct.tv_usec * 1000ULL;

        std::cout <<"Setting values" << std::endl;

        //Fill in some transforms
        //  mTR.setWithEulers(10,2,1,1,1,dyaw,dp,dr,atime); //Switching out for DH params below
        mTR.setWithDH("10","2",1.0,1.0,1.0,dyaw,atime);
        //mTR.setWithEulers("2","3",1-1,1,1,dyaw,dp,dr,atime-1000);
        mTR.setWithEulers("2","3",1,1,1,dyaw,dp,dr,atime-100);
        mTR.setWithEulers("2","3",1,1,1,dyaw,dp,dr,atime-50);
        mTR.setWithEulers("2","3",1,1,1,dyaw,dp,dr,atime-1000);
        mTR.setWithEulers("2","3",1,1,1,dyaw,dp,dr,atime+500);
        mTR.setWithEulers("2","3",1+100,1,1,dyaw,dp,dr,atime+1000);
        mTR.setWithEulers("2","3",1,1,1,dyaw,dp,dr,atime+1100);
        mTR.setWithEulers("3","5",dx,dy,dz,dyaw,dp,dr,atime);
        mTR.setWithEulers("5","1",dx,dy,dz,dyaw,dp,dr,atime);
        mTR.setWithEulers("6","5",dx,dy,dz,dyaw,dp,dr,atime);
        mTR.setWithEulers("6","5",dx,dy,dz,dyaw,dp,dr,atime);
        mTR.setWithEulers("7","6",1,1,1,dyaw,dp,dr,atime);
        mTR.setWithDH("8","7",1.0,1.0,1.0,dyaw,atime);
        //mTR.setWithEulers("8","7",1,1,1,dyaw,dp,dr,atime); //Switching out for DH params above

        std::cout <<"Trying some tests" << std::endl;
        //Demonstrate InvalidFrame LookupException
        try
        {
            std::cout<< mTR.viewChain("10","9");
        }
        catch (TransformReference::LookupException &ex)
        {
            std::cout << "Caught " << ex.what()<<std::endl;
        }




        // See the list of transforms to get between the frames
        std::cout<<"Viewing (10,8):"<<std::endl;
        std::cout << mTR.viewChain("10","8");


        //See the resultant transform
        std::cout <<"Calling getMatrix(10,8)"<<std::endl;
        //      NEWMAT::Matrix mat = mTR.getMatrix(1,1);
        NEWMAT::Matrix mat = mTR.getMatrix("10","8",atime);

        std::cout << "Result of getMatrix(10,8,atime):" << std::endl << mat<< std::endl;

        TFPoint mPoint;
        mPoint.x = 1;
        mPoint.y = 1;
        mPoint.z = 1;
        mPoint.frame = "10";
        mPoint.time = atime;

        TFPoint nPoint = mPoint;

        std::cout <<"Point 1,1,1 goes like this:" <<std::endl;
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("2", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("3", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("5", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("6", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("7", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("8", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;
        mPoint =  mTR.transformPoint("10", mPoint);
        std::cout << "(" << mPoint.x <<","<< mPoint.y << "," << mPoint.z << ") in frame " << mPoint.frame << std::endl;

        //Break the graph, making it loop and demonstrate catching MaxDepthException
        mTR.setWithEulers("6","7",dx,dy,dz,dyaw,dp,dr,atime);

        try {
            std::cout<<mTR.viewChain("10","8");
        }
        catch (TransformReference::MaxDepthException &ex)
        {
            std::cout <<"caught loop in graph"<<ex.what()<<std::endl;
        }

        //Break the graph, making it disconnected, and demonstrate catching ConnectivityException
        mTR.setWithEulers("6","0",dx,dy,dz,dyaw,dp,dr,atime);

        try {
            std::cout<<mTR.viewChain("10","8");
        }
        catch (TransformReference::ConnectivityException &ex)
        {
            std::cout <<"caught unconnected frame"<<ex.what()<<std::endl;
        }

        //Testing clearing the history with parent change
        mTR.setWithEulers("7","5",1,1,1,dyaw,dp,dr,atime);
        //todo display this somehow


        std::cout << " Testing accessors" <<std::endl;

        double x,y,z,yaw,pitch,roll;

        x = 30.0;
        y = 10.0;
        z = 0.0;
        yaw = 0.1;
        pitch = 0.0;
        roll = 0.0;

        mTR.setWithEulers("2","1",x,y,z,yaw,pitch,roll,atime);

        libTF::TFPose2D in, out;

        in.x = 0.0;
        in.y = 0.0;
        in.yaw = 0.0;
        in.frame = "2";
        in.time = atime;

        out = mTR.transformPose2D("1",in);

        printf("%.3f %.3f %.3f\n",
               out.x, out.y, out.yaw*180.0/M_PI);


        try
        {
            out = mTR.transformPose2D("0",in);
            std::cout << "failed to throw" << std::endl;
        }
        catch (TransformReference::LookupException &ex)
        {
            std::cout << "transformPose2D(0,in): Caught " << ex.what()<<std::endl;
        }
        catch (TransformReference::ConnectivityException &ex)
        {
            std::cout <<"caught unconnected frame, used to be lookup exception: "<<ex.what()<<std::endl;
        }

        libTF::TFEulerYPR ypr_in;
        ypr_in.yaw = 0;
        ypr_in.pitch = 0;
        ypr_in.roll = 0;
        ypr_in.time = atime;
        ypr_in.frame = "1";
        std::cout <<"YPR in:"<< ypr_in.yaw<<","<<ypr_in.pitch <<"," <<ypr_in.roll<<std::endl;

        libTF::TFEulerYPR ypr_out = mTR.transformEulerYPR("2", ypr_in);
        std::cout <<"YPR out:"<< ypr_out.yaw<<","<<ypr_out.pitch <<"," <<ypr_out.roll<<std::endl;

    }

    std::cout <<"Congratulations! You reached the end of the test program without errors." <<std::endl;

    return 0;
}
Example #14
void ODEOutBehaviorProvider::setBehaviorData()
{
	int i = 0;
	int numSet = 0;
	int fallState = FallDownState::upright;
	bool bumperRight = false, bumperLeft = false, chestButton = false;
// 	if(theKeyStates != NULL){
// 		bumperRight = theKeyStates->pressed[KeyStates::RBumperRight] || theKeyStates->pressed[KeyStates::RBumperLeft];
// 		bumperLeft  = theKeyStates->pressed[KeyStates::LBumperRight] || theKeyStates->pressed[KeyStates::LBumperLeft];
// 		chestButton = theKeyStates->pressed[KeyStates::ChestButton];
// 	}
	if(theFallDownState != NULL){
		fallState = theFallDownState->state;
	}
	i = 0;
	outBehaviorData.intData.clear();
	outBehaviorData.intData.resize(OutBehaviorData::NUM_INT_DATA);
	outBehaviorData.intData[i++] = fallState;//fallen state
	outBehaviorData.intData[i++] = bumperRight;//bumper right
	outBehaviorData.intData[i++] = bumperLeft;//bumper left
	outBehaviorData.intData[i++] = chestButton;//chest button
	//    numSet = i;
	//    if(numSet == OutBehaviorData::NUM_INT_DATA)
	//        std::cout<<"Num of Data Set missmatch with NUM_INT_DATA: NumSet: "<<numSet<<", NUM_INT_DATA: "<<OutBehaviorData::NUM_INT_DATA<<std::endl;
	//    printf("%d Fallen and Button Data Set to :\n", outBehaviorData.intData.size());
	//    for(i = 0; i < numSet; i++)
	//    {
	//        printf("the %d number is ", i);
	//        std::cout<<outBehaviorData.intData[i]<<std::endl;
	//    }

	//========for joint data
	i = 0;
	outBehaviorData.jointData.clear();
	outBehaviorData.jointData.resize(OutBehaviorData::NUM_JOINT_DATA);
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::HeadYaw];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::HeadPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RShoulderPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RShoulderRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RElbowYaw];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RElbowRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LShoulderPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LShoulderRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LElbowYaw];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LElbowRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RHipYawPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RHipRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RHipPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RKneePitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RAnklePitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::RAnkleRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LHipYawPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LHipRoll];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LHipPitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LKneePitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LAnklePitch];
	outBehaviorData.jointData[i++] =(float)theSensoryJointData->angles[JointData::LAnkleRoll];
	//	numSet = i;
	//    if(numSet == OutBehaviorData::NUM_JOINT_DATA)
	//        std::cout<<"Num of Data Set missmatch with NUM_JOINT_DATA: NumSet: "<<numSet<<", NUM_JOINT_DATA: "<<OutBehaviorData::NUM_JOINT_DATA<<std::endl;
	//    printf("%d joint data Set to :\n", outBehaviorData.jointData.size());
	//    for(i = 0; i < numSet; i++)
	//    {
	//        printf("the %d number is ", i);
	//        std::cout<<outBehaviorData.jointData[i]<<std::endl;
	//    }
	RotationMatrix Rsupport;
	Rsupport = theNaoStructure->uLink[NaoStructure::bodyLink].R.invert();
	Pose3D Tsupport;
	Tsupport.rotation= Rsupport;
	Tsupport.translate(theNaoStructure->uLink[NaoStructure::bodyLink].p*(-1));
	if (theNaoStructure->supportingMode ==NaoConfig::SUPPORT_MODE_LEFT || theNaoStructure->supportingMode == NaoConfig::SUPPORT_MODE_DOUBLE_LEFT)
	{
		outBehaviorData.leftx = Tsupport.translation.x*1000;
		outBehaviorData.lefty = Tsupport.translation.y*1000;
		Pose3D Tswing;
		Tswing.rotation = Rsupport;
		Tswing.translate(theNaoStructure->uLink[NaoStructure::rFootLink].p-theNaoStructure->uLink[NaoStructure::bodyLink].p);
		outBehaviorData.rightx = Tswing.translation.x*1000;
		outBehaviorData.righty = Tswing.translation.y*1000;
	}
	else
	{
		outBehaviorData.rightx = Tsupport.translation.x*1000;
		outBehaviorData.righty = Tsupport.translation.y*1000;
		Pose3D Tswing;
		Tswing.rotation = Rsupport;
		Tswing.translate(theNaoStructure->uLink[NaoStructure::lFootLink].p-theNaoStructure->uLink[NaoStructure::bodyLink].p);
		outBehaviorData.leftx = Tswing.translation.x*1000;
		outBehaviorData.lefty = Tswing.translation.y*1000;

	}
	

}
Example #15
template <class PointT>
bool RelativePoseEstimatorICPWithNormals<PointT> ::
computeRegistration(Pose3D& relative_pose,
                    PointCloudConstPtr source_cloud,
                    PointCloudConstPtr target_cloud,
                    PointCloudType& aligned_cloud)
{
#ifndef HAVE_PCL_GREATER_THAN_1_2_0
    return super::computeRegistration(relative_pose, source_cloud, target_cloud, aligned_cloud);
#else
    pcl::IterativeClosestPoint<PointT, PointT> reg;
    typedef pcl::registration::TransformationEstimationPointToPlaneLLS<PointT, PointT> PointToPlane;
    boost::shared_ptr<PointToPlane> point_to_plane (new PointToPlane);
    reg.setTransformationEstimation (point_to_plane);

    // typedef TransformationEstimationRGBD<PointT, PointT> TransformRGBD;
    // boost::shared_ptr<TransformRGBD> transform_rgbd (new TransformRGBD);
    // reg.setTransformationEstimation (transform_rgbd);

#ifdef HAVE_PCL_GREATER_THAN_1_6_0 // rejectors are not well supported before 1.7

    boost::shared_ptr<pcl::registration::CorrespondenceRejectorDistance> rejector_distance (new pcl::registration::CorrespondenceRejectorDistance);
    rejector_distance->setInputSource<PointT>(source_cloud);
    rejector_distance->setInputTarget<PointT>(target_cloud);
    rejector_distance->setMaximumDistance(m_distance_threshold);
    reg.addCorrespondenceRejector(rejector_distance);

    boost::shared_ptr<pcl::registration::CorrespondenceRejectorSurfaceNormal> rejector_normal (new pcl::registration::CorrespondenceRejectorSurfaceNormal);
    rejector_normal->setThreshold(cos(M_PI/4.f));
    rejector_normal->initializeDataContainer<PointT, PointT>();
    rejector_normal->setInputSource<PointT>(source_cloud);
    rejector_normal->setInputTarget<PointT>(target_cloud);
    rejector_normal->setInputNormals<PointT, PointT>(source_cloud);
    rejector_normal->setTargetNormals<PointT, PointT>(target_cloud);
    reg.addCorrespondenceRejector(rejector_normal);

    boost::shared_ptr<pcl::registration::CorrespondenceRejectorOneToOne> rejector_one_to_one (new pcl::registration::CorrespondenceRejectorOneToOne);
    reg.addCorrespondenceRejector(rejector_one_to_one);

    typedef pcl::registration::CorrespondenceRejectorSampleConsensus<PointT> RejectorConsensusT;
    boost::shared_ptr<RejectorConsensusT> rejector_ransac (new RejectorConsensusT());
    rejector_ransac->setInputSource(source_cloud);
    rejector_ransac->setInputTarget(target_cloud);
    rejector_ransac->setInlierThreshold(m_ransac_outlier_threshold);
    rejector_ransac->setMaxIterations(100);
    reg.addCorrespondenceRejector(rejector_ransac);

    boost::shared_ptr<pcl::registration::CorrespondenceRejectorVarTrimmed> rejector_var_trimmed (new pcl::registration::CorrespondenceRejectorVarTrimmed());
    rejector_var_trimmed->setInputSource<PointT>(source_cloud);
    rejector_var_trimmed->setInputTarget<PointT>(target_cloud);
    rejector_var_trimmed->setMinRatio(0.1f);
    rejector_var_trimmed->setMaxRatio(0.75f);
    reg.addCorrespondenceRejector(rejector_var_trimmed);

    boost::shared_ptr<pcl::registration::CorrespondenceRejectorTrimmed> rejector_trimmed (new pcl::registration::CorrespondenceRejectorTrimmed());
    rejector_trimmed->setMinCorrespondences(static_cast<int>(0.1f * source_cloud->size()));
    rejector_trimmed->setOverlapRatio(0.5f);
    reg.addCorrespondenceRejector(rejector_trimmed);
#endif

#if 0
    ntk::Mesh target_mesh;
    pointCloudToMesh(target_mesh, *target_cloud);
    target_mesh.saveToPlyFile("debug_target.ply");

    ntk::Mesh source_mesh;
    pointCloudToMesh(source_mesh, *source_cloud);
    source_mesh.saveToPlyFile("debug_source.ply");
#endif

    reg.setMaximumIterations (m_max_iterations);
    reg.setTransformationEpsilon (1e-10);
    reg.setMaxCorrespondenceDistance (m_distance_threshold);
    reg.setRANSACOutlierRejectionThreshold(m_ransac_outlier_threshold);
#ifdef HAVE_PCL_GREATER_THAN_1_6_0
    reg.setInputSource (source_cloud);
#else
    reg.setInputCloud (source_cloud);
#endif
    reg.setInputTarget (target_cloud);
    reg.align (aligned_cloud);

    if (!reg.hasConverged())
    {
      ntk_dbg(1) << "ICP did not converge, ignoring.";
      return false;
    }

    if (0) // debug: dump the aligned and target clouds as PLY meshes (disabled)
    {
        ntk::Mesh mesh1, mesh2;
        pointCloudToMesh(mesh1, aligned_cloud);
        pointCloudToMesh(mesh2, *target_cloud);
        mesh1.saveToPlyFile("debug_icp_1.ply");
        mesh2.saveToPlyFile("debug_icp_2.ply");
    }

#if 0
    ntk::Mesh debug_mesh;
    pointCloudToMesh(debug_mesh, aligned_cloud);
    debug_mesh.saveToPlyFile("debug_aligned.ply");
#endif

    ntk_dbg_print(reg.getFitnessScore(), 1);

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    relative_pose.setCameraTransform(T);
#endif
    return true;
}
Example #16
void calibrate_kinect_depth(std::vector< DepthCalibrationPoint >& depth_values)
{
    std::vector< std::vector<Point3f> > pattern_points;
    calibrationPattern(pattern_points,
                       global::opt_pattern_width(),  global::opt_pattern_height(), global::opt_square_size(),
                       global::images_list.size());

    for (int i_image = 0; i_image < global::images_list.size(); ++i_image)
    {
        // Generate depth calibration points
        QString filename = global::images_list[i_image];
        QDir cur_image_dir (global::images_dir.absoluteFilePath(filename));
        std::string full_filename = cur_image_dir.absoluteFilePath("raw/color.png").toStdString();
        RGBDImage image;
        OpenniRGBDProcessor processor;
        processor.setFilterFlag(RGBDProcessorFlags::ComputeMapping, true);
        image.loadFromDir(cur_image_dir.absolutePath().toStdString(), &global::calibration, &processor);

        imshow_normalized("mapped depth", image.mappedDepth());
        imshow("color", image.rgb());

        std::vector<Point2f> current_view_corners;
        calibrationCorners(full_filename, "corners",
                           global::opt_pattern_width(), global::opt_pattern_height(),
                           current_view_corners, image.rgb(), 1, global::pattern_type);

        if (current_view_corners.size() != (global::opt_pattern_width()*global::opt_pattern_height()))
        {
            ntk_dbg(1) << "Corners not detected in " << cur_image_dir.absolutePath().toStdString();
            continue;
        }

        // FIXME: why do rvecs and tvecs from calibrateCamera seem to be nonsense?
        // Call findExtrinsics another time to get good chessboard estimations.

        cv::Mat1f H;
        estimate_checkerboard_pose(pattern_points[0],
                                   current_view_corners,
                                   global::calibration.rgb_intrinsics,
                                   H);
        Pose3D pose;
        pose.setCameraParametersFromOpencv(global::calibration.rgb_intrinsics);
        pose.setCameraTransform(H);
        ntk_dbg_print(pose, 1);

        cv::Mat3b debug_img;
        image.rgb().copyTo(debug_img);
        foreach_idx(pattern_i, pattern_points[0])
        {
            Point3f p = pose.projectToImage(pattern_points[0][pattern_i]);
            ntk_dbg_print(p, 1);
            float kinect_raw = image.mappedDepth()(p.y, p.x);
            ntk_dbg_print(kinect_raw, 1);
            if (kinect_raw < 1e-5) continue;
            float err = kinect_raw-p.z;
            cv::putText(debug_img, format("%d", (int)(err*1000)), Point(p.x, p.y), CV_FONT_NORMAL, 0.4, Scalar(255,0,0));
            ntk_dbg_print(pattern_points[0][pattern_i], 1);
            ntk_dbg_print(p, 1);
            ntk_dbg_print(kinect_raw, 1);
            depth_values.push_back(DepthCalibrationPoint(kinect_raw, p.z));
        }
        imshow("errors", debug_img);
        cv::waitKey(0);
    }
}
Example #17
VectorialProjector::VectorialProjector(const Pose3D &pose)
{
    sse_proj = toSSE(pose.cvProjectionMatrix());
    sse_unproj = toSSE(pose.cvInvProjectionMatrix());
}
Example #18
  void MeshRenderer :: computeProjectionMatrix(cv::Mat4b& image, const Pose3D& pose)
  {
    float near_plane, far_plane;
    estimateOptimalPlanes(pose, &near_plane, &far_plane);
    ntk_dbg_print(near_plane, 2);
    ntk_dbg_print(far_plane, 2);
    m_last_near_plane = near_plane;
    m_last_far_plane = far_plane;

    m_pbuffer->makeCurrent();
    glMatrixMode (GL_MODELVIEW);
    glLoadIdentity ();
    cv::Vec3f euler_angles = pose.cvEulerRotation();
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    glRotatef(euler_angles[2]*180.0/M_PI, 0, 0, 1);
    glRotatef(euler_angles[1]*180.0/M_PI, 0, 1, 0);
    glRotatef(euler_angles[0]*180.0/M_PI, 1, 0, 0);

    glMatrixMode (GL_PROJECTION);
    glLoadIdentity ();
    double dx = pose.imageCenterX() - (image.cols / 2.0);
    double dy = pose.imageCenterY() - (image.rows / 2.0);
    glViewport(dx, -dy, image.cols, image.rows);
    //glViewport(0, 0, image.cols, image.rows);
    if (pose.isOrthographic())
    {
      gluOrtho2D(-pose.focalX()/2, pose.focalX()/2, -pose.focalY()/2, pose.focalY()/2);
    }
    else
    {
      double fov = (180.0/M_PI) * 2.0*atan(image.rows/(2.0*pose.focalY()));
      // double fov2 = (180.0/M_PI) * 2.0*atan(image.cols/(2.0*pose.focalX()));
      // ntk_dbg_print(fov2, 2);
      // gluPerspective(fov2,  double(image.rows)/image.cols, near_plane, far_plane);
      gluPerspective(fov, double(image.cols)/image.rows, near_plane, far_plane);
    }

    glMatrixMode (GL_MODELVIEW);
  }