Beispiel #1
0
void Mesh::applyTransform(const Pose3D& pose)
{
    foreach_idx(i, vertices)
            vertices[i] = pose.cameraTransform(vertices[i]);

    ntk::Pose3D normal_pose;
    normal_pose.applyTransformBefore(cv::Vec3f(0.f,0.f,0.f), pose.cvEulerRotation());

    foreach_idx(i, normals)
            normals[i] = normal_pose.cameraTransform(normals[i]);
}
Beispiel #2
0
// Configures the renderer's OpenGL modelview and projection matrices to
// match the camera described by `pose`.
// If both `arg_near_plane` and `arg_far_plane` are provided they are used
// directly; otherwise optimal clipping planes are estimated from the pose.
// The chosen planes are cached in m_last_near_plane / m_last_far_plane.
void MeshRenderer :: setPose(const Pose3D& pose, float* arg_near_plane, float* arg_far_plane)
{
    VertexBufferObject& vbo = m_vertex_buffer_object;
    pose.cvCameraTransform().copyTo(vbo.model_view_matrix);
    // Transpose the matrix for OpenGL column-major.
    vbo.model_view_matrix = vbo.model_view_matrix.t();

    if (!(arg_near_plane && arg_far_plane))
    {
        // At least one plane missing: estimate both from the pose.
        estimateOptimalPlanes(pose, &m_last_near_plane, &m_last_far_plane);
    }
    else
    {
        m_last_near_plane = *arg_near_plane;
        m_last_far_plane = *arg_far_plane;
    }

    // All GL calls below target the offscreen pbuffer context.
    m_pbuffer->makeCurrent();
    glMatrixMode (GL_MODELVIEW);
    glLoadIdentity ();
    cv::Vec3f euler_angles = pose.cvEulerRotation();
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    // Rotations are applied in Z, Y, X order (radians converted to degrees).
    glRotatef(euler_angles[2]*180.0/M_PI, 0, 0, 1);
    glRotatef(euler_angles[1]*180.0/M_PI, 0, 1, 0);
    glRotatef(euler_angles[0]*180.0/M_PI, 1, 0, 0);

    glMatrixMode (GL_PROJECTION);
    glLoadIdentity ();
    // Offset the viewport to account for a principal point that is not at
    // the pbuffer center.
    double dx = pose.imageCenterX() - (m_pbuffer->width() / 2.0);
    double dy = pose.imageCenterY() - (m_pbuffer->height() / 2.0);
    glViewport(dx, -dy, m_pbuffer->width(), m_pbuffer->height());
    if (pose.isOrthographic())
    {
        ntk_dbg_print(pose.focalX()/2, 0);
        ntk_dbg_print(pose.focalY()/2, 0);
        glOrtho(-pose.focalX()/2, pose.focalX()/2, -pose.focalY()/2, pose.focalY()/2, m_last_near_plane, m_last_far_plane);
    }
    else
    {
        // Vertical field of view (degrees) derived from the focal length.
        double fov = (180.0/M_PI) * 2.0*atan(m_pbuffer->height()/(2.0*pose.focalY()));
        // double fov2 = (180.0/M_PI) * 2.0*atan(image.cols/(2.0*pose.focalX()));
        // ntk_dbg_print(fov2, 2);
        // gluPerspective(fov2,  double(image.rows)/image.cols, near_plane, far_plane);
        gluPerspective(fov, double(m_pbuffer->width())/m_pbuffer->height(), m_last_near_plane, m_last_far_plane);
    }

    // Leave the matrix mode as MODELVIEW for subsequent drawing code.
    glMatrixMode (GL_MODELVIEW);
}
Beispiel #3
0
// Handles a camera move event. In normal mode it just forwards to the base
// viewer; in calibration mode it converts the user's delta motion (expressed
// in the visualization frame) into a delta applied to the calibration pose,
// then pushes the updated calibration back to the window.
void CalibrationMeshViewer::onCameraPositionUpdate(const cv::Vec3f &translation, const cv::Vec3f &rotation)
{
    if (!m_calibration_mode)
    {
        MeshViewer::onCameraPositionUpdate(translation, rotation);
        return;
    }

    GLdouble m[16];
    GLdouble deltam[16];

    // Damping factors so mouse motion maps to small calibration changes.
    const float rotation_scale = 0.2;
    const float translation_scale = 0.2;

    // Get the delta transformation is visualization frame.
    // Build it on the GL matrix stack (rotation pivots around the display
    // center), read it back into `deltam`, then restore the previous matrix.
    makeCurrent();
    glMatrixMode(GL_MODELVIEW);
    glGetDoublev(GL_MODELVIEW_MATRIX, m);
    glLoadIdentity();
    glTranslatef(translation_scale*translation[0],translation_scale*translation[1],translation_scale*translation[2]);
    glTranslatef(m_display_center.x,m_display_center.y,m_display_center.z);
    glRotatef(rotation_scale*rotation[0], 0,1,0);
    glRotatef(rotation_scale*rotation[1], 1,0,0);
    glTranslatef(-m_display_center.x,-m_display_center.y,-m_display_center.z);
    glGetDoublev(GL_MODELVIEW_MATRIX, deltam);
    glLoadMatrixd(m);

    // Current calibration as a pose.
    cv::Vec3f t,r;
    window->getCalibration(t, r);
    Pose3D p_old;
    p_old.applyTransformBefore(t, r);

    cv::Mat1d H_old = p_old.cvCameraTransformd();
    // OpenGL matrices are column-major; the headers wrap the raw arrays and
    // the transpose converts them to cv row-major. Assignment from t() makes
    // a fresh matrix, so the stack arrays are no longer referenced afterwards.
    cv::Mat1d H(4,4,(double*)deltam); H = H.t(); // delta rotation AFTER model view matrix
    cv::Mat1d M(4,4,(double*)m); M = M.t(); // model view matrix

    cv::Mat1d Hp = (M.inv() * H * M * H_old.inv()).inv(); // delta rotation BEFORE model view matrix

    Pose3D p;
    p.setCameraTransform(Hp);

    // Push the new extrinsics back to the owning window.
    window->updateFromCalibration(p.cvTranslation(), p.cvEulerRotation());
    window->updateToCalibration();
}
Beispiel #4
0
  // Sets up the OpenGL modelview and projection matrices so that rendering
  // into an image of `image`'s size matches the camera described by `pose`.
  // Near/far clipping planes are estimated from the pose and cached in
  // m_last_near_plane / m_last_far_plane for later reuse.
  void MeshRenderer :: computeProjectionMatrix(cv::Mat4b& image, const Pose3D& pose)
  {
    double near_plane, far_plane;
    estimateOptimalPlanes(pose, &near_plane, &far_plane);
    ntk_dbg_print(near_plane, 2);
    ntk_dbg_print(far_plane, 2);
    // Cache the planes so other methods (e.g. setPose) can reuse them.
    m_last_near_plane = near_plane;
    m_last_far_plane = far_plane;

    // All GL calls below target the offscreen pbuffer context.
    m_pbuffer->makeCurrent();
    glMatrixMode (GL_MODELVIEW);
    glLoadIdentity ();
    cv::Vec3f euler_angles = pose.cvEulerRotation();
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    // Rotations are applied in Z, Y, X order (radians converted to degrees).
    glRotatef(euler_angles[2]*180.0/M_PI, 0, 0, 1);
    glRotatef(euler_angles[1]*180.0/M_PI, 0, 1, 0);
    glRotatef(euler_angles[0]*180.0/M_PI, 1, 0, 0);

    glMatrixMode (GL_PROJECTION);
    glLoadIdentity ();
    // Offset the viewport to account for a principal point that is not at
    // the image center.
    double dx = pose.imageCenterX() - (image.cols / 2.0);
    double dy = pose.imageCenterY() - (image.rows / 2.0);
    glViewport(dx, -dy, image.cols, image.rows);
    if (pose.isOrthographic())
    {
      // FIX: was gluOrtho2D, which hard-codes near=-1/far=1 and silently
      // discarded the planes estimated above, breaking depth testing.
      // Use glOrtho with the estimated planes, matching setPose().
      glOrtho(-pose.focalX()/2, pose.focalX()/2, -pose.focalY()/2, pose.focalY()/2, near_plane, far_plane);
    }
    else
    {
      // Vertical field of view (degrees) derived from the focal length.
      double fov = (180.0/M_PI) * 2.0*atan(image.rows/(2.0*pose.focalY()));
      gluPerspective(fov, double(image.cols)/image.rows, near_plane, far_plane);
    }

    // Leave the matrix mode as MODELVIEW for subsequent drawing code.
    glMatrixMode (GL_MODELVIEW);
  }
// Integrates a new RGB-D view into the surfel model: projects each existing
// surfel into the new depth image, removes or updates it based on depth and
// normal compatibility, and re-buckets surfels whose merged location moved
// to a different spatial cell.
// NOTE(review): this function continues past the visible excerpt (new-surfel
// insertion and the return statement are not shown here).
bool SurfelsRGBDModeler :: addNewView(const RGBDImage& image_, Pose3D& depth_pose)
{
    ntk::TimeCount tc("SurfelsRGBDModeler::addNewView", 1);
    // 90 degrees: effectively only rejects back-facing surfels.
    const float max_camera_normal_angle = ntk::deg_to_rad(90);

    // Work on a local copy; compute normals if the input lacks them.
    RGBDImage image;
    image_.copyTo(image);
    if (!image_.normal().data)
    {
        OpenniRGBDProcessor processor;
        processor.computeNormalsPCL(image);
    }

    // Derive the RGB camera pose from the depth pose via the stereo calibration.
    Pose3D rgb_pose = depth_pose;
    rgb_pose.toRightCamera(image.calibration()->rgb_intrinsics, image.calibration()->R, image.calibration()->T);

    // Rotation-only poses for transforming normal vectors between the world
    // and camera frames (translation must not apply to directions).
    Pose3D world_to_camera_normal_pose;
    world_to_camera_normal_pose.applyTransformBefore(cv::Vec3f(0,0,0), depth_pose.cvEulerRotation());
    Pose3D camera_to_world_normal_pose = world_to_camera_normal_pose;
    camera_to_world_normal_pose.invert();

    const Mat1f& depth_im = image.depth();
    // Marks pixels already explained by an existing surfel.
    Mat1b covered_pixels (depth_im.size());
    covered_pixels = 0;

    std::list<Surfel> surfels_to_reinsert;

    // Surfel updating.
    // `next_it` is advanced before the body so erasing `surfel_it` is safe.
    for (SurfelMap::iterator next_it = m_surfels.begin(); next_it != m_surfels.end(); )
    {
        SurfelMap::iterator surfel_it = next_it;
        ++next_it;

        Surfel& surfel = surfel_it->second;
        if (!surfel.enabled())
            continue;

        // Project the surfel into the new depth image.
        Point3f surfel_2d = depth_pose.projectToImage(surfel.location);
        bool surfel_deleted = false;
        int r = ntk::math::rnd(surfel_2d.y);
        int c = ntk::math::rnd(surfel_2d.x);
        int d = ntk::math::rnd(surfel_2d.z);
        // Skip surfels that fall outside the image or on invalid pixels.
        if (!is_yx_in_range(depth_im, r, c)
                || !image.depthMask()(r, c)
                || !image.isValidNormal(r,c))
            continue;

        // Depth-dependent tolerance for considering measurements compatible.
        const float update_max_dist = getCompatibilityDistance(depth_im(r,c));

        Vec3f camera_normal = image.normal()(r, c);
        normalize(camera_normal);

        Vec3f world_normal = camera_to_world_normal_pose.cameraTransform(camera_normal);
        normalize(world_normal);

        // Angle between the surface normal and the viewing direction.
        Vec3f eyev = camera_eye_vector(depth_pose, r, c);
        double camera_angle = acos(camera_normal.dot(-eyev));

        if (camera_angle > max_camera_normal_angle)
            continue;

        float normal_angle = acos(world_normal.dot(surfel.normal));
        // Surfels have different normals, maybe two different faces of the same object.
        if (normal_angle > (m_update_max_normal_angle*M_PI/180.0))
        {
            // Removal check. If a surfel has a different normal and is closer to the camera
            // than the new scan, remove it.
            if ((-surfel_2d.z) < depth_im(r,c) && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            continue;
        }

        // If existing surfel is far from new depth value:
        // - If existing one had a worst point of view, and was seen only once, remove it.
        // - Otherwise do not include the new one.
        if (std::abs(surfel_2d.z - depth_im(r,c)) > update_max_dist)
        {
            if (surfel.min_camera_angle > camera_angle && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            else
                covered_pixels(r,c) = 1;
            continue;
        }

        // Compatible surfel found.
        const float depth = depth_im(r,c) + m_global_depth_offset;

        // Build a one-view surfel from the new measurement and merge it into
        // the existing one.
        Point3f p3d = depth_pose.unprojectFromImage(Point2f(c,r), depth);
        cv::Vec3b rgb_color = bgr_to_rgb(image.mappedRgb()(r, c));

        Surfel image_surfel;
        image_surfel.location = p3d;
        image_surfel.normal = world_normal;
        image_surfel.color = rgb_color;
        image_surfel.min_camera_angle = camera_angle;
        image_surfel.n_views = 1;
        image_surfel.radius = computeSurfelRadius(depth, camera_normal[2], depth_pose.meanFocal());
        mergeToLeftSurfel(surfel, image_surfel);

        covered_pixels(r,c) = 1;
        // needs to change the cell?
        Cell new_cell = worldToCell(surfel.location);
        if (new_cell != surfel_it->first)
        {
            // Defer reinsertion: mutating the map key in place is not possible.
            surfels_to_reinsert.push_back(surfel);
            m_surfels.erase(surfel_it);
        }
    }

    // Re-bucket the surfels whose merged location moved to another cell.
    foreach_const_it(it, surfels_to_reinsert, std::list<Surfel>)
    {
        Cell new_cell = worldToCell(it->location);
        m_surfels.insert(std::make_pair(new_cell, *it));
    }
Beispiel #6
0
// Compiles `mesh` into a new OpenGL display list, transformed by `pose`,
// and queues the list index in m_upcoming_display_lists.
// Meshes without faces are rendered as a point cloud; otherwise each face
// is drawn as a triangle with a per-face normal. WIREFRAME mode draws
// outlines instead of filled polygons.
void MeshViewer :: addMeshToDisplayList(const ntk::Mesh& mesh, const Pose3D& pose, MeshViewerMode mode)
{
    int new_list_index = glGenLists(1);
    glNewList(new_list_index, GL_COMPILE);
    if (mesh.texture.data)
    {
        // Last texture id was just created
        GLuint texture = m_upcoming_textures[m_upcoming_textures.size()-1];
        glEnable(GL_TEXTURE_2D);
        glBindTexture( GL_TEXTURE_2D, texture );
    }
    else
    {
        glDisable(GL_TEXTURE_2D);
    }
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    // Apply the mesh pose: translation then X, Y, Z rotations.
    glTranslatef(pose.cvTranslation()[0], pose.cvTranslation()[1], pose.cvTranslation()[2]);
    Vec3f euler_angles = pose.cvEulerRotation();
    glRotatef(rad_to_deg(euler_angles[0]), 1, 0, 0);
    glRotatef(rad_to_deg(euler_angles[1]), 0, 1, 0);
    glRotatef(rad_to_deg(euler_angles[2]), 0, 0, 1);

    if (mode & WIREFRAME)
    {
        glPolygonMode( GL_FRONT_AND_BACK, GL_LINE );
    }
    else
    {
        glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
    }

    int64 point_start_time = ntk::Time::getMillisecondCounter();
    if (mesh.faces.size() == 0)
    {
        // No faces: render the vertices as a point cloud.
        glBegin(GL_POINTS);
        for (size_t i = 0; i < mesh.vertices.size(); ++i)
        {
            const Point3f& v = mesh.vertices[i];
            if (mesh.hasColors())
                glColor3f(mesh.colors[i][0]/255.0, mesh.colors[i][1]/255.0, mesh.colors[i][2]/255.0);
            glVertex3f(v.x, v.y, v.z);
        }
        glEnd();
    }
    int64 point_end_time = ntk::Time::getMillisecondCounter();
    ntk_dbg_print(point_end_time-point_start_time, 1);

    {
        glBegin(GL_TRIANGLES);
        for (size_t i = 0; i < mesh.faces.size(); ++i)
        {
            int i1 = mesh.faces[i].indices[0];
            int i2 = mesh.faces[i].indices[1];
            int i3 = mesh.faces[i].indices[2];

            const Point3f& v1 = mesh.vertices[i1];
            const Point3f& v2 = mesh.vertices[i2];
            const Point3f& v3 = mesh.vertices[i3];

            // Per-face normal from the cross product of two edges.
            Vec3f nm = (Vec3f(v2-v1).cross(v3-v2));
            normalize(nm);

            // Default color when the mesh carries none.
            if (!mesh.hasColors())
                glColor3f(1.0f, 0.0f, 0.0f);

            // FIX: glNormal3f must be issued BEFORE glVertex3f — the fixed
            // pipeline captures the *current* normal at each glVertex call.
            // The original code set the normal after the vertex, so every
            // vertex received the previous face's normal (and the very first
            // one received stale state). The normal is per-face, so setting
            // it once before the three vertices is sufficient.
            glNormal3f(nm[0], nm[1], nm[2]);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i1][0]/255.0, mesh.colors[i1][1]/255.0, mesh.colors[i1][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i1].x, mesh.texcoords[i1].y);
            glVertex3f(v1.x, v1.y, v1.z);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i2][0]/255.0, mesh.colors[i2][1]/255.0, mesh.colors[i2][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i2].x, mesh.texcoords[i2].y);
            glVertex3f(v2.x, v2.y, v2.z);

            if (mesh.hasColors())
                glColor3f(mesh.colors[i3][0]/255.0, mesh.colors[i3][1]/255.0, mesh.colors[i3][2]/255.0);
            if (mesh.hasTexcoords())
                glTexCoord2f(mesh.texcoords[i3].x, mesh.texcoords[i3].y);
            glVertex3f(v3.x, v3.y, v3.z);
        }
        glEnd();
    }
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
    glEndList();

    // Queue the compiled list for the next display pass.
    m_upcoming_display_lists.push_back(new_list_index);
}