bool SurfelsRGBDModeler :: addNewView(const RGBDImage& image_, Pose3D& depth_pose)
{
    ntk::TimeCount tc("SurfelsRGBDModeler::addNewView", 1);
    const float max_camera_normal_angle = ntk::deg_to_rad(90);

    RGBDImage image;
    image_.copyTo(image);
    if (!image_.normal().data)
    {
        OpenniRGBDProcessor processor;
        processor.computeNormalsPCL(image);
    }

    Pose3D rgb_pose = depth_pose;
    rgb_pose.toRightCamera(image.calibration()->rgb_intrinsics,
                           image.calibration()->R,
                           image.calibration()->T);

    // Rotation-only poses, used to transform normals between the camera and world frames.
    Pose3D world_to_camera_normal_pose;
    world_to_camera_normal_pose.applyTransformBefore(cv::Vec3f(0,0,0), depth_pose.cvEulerRotation());
    Pose3D camera_to_world_normal_pose = world_to_camera_normal_pose;
    camera_to_world_normal_pose.invert();

    const Mat1f& depth_im = image.depth();
    // Pixels already explained by an existing surfel.
    Mat1b covered_pixels (depth_im.size());
    covered_pixels = 0;

    std::list<Surfel> surfels_to_reinsert;

    // Surfel updating.
    for (SurfelMap::iterator next_it = m_surfels.begin(); next_it != m_surfels.end(); )
    {
        SurfelMap::iterator surfel_it = next_it;
        ++next_it;

        Surfel& surfel = surfel_it->second;
        if (!surfel.enabled())
            continue;

        Point3f surfel_2d = depth_pose.projectToImage(surfel.location);
        bool surfel_deleted = false;
        int r = ntk::math::rnd(surfel_2d.y);
        int c = ntk::math::rnd(surfel_2d.x);
        int d = ntk::math::rnd(surfel_2d.z);
        if (!is_yx_in_range(depth_im, r, c)
                || !image.depthMask()(r, c)
                || !image.isValidNormal(r, c))
            continue;

        const float update_max_dist = getCompatibilityDistance(depth_im(r,c));

        Vec3f camera_normal = image.normal()(r, c);
        normalize(camera_normal);

        Vec3f world_normal = camera_to_world_normal_pose.cameraTransform(camera_normal);
        normalize(world_normal);

        Vec3f eyev = camera_eye_vector(depth_pose, r, c);
        double camera_angle = acos(camera_normal.dot(-eyev));

        if (camera_angle > max_camera_normal_angle)
            continue;

        float normal_angle = acos(world_normal.dot(surfel.normal));
        // Surfels have different normals, maybe two different faces of the same object.
        if (normal_angle > (m_update_max_normal_angle * M_PI / 180.0))
        {
            // Removal check. If a surfel has a different normal and is closer to the camera
            // than the new scan, remove it.
            if ((-surfel_2d.z) < depth_im(r,c) && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            continue;
        }

        // If the existing surfel is far from the new depth value:
        // - If the existing one had a worse point of view and was seen only a few times, remove it.
        // - Otherwise do not include the new one.
        if (std::abs(surfel_2d.z - depth_im(r,c)) > update_max_dist)
        {
            if (surfel.min_camera_angle > camera_angle && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            else
                covered_pixels(r,c) = 1;
            continue;
        }

        // Compatible surfel found: merge the new measurement into it.
        const float depth = depth_im(r,c) + m_global_depth_offset;

        Point3f p3d = depth_pose.unprojectFromImage(Point2f(c,r), depth);
        cv::Vec3b rgb_color = bgr_to_rgb(image.mappedRgb()(r, c));

        Surfel image_surfel;
        image_surfel.location = p3d;
        image_surfel.normal = world_normal;
        image_surfel.color = rgb_color;
        image_surfel.min_camera_angle = camera_angle;
        image_surfel.n_views = 1;
        image_surfel.radius = computeSurfelRadius(depth, camera_normal[2], depth_pose.meanFocal());
        mergeToLeftSurfel(surfel, image_surfel);

        covered_pixels(r,c) = 1;

        // Does the merged surfel need to move to another cell?
        Cell new_cell = worldToCell(surfel.location);
        if (new_cell != surfel_it->first)
        {
            surfels_to_reinsert.push_back(surfel);
            m_surfels.erase(surfel_it);
        }
    }

    foreach_const_it(it, surfels_to_reinsert, std::list<Surfel>)
    {
        Cell new_cell = worldToCell(it->location);
        m_surfels.insert(std::make_pair(new_cell, *it));
    }
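// Usage sketch (not from the original source): one possible way to drive
// addNewView() over a recorded sequence. The per-frame poses are assumed to
// come from an external tracker; integrateSequence() is a hypothetical helper
// written only for illustration.
void integrateSequence(SurfelsRGBDModeler& modeler,
                       const std::vector<RGBDImage>& frames,
                       const std::vector<Pose3D>& depth_poses)
{
    for (size_t i = 0; i < frames.size(); ++i)
    {
        Pose3D depth_pose = depth_poses[i];        // camera-to-world pose of the depth sensor
        modeler.addNewView(frames[i], depth_pose); // merge compatible surfels, drop outliers
    }
}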
void MeshGenerator :: generateSurfelsMesh(const RGBDImage& image,
                                          const Pose3D& depth_pose,
                                          const Pose3D& rgb_pose)
{
    double min_val = 0, max_val = 0;
    if (image.amplitude().data)
        minMaxLoc(image.amplitude(), &min_val, &max_val);

    m_mesh.clear();

    const cv::Mat1f& depth_im = image.depth();
    const cv::Mat1b& mask_im = image.depthMask();
    for_all_rc(depth_im)
    {
        int i_r = r;
        int i_c = c;
        if (!is_yx_in_range(depth_im, i_r, i_c))
            continue;
        if (!mask_im(r,c))
            continue;

        double depth = depth_im(i_r, i_c);
        cv::Point3f p = depth_pose.unprojectFromImage(Point2f(c,r), depth);

        Point3f normal = image.normal().data ? image.normal()(i_r, i_c) : Vec3f(0,0,1);

        Vec3b color (0,0,0);
        if (m_use_color)
        {
            cv::Point3f prgb = rgb_pose.projectToImage(p);
            int i_y = ntk::math::rnd(prgb.y);
            int i_x = ntk::math::rnd(prgb.x);
            if (is_yx_in_range(image.rgb(), i_y, i_x))
            {
                Vec3b bgr = image.rgb()(i_y, i_x);
                color = Vec3b(bgr[2], bgr[1], bgr[0]);
            }
        }
        else
        {
            int g = 0;
            if (image.amplitude().data)
                g = 255.0 * (image.amplitude()(i_r, i_c) - min_val) / (max_val - min_val);
            else
                g = 255 * depth / 10.0;
            color = Vec3b(g,g,g);
        }

        Surfel s;
        s.color = color;
        s.confidence = 0;
        s.location = p;
        s.normal = normal;
        s.n_views = 1;
        double normal_z = std::max(normal.z, 0.5f);
        s.radius = m_resolution_factor * ntk::math::sqrt1_2 * depth / (depth_pose.focalX() * normal_z);
        m_mesh.addSurfel(s);
    }
}
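// Illustration only: the surfel radius assigned above is (up to the resolution
// factor) the metric footprint of roughly one pixel at the given depth,
// 1/sqrt(2) * depth / focal, enlarged when the surface is seen at a grazing
// angle. Clamping normal_z to 0.5 bounds that enlargement to 2x.
// surfelRadius() is a hypothetical standalone restatement of that formula,
// not a function of the library.
float surfelRadius(float depth, float focal_x, float normal_z,
                   float resolution_factor = 1.0f)
{
    const float sqrt1_2 = 0.70710678f;       // 1/sqrt(2)
    float nz = std::max(normal_z, 0.5f);     // clamp grazing angles
    return resolution_factor * sqrt1_2 * depth / (focal_x * nz);
}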
void PlaneEstimator :: estimate(RGBDImage& image, Mat1b& plane_points)
{
    // Passing from 3D to the optimizer
    const cv::Mat3f& normal_image = image.normal();
    const cv::Mat1f& distance_image = image.depth();
    cv::Mat1b& mask_image = image.depthMaskRef();
    cv::Mat1b objfilter;
    mask_image.copyTo(objfilter);

    // Single-channel mask of candidate plane pixels, same size as the image.
    plane_points.create(distance_image.size());
    plane_points = 0;

    if (!image.normal().data)
    {
        ntk_dbg(0) << "WARNING: you must activate the normal filter to get plane estimation!";
        return;
    }

    double min[dim];
    double max[dim];
    int i;

    for (i = 0; i < dim; i++)
    {
        max[i] = 1000.0;
        min[i] = -1000.0;
    }

    m_solver.Setup(min, max, DifferentialEvolutionSolver::stBest1Exp, 0.8, 0.75);

    // Early estimation of plane points projecting the normal values
    for (int r = 1; r < plane_points.rows-1; ++r)
    for (int c = 1; c < plane_points.cols-1; ++c)
    {
        if (objfilter.data && objfilter(r,c))
        {
            cv::Vec3f normal = normal_image(r,c);
            double prod = normal.dot(m_ref_plane);
            if (prod > 0.95)
                plane_points(r,c) = 255;
            else
                plane_points(r,c) = 0;
        }
    }

    // Cleaning of the very first surface estimation
    dilate(plane_points, plane_points, cv::Mat());
    erode(plane_points, plane_points, cv::Mat());
    //imwrite("plane-initial.png", plane_points);

    std::vector<Point3f>& g = m_solver.planePointsRef();
    g.clear();
    for (int r = 1; r < plane_points.rows-1; ++r)
    for (int c = 1; c < plane_points.cols-1; ++c)
    {
        if (plane_points(r,c))
        {
            // Possible table member!
            Point3f p3d = image.calibration()->depth_pose->unprojectFromImage(Point2f(c,r), distance_image(r,c));
            g.push_back(p3d);
        }
    }

    // Calculating...
    m_solver.Solve(max_generations);

    double *solution = m_solver.Solution();

    // Plane normalizer
    float suma = solution[0] + solution[1] + solution[2] + solution[3];
    for (int i = 0; i < 4; i++)
        solution[i] = solution[i] / suma;

    ntk::Plane plano (solution[0], solution[1], solution[2], solution[3]);

    // Update the RGBD object
    m_plane.set(solution[0], solution[1], solution[2], solution[3]);

    // Final estimation of plane points projecting the normal values
    cv::Vec3f diffevolnormal(solution[0], solution[1], solution[2]);

    for (int r = 1; r < plane_points.rows-1; ++r)
    for (int c = 1; c < plane_points.cols-1; ++c)
    {
        if (objfilter.data && objfilter(r,c))
        {
            cv::Vec3f normal = normal_image(r,c);
            double prod = normal.dot(diffevolnormal);
            if (prod > 0.5)
                plane_points(r,c) = 255;
            else
                plane_points(r,c) = 0;
        }
    }

    // Cleaning of the DE plane-pixels solution
    dilate(plane_points, plane_points, cv::Mat());
    erode(plane_points, plane_points, cv::Mat());
    // imwrite("plane-DE.png", plane_points);
}
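// Usage sketch (assumption, not part of the original class): once estimate()
// has filled m_plane with coefficients (a, b, c, d) of a*x + b*y + c*z + d = 0,
// a common follow-up is to measure how far 3D points lie from that plane,
// e.g. to segment objects standing on a table. signedDistanceToPlane() is a
// local helper written for illustration, not an ntk::Plane method.
static double signedDistanceToPlane(const double solution[4], const cv::Point3f& p)
{
    double norm = std::sqrt(solution[0]*solution[0]
                          + solution[1]*solution[1]
                          + solution[2]*solution[2]);
    return (solution[0]*p.x + solution[1]*p.y + solution[2]*p.z + solution[3]) / norm;
}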