// Refine the extrinsic calibration of every extra camera against the first
// one using ICP on clouds with normals, and broadcast the updated parameters.
//
// @param frames_ Synchronized frame vectors; only the first frame vector is
//                used as the calibration snapshot.
//
// Cameras without calibration are skipped with a warning; the first camera
// must be calibrated since it serves as the reference frame.
void CalibratorBlock::calibrateWithICP(FrameVectorVectorConstPtr frames_)
{
    FrameVectorConstPtr frames = frames_->frames[0];

    // With a single device there is nothing to align.
    if (frames->images.size() < 2)
    {
        ntk_dbg(0) << "Only one device, no need for alignment!";
        return;
    }

    // Copy each calibrated input image and compute per-pixel normals,
    // required by the normal-based ICP estimator below.
    std::vector<RGBDImage> images (frames->images.size());
    OpenniRGBDProcessor processor;
    foreach_idx(i, images)
    {
        if (!frames->images[i]->calibration())
            continue; // uncalibrated entries stay empty and are skipped later.
        frames->images[i]->copyTo(images[i]);
        processor.computeNormalsPCL(images[i]);
    }

    // The first camera is the ICP target (reference); it must be calibrated.
    // FIX: the original message contained a mis-encoded apostrophe ("can´t").
    if (!images[0].calibration())
    {
        ntk_dbg(0) << "The first camera " << frames->images[0]->cameraSerial()
                   << " does not have calibration. It can't be the reference, returning.";
        return;
    }

    for (size_t i = 1; i < images.size(); ++i)
    {
        if (!images[i].calibration())
        {
            ntk_dbg(0) << "Warning: camera " << images[i].cameraSerial()
                       << " does not have calibration.";
            continue;
        }

        // Align camera i (source) onto the reference camera 0 (target).
        RelativePoseEstimatorICPWithNormals<pcl::PointNormal> estimator;
        estimator.setDistanceThreshold(0.05); // max correspondence distance -- presumably meters, TODO confirm
        estimator.setVoxelSize(0.01);         // downsampling voxel size for ICP
        estimator.setTargetImage(images[0]);
        estimator.setSourceImage(images[i]);

        bool ok = estimator.estimateNewPose();
        if (!ok)
            continue; // alignment failed; keep this camera's previous calibration.

        // Compose the estimated correction with the current depth pose and
        // broadcast the refined extrinsics for this camera.
        Pose3D new_pose = *images[i].calibration()->depth_pose;
        new_pose.applyTransformBefore(estimator.estimatedSourcePose());

        CalibrationParametersPtr params (new CalibrationParameters);
        params->new_t = new_pose.cvTranslation();
        params->new_r = new_pose.cvEulerRotation();
        params->camera_serial = images[i].cameraSerial();
        params->calibration = images[i].calibration();
        broadcastEvent(params);
    }
}
// Integrate a new RGB-D view into the surfel map: project every existing
// surfel into the new depth image, merge compatible observations, remove
// weakly-supported conflicting surfels, and reinsert surfels whose merged
// location moved them into a different spatial cell.
//
// @param image_     Input RGB-D frame (copied; normals computed if missing).
// @param depth_pose Pose of the depth camera for this frame.
//
// NOTE(review): the function continues beyond this excerpt (new-surfel
// insertion and the final return are not visible here).
bool SurfelsRGBDModeler :: addNewView(const RGBDImage& image_, Pose3D& depth_pose)
{
    ntk::TimeCount tc("SurfelsRGBDModeler::addNewView", 1);

    // Reject surfels seen at a grazing angle beyond this threshold.
    const float max_camera_normal_angle = ntk::deg_to_rad(90);

    // Work on a local copy so we can compute normals without touching image_.
    RGBDImage image;
    image_.copyTo(image);
    if (!image_.normal().data)
    {
        OpenniRGBDProcessor processor;
        processor.computeNormalsPCL(image);
    }

    // RGB camera pose derived from the depth pose via the stereo extrinsics.
    Pose3D rgb_pose = depth_pose;
    rgb_pose.toRightCamera(image.calibration()->rgb_intrinsics, image.calibration()->R, image.calibration()->T);

    // Rotation-only pose (translation zeroed) used to transform normals
    // between camera and world frames.
    Pose3D world_to_camera_normal_pose;
    world_to_camera_normal_pose.applyTransformBefore(cv::Vec3f(0,0,0), depth_pose.cvEulerRotation());
    Pose3D camera_to_world_normal_pose = world_to_camera_normal_pose;
    camera_to_world_normal_pose.invert();

    const Mat1f& depth_im = image.depth();
    // Marks pixels already explained by an existing surfel.
    Mat1b covered_pixels (depth_im.size());
    covered_pixels = 0;

    // Surfels whose merged location changed cell; reinserted after the loop
    // to avoid mutating the map mid-iteration.
    std::list<Surfel> surfels_to_reinsert;

    // Surfel updating.
    for (SurfelMap::iterator next_it = m_surfels.begin(); next_it != m_surfels.end(); )
    {
        // Advance next_it first so erasing surfel_it stays safe.
        SurfelMap::iterator surfel_it = next_it;
        ++next_it;
        Surfel& surfel = surfel_it->second;
        if (!surfel.enabled())
            continue;

        // Project the surfel into the new depth image.
        Point3f surfel_2d = depth_pose.projectToImage(surfel.location);
        bool surfel_deleted = false;
        int r = ntk::math::rnd(surfel_2d.y);
        int c = ntk::math::rnd(surfel_2d.x);
        int d = ntk::math::rnd(surfel_2d.z);
        // Skip surfels projecting outside the image or onto invalid pixels.
        if (!is_yx_in_range(depth_im, r, c) || !image.depthMask()(r, c) || !image.isValidNormal(r,c))
            continue;

        // Depth-dependent tolerance for considering observations compatible.
        const float update_max_dist = getCompatibilityDistance(depth_im(r,c));

        Vec3f camera_normal = image.normal()(r, c);
        normalize(camera_normal);
        Vec3f world_normal = camera_to_world_normal_pose.cameraTransform(camera_normal);
        normalize(world_normal);

        // Angle between the surface normal and the viewing direction;
        // grazing observations are rejected.
        Vec3f eyev = camera_eye_vector(depth_pose, r, c);
        double camera_angle = acos(camera_normal.dot(-eyev));
        if (camera_angle > max_camera_normal_angle)
            continue;

        float normal_angle = acos(world_normal.dot(surfel.normal));
        // Surfels have different normals, maybe two different faces of the same object.
        if (normal_angle > (m_update_max_normal_angle*M_PI/180.0))
        {
            // Removal check. If a surfel has a different normal and is closer to the camera
            // than the new scan, remove it.
            if ((-surfel_2d.z) < depth_im(r,c) && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            continue;
        }

        // If existing surfel is far from new depth value:
        // - If existing one had a worst point of view, and was seen only once, remove it.
        // - Otherwise do not include the new one.
        if (std::abs(surfel_2d.z - depth_im(r,c)) > update_max_dist)
        {
            if (surfel.min_camera_angle > camera_angle && surfel.n_views < 3)
            {
                m_surfels.erase(surfel_it);
                surfel_deleted = true;
            }
            else
                covered_pixels(r,c) = 1;
            continue;
        }

        // Compatible surfel found.
        // Build an observation surfel from the current pixel and merge it
        // into the existing one.
        const float depth = depth_im(r,c) + m_global_depth_offset;
        Point3f p3d = depth_pose.unprojectFromImage(Point2f(c,r), depth);
        cv::Vec3b rgb_color = bgr_to_rgb(image.mappedRgb()(r, c));

        Surfel image_surfel;
        image_surfel.location = p3d;
        image_surfel.normal = world_normal;
        image_surfel.color = rgb_color;
        image_surfel.min_camera_angle = camera_angle;
        image_surfel.n_views = 1;
        image_surfel.radius = computeSurfelRadius(depth, camera_normal[2], depth_pose.meanFocal());
        mergeToLeftSurfel(surfel, image_surfel);

        covered_pixels(r,c) = 1;

        // needs to change the cell?
        Cell new_cell = worldToCell(surfel.location);
        if (new_cell != surfel_it->first)
        {
            surfels_to_reinsert.push_back(surfel);
            m_surfels.erase(surfel_it);
        }
    }

    // Reinsert moved surfels under their new spatial cell keys.
    foreach_const_it(it, surfels_to_reinsert, std::list<Surfel>)
    {
        Cell new_cell = worldToCell(it->location);
        m_surfels.insert(std::make_pair(new_cell, *it));
    }