void LKTracker::UpdatePointsOnModelAfterDerivatives(const Pose &pose){ for (size_t i = 0; i < tracked_points_.size(); ++i){ if (tracked_points_[i].frame_point[1] >= 0 && tracked_points_[i].frame_point[0] >= 0){ cv::Vec3f in_camera_coords = pose.TransformPoint(tracked_points_[i].model_point); cv::Point2f on_image_plane = camera_->ProjectPoint(cv::Point3f(in_camera_coords)); tracked_points_[i].frame_point = on_image_plane; } } }
std::vector<float> LKTracker::GetPointDerivative(const cv::Vec3f &world_previous, const cv::Vec2f &image_previous, const cv::Vec2f &image_new, const Pose &pose){
  // Computes, for each pose degree of freedom, the derivative of the L2
  // reprojection error |image_new - image_previous| with respect to that DOF.
  // Also accumulates the point's reprojection error into current_error_.
  // Returns a vector of pose.GetNumDofs() derivatives (all zero when the
  // reprojection error is negligible).

  // Transform the tracked point into camera coordinates under the current pose.
  cv::Vec3f camera_coordinates = pose.TransformPoint(world_previous);
  // Guard against division by zero for points on the camera plane.
  if (camera_coordinates[2] == 0.0f) camera_coordinates[2] = 0.001f;

  // FIX: this is the 1/z^2 factor of the pinhole projection Jacobian
  //   d(fx * x / z)/dL = fx * (z*dx - x*dz) / z^2,
  // but it was previously computed as 1/z, which scaled every derivative by
  // an extra factor of z. The variable name already said "z inverse squared".
  const float z_inv_sq = 1.0f / (camera_coordinates[2] * camera_coordinates[2]);

  // Per-DOF derivatives of the camera-space point, from the pose model.
  std::vector<ci::Vec3f> jacs = pose.ComputeJacobian(ci::Vec3f(camera_coordinates[0], camera_coordinates[1], camera_coordinates[2]));

  std::vector<float> jacobians(pose.GetNumDofs(), 0.0f);

  const float x_error = image_new[0] - image_previous[0];
  const float y_error = image_new[1] - image_previous[1];

  // Zero reprojection error: the gradient is zero, and the 1/sqrt term below
  // would divide by zero, so return the zero vector early.
  if (std::abs(x_error) < std::numeric_limits<float>::epsilon() &&
      std::abs(y_error) < std::numeric_limits<float>::epsilon())
    return jacobians;

  // Loop-invariant (hoisted out of the DOF loop): derivative of sqrt(e)
  // with respect to e, evaluated at the squared reprojection error.
  const float inv_sqrt = 1.0f / (2.0f * std::sqrt(x_error * x_error + y_error * y_error));

  for (int dof = 0; dof < pose.GetNumDofs(); ++dof){
    const ci::Vec3f ddi = jacs[dof];
    const cv::Vec3f dof_derivatives(ddi[0], ddi[1], ddi[2]);
    // Pinhole projection derivatives of the image x and y coordinates.
    const float dXdL = camera_->Fx() * (z_inv_sq * ((camera_coordinates[2] * dof_derivatives[0]) - (camera_coordinates[0] * dof_derivatives[2])));
    const float dYdL = camera_->Fy() * (z_inv_sq * ((camera_coordinates[2] * dof_derivatives[1]) - (camera_coordinates[1] * dof_derivatives[2])));
    // Chain rule through the squared-error terms (negative sign: the error
    // shrinks as the projection moves toward the newly tracked point).
    const float dPxdL = -2.0f * x_error * dXdL;
    const float dPydL = -2.0f * y_error * dYdL;
    jacobians[dof] = inv_sqrt * (dPxdL + dPydL);
  }

  // Accumulate this point's reprojection error for convergence monitoring.
  current_error_ += std::sqrt(x_error * x_error + y_error * y_error);
  return jacobians;
}