Example No. 1
void PoseRT::computeMeanPose(const std::vector<PoseRT> &poses, PoseRT &meanPose)
{
  meanPose = PoseRT();
  if (poses.empty())
  {
    return;
  }

  Mat meanTvec = meanPose.tvec;
  Mat meanRotationMatrix = meanPose.getRotationMatrix();
  for (size_t i = 0; i < poses.size(); ++i)
  {
    meanTvec += poses[i].tvec;
    meanRotationMatrix += poses[i].getRotationMatrix();
  }
  meanTvec /= poses.size();
  meanRotationMatrix /= poses.size();

  // The element-wise average of rotation matrices is generally not orthogonal,
  // so project it back onto SO(3) via SVD and keep R = U * Vt.
  Mat w, u, vt;
  SVD::compute(meanRotationMatrix, w, u, vt, SVD::FULL_UV);
  Mat meanOrthogonalRotationMatrix = u * vt;

  meanPose.tvec = meanTvec;
  meanPose.setRotation(meanOrthogonalRotationMatrix);
}
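A minimal usage sketch for the averaging routine above. The detections vector and its contents are hypothetical, and computeMeanPose is assumed to be a static member (as computeDistance appears to be in Example No. 6):

void averageDetections(const std::vector<PoseRT> &detections)
{
  // Hypothetical helper: detections would be several pose estimates of the
  // same object; translations are averaged directly and the averaged
  // rotation matrix is re-orthogonalized inside computeMeanPose.
  PoseRT meanPose;
  PoseRT::computeMeanPose(detections, meanPose);
  std::cout << "mean tvec norm: " << cv::norm(meanPose.tvec) << std::endl;
}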
Example No. 2
PoseRT PoseRT::operator*(const PoseRT &pose) const
{
  PoseRT result;
  // composeRT applies (pose.rvec, pose.tvec) first and then (rvec, tvec),
  // so (*this) * pose transforms points by the right-hand pose first.
  composeRT(pose.getRvec(), pose.getTvec(), rvec, tvec, result.rvec, result.tvec);
  //composeRT(rvec, tvec, pose.getRvec(), pose.getTvec(), result.rvec, result.tvec);
  return result;
}
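A hypothetical sanity check of the composition order, using the project3dPoints helper that appears in publishPoints further down; the input poses and points are placeholders:

std::vector<cv::Point3f> points, viaComposition, step1, viaSequential;
PoseRT first, second;   // hypothetical poses, e.g. loaded with PoseRT::read()
PoseRT composed = second * first;
project3dPoints(points, composed.getRvec(), composed.getTvec(), viaComposition);
project3dPoints(points, first.getRvec(), first.getTvec(), step1);
project3dPoints(step1, second.getRvec(), second.getTvec(), viaSequential);
// viaComposition and viaSequential should agree up to numerical noise.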
void TODBaseImporter::importOcclusionObjects(const std::string &modelsPath,
                                             std::vector<EdgeModel> &occlusionObjects, std::vector<PoseRT> &occlusionOffsets) const
{
  //TODO: move up
  const string occlusionPrefix = "occlusion_";
  const string occlusionPostFix = ".xml";
  //TODO: port to other systems too
  DIR *directory = opendir(testFolder.c_str());
  CV_Assert(directory != 0);

  occlusionObjects.clear();
  for (dirent *entry = readdir(directory); entry != 0; entry = readdir(directory))
  {
    string filename = entry->d_name;
    if (filename.substr(0, occlusionPrefix.length()) != occlusionPrefix)
    {
      continue;
    }

    int objectNameLength = static_cast<int>(filename.length()) - static_cast<int>(occlusionPostFix.length()) - static_cast<int>(occlusionPrefix.length());
    string objectName = filename.substr(occlusionPrefix.length(), objectNameLength);

    EdgeModel edgeModel;
    importEdgeModel(modelsPath, objectName, edgeModel);
    occlusionObjects.push_back(edgeModel);

    PoseRT offset;
    offset.read(testFolder + "/" + filename);
    occlusionOffsets.push_back(offset);
  }
  closedir(directory);
}
void publishPoints(const std::vector<cv::Point3f>& points, const boost::shared_ptr<pcl::visualization::PCLVisualizer> &viewer, cv::Scalar color, const std::string &title, const PoseRT &pose)
{
  vector<Point3f> rotatedPoints;
  project3dPoints(points, pose.getRvec(), pose.getTvec(), rotatedPoints);
  pcl::PointCloud<pcl::PointXYZ> pclPoints;
  cv2pcl(rotatedPoints, pclPoints);

  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> pointsColor(pclPoints.makeShared(), color[2], color[1], color[0]);
  viewer->addPointCloud<pcl::PointXYZ>(pclPoints.makeShared(), pointsColor, title);
}
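A hedged usage sketch for publishPoints; the viewer setup follows the standard PCL API, while modelPoints and modelPose are hypothetical inputs:

boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("poses"));
std::vector<cv::Point3f> modelPoints;   // hypothetical: e.g. points of an EdgeModel
PoseRT modelPose;                       // hypothetical: an estimated object pose
publishPoints(modelPoints, viewer, cv::Scalar(0, 0, 255), "model", modelPose);  // BGR scalar, so the cloud is drawn in red
viewer->spin();                         // standard PCL call that runs the visualization loop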
Example No. 5
void poseinfo(string path) {
    FileStorage in(path, FileStorage::READ);
    PoseRT pose;
    pose.read(in[PoseRT::YAML_NODE_NAME]);
    in.release();
    cout << boost::format("%10.7f %10.7f %10.7f %10.7f %10.7f %10.7f")
         % pose.tvec.at<double>(0, 0)
         % pose.tvec.at<double>(1, 0)
         % pose.tvec.at<double>(2, 0)
         % pose.rvec.at<double>(0, 0)
         % pose.rvec.at<double>(1, 0)
         % pose.rvec.at<double>(2, 0) << endl;
}
Example No. 6
void PoseRT::computeDistance(const PoseRT &pose1, const PoseRT &pose2, double &rotationDistance, double &translationDistance, const cv::Mat &Rt_obj2cam)
{
  Mat Rt_diff_cam = pose1.getProjectiveMatrix() * pose2.getProjectiveMatrix().inv(DECOMP_SVD);
  Mat Rt_diff_obj = Rt_diff_cam;
  if (!Rt_obj2cam.empty())
  {
    Rt_diff_obj = Rt_obj2cam.inv(DECOMP_SVD) * Rt_diff_cam * Rt_obj2cam;
  }

  Mat rvec_diff_obj, tvec_diff_obj;
  getRvecTvec(Rt_diff_obj, rvec_diff_obj, tvec_diff_obj);
  rotationDistance = norm(rvec_diff_obj);
  translationDistance = norm(tvec_diff_obj);
}
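A short, hypothetical sketch of comparing an estimated pose against ground truth with the function above; passing Rt_obj2cam measures the difference in the object coordinate frame, while an empty Mat keeps it in the camera frame:

double rotationDiff, translationDiff;
// est_cam and ground_cam are hypothetical estimated / ground-truth poses;
// edgeModel.Rt_obj2cam is the object-to-camera transform used in the examples below.
PoseRT::computeDistance(est_cam, ground_cam, rotationDiff, translationDiff, edgeModel.Rt_obj2cam);
std::cout << "rotation error (deg): " << rotationDiff * 180.0 / CV_PI
          << ", translation error: " << translationDiff << std::endl;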
void evaluatePoseWithRotation(const EdgeModel &originalEdgeModel, const PoseRT &est_cam, const PoseRT &ground_cam, PoseError &poseError)
{
  EdgeModel groundModel, estimatedModel;
  originalEdgeModel.rotate_cam(ground_cam, groundModel);
  originalEdgeModel.rotate_cam(est_cam, estimatedModel);

  const double eps = 1e-4;
  CV_Assert(groundModel.hasRotationSymmetry);
  CV_Assert(estimatedModel.hasRotationSymmetry);

  Point3d tvecPoint = groundModel.getObjectCenter() - estimatedModel.getObjectCenter();
  double tvecDiff = norm(tvecPoint);

  double cosAngle = groundModel.upStraightDirection.ddot(estimatedModel.upStraightDirection);
  cosAngle = std::min(cosAngle, 1.0);
  cosAngle = std::max(cosAngle, -1.0);
  double hartleyDiff = acos(cosAngle);

  PoseRT diff_cam = est_cam * ground_cam.inv();
  Mat Rt_diff_obj = groundModel.Rt_obj2cam.inv(DECOMP_SVD) * diff_cam.getProjectiveMatrix() * groundModel.Rt_obj2cam;
  Mat rvec_diff_obj, tvec_diff_obj;
  getRvecTvec(Rt_diff_obj, rvec_diff_obj, tvec_diff_obj);

/*
    point2col(tvecPoint, *tvec);
    Point3d rvecPoint = estimatedModel.rotationAxis.cross(groundModel.rotationAxis);
    rvecPoint *= hartleyDiff / norm(rvecPoint);
    point2col(rvecPoint, *rvec);
*/
  poseError.init(PoseRT(rvec_diff_obj, tvec_diff_obj), hartleyDiff, tvecDiff);

/*

  Point3d zRvecPoint_obj = estimatedModel.rotationAxis.cross(groundModel.rotationAxis);

  CV_Assert(norm(zRvecPoint_obj) > eps);
  zRvecPoint_obj *= hartleyDiff / norm(zRvecPoint_obj);
  Mat zRvec_obj;
  point2col(zRvecPoint_obj, zRvec_obj);
  const int dim = 3;
  Point3d tvecPoint_obj = groundModel.getObjectCenter() - estimatedModel.getObjectCenter();

  zRvec_obj = Mat::zeros(dim, 1, CV_64FC1);

  Mat zTvec_obj;
  point2col(tvecPoint_obj, zTvec_obj);

  //zTvec_obj = Mat::zeros(dim, 1, CV_64FC1);

  PoseRT zPose_obj = PoseRT(zRvec_obj, zTvec_obj);
  Mat withoutZRotation_Rt = estimatedModel.Rt_obj2cam * zPose_obj.getProjectiveMatrix() * estimatedModel.Rt_obj2cam.inv(DECOMP_SVD) * est_cam.getProjectiveMatrix();
  PoseRT withoutZRotationPose = PoseRT(withoutZRotation_Rt);


  double xyRotationDiff, xyTranslationDiff;
  PoseRT::computeDistance(ground_cam, withoutZRotationPose, xyRotationDiff, xyTranslationDiff, groundModel.Rt_obj2cam);
  //PoseRT::computeDistance(ground_cam, withoutZRotationPose, xyRotationDiff, xyTranslationDiff);
  cout << "xy: " << xyTranslationDiff << " " << xyRotationDiff * 180.0 / CV_PI<< endl;
*/
}
Example No. 8
void PoseRT::computeObjectDistance(const PoseRT &pose1, const PoseRT &pose2, double &rotationDistance, double &translationDistance)
{
  PoseRT diff_cam = pose1.inv() * pose2;

  rotationDistance = norm(diff_cam.getRvec());
  translationDistance = norm(diff_cam.getTvec());
}
void evaluatePose(const EdgeModel &testEdgeModel, const cv::Mat &rvec_est_cam, const cv::Mat &tvec_est_cam, const PoseRT &ground_cam, string prefix = "")
{
  // Note: the Hausdorff distance is not actually computed here; a placeholder of 0 is printed.
  double distance = 0;
  std::cout << prefix + "Hausdorff distance: " << distance << endl;

  Mat Rt_est_cam2test;
  createProjectiveMatrix(rvec_est_cam, tvec_est_cam, Rt_est_cam2test);

  Mat Rt_diff_cam = ground_cam.getProjectiveMatrix() * Rt_est_cam2test.inv(DECOMP_SVD);
  Mat Rt_obj2cam = testEdgeModel.Rt_obj2cam;
  Mat Rt_diff_obj = Rt_obj2cam.inv(DECOMP_SVD) * Rt_diff_cam * Rt_obj2cam;


  Mat rvec_diff_obj, tvec_diff_obj;
  getRvecTvec(Rt_diff_obj, rvec_diff_obj, tvec_diff_obj);
  double hartleyDiff = norm(rvec_diff_obj);


  const double radians2degrees = 180.0 / CV_PI;
  cout << "Hartley diff (deg): " << radians2degrees * hartleyDiff << endl;
  cout << "Angle diff (deg): " << 0 << endl;
  cout << "Normal diff (deg): " << 0 << endl;
  cout << "Tvec diff: " << norm(tvec_diff_obj) << endl;
}
void PinholeCamera::projectPoints(const std::vector<cv::Point3f> &points, const PoseRT &pose_cam, std::vector<cv::Point2f> &projectedPoints) const
{
  PoseRT fullPose = extrinsics * pose_cam;
  cv::projectPoints(Mat(points), fullPose.getRvec(), fullPose.getTvec(), cameraMatrix, distCoeffs, projectedPoints);
}
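A brief usage sketch, assuming a PinholeCamera instance named camera is already available (its construction is not shown in these examples):

std::vector<cv::Point3f> objectPoints;   // hypothetical 3D model points
std::vector<cv::Point2f> imagePoints;
PoseRT pose_cam;                         // hypothetical object pose in the camera frame
camera.projectPoints(objectPoints, pose_cam, imagePoints);
// imagePoints now holds the 2D projections under the camera's intrinsics, distortion, and extrinsics.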
void TODBaseImporter::importOffset(PoseRT &offset) const
{
  //TODO: move up
  const string offsetFilename = "offset.xml";
  offset.read(testFolder + "/" + offsetFilename);
}