Example #1
const double CMiniVisionToolbox::getTransformationDelta( const Eigen::Isometry3d& p_matTransformationFrom, const Eigen::Isometry3d& p_matTransformationTo )
{
    //ds compute pose change
    const Eigen::Isometry3d matTransformChange( p_matTransformationTo*p_matTransformationFrom.inverse( ) );

    //ds check point
    const Eigen::Vector4d vecSamplePoint( 40.2, -1.25, 2.5, 1.0 );
    double dNorm( vecSamplePoint.norm( ) );
    const Eigen::Vector4d vecDifference( vecSamplePoint-matTransformChange*vecSamplePoint );
    dNorm = ( dNorm + vecDifference.norm( ) )/2;

    //ds return norm
    return ( std::fabs( vecDifference(0) ) + std::fabs( vecDifference(1) ) + std::fabs( vecDifference(2) ) )/dNorm;
}
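
For reference, here is a standalone sketch (not part of CMiniVisionToolbox) that runs the same delta metric on two hand-made poses; the fixed sample point and the norm averaging mirror the function above.

#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

int main( )
{
    //ds two poses differing by a small translation along x
    Eigen::Isometry3d matFrom( Eigen::Isometry3d::Identity( ) );
    Eigen::Isometry3d matTo( Eigen::Isometry3d::Identity( ) );
    matTo.translation( ) << 0.1, 0.0, 0.0;

    //ds same computation as getTransformationDelta above
    const Eigen::Isometry3d matChange( matTo*matFrom.inverse( ) );
    const Eigen::Vector4d vecSamplePoint( 40.2, -1.25, 2.5, 1.0 );
    double dNorm( vecSamplePoint.norm( ) );
    const Eigen::Vector4d vecDifference( vecSamplePoint-matChange*vecSamplePoint );
    dNorm = ( dNorm + vecDifference.norm( ) )/2;

    //ds small for small pose changes, larger for bigger ones
    std::cout << ( std::fabs( vecDifference(0) ) + std::fabs( vecDifference(1) ) + std::fabs( vecDifference(2) ) )/dNorm << std::endl;
    return 0;
}
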
Example #2
 inline sm::kinematics::Transformation transformationFromPropertyTree(const sm::ConstPropertyTree & config)
 {
     Eigen::Vector4d q = sm::eigen::quaternionFromPropertyTree( sm::ConstPropertyTree(config, "q_a_b") );
     q = q / q.norm();
     Eigen::Vector3d t = sm::eigen::vector3FromPropertyTree( sm::ConstPropertyTree(config, "t_a_b_a" ) );
     return sm::kinematics::Transformation(q,t);
 }
Example #3
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW /* Structures containing
                                       "fixed-sized vectorizable" Eigen classes
                                       need proper memory alignment */
    Ray(const Eigen::Vector4d &orig, const Eigen::Vector4d &direction, const int fromObj=ID_AIR) :
        origin(orig), dir(direction), fromObj(fromObj) {
        BOOST_ASSERT_MSG(orig[3] == 1,
                         "Origin of ray must have 4th coord equal to 1");
        BOOST_ASSERT_MSG(direction[3] == 0,
                         "Direction of ray must have 4th coord equal to 0");
        BOOST_ASSERT_MSG(std::abs(direction.norm() - 1) < EPSILON, "Require unit direction vector!");
    }
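
A hedged usage sketch for the constructor above, assuming the enclosing Ray type (with its origin/dir/fromObj members and the ID_AIR default) is in scope; the homogeneous coordinates and the unit-length direction are exactly what the asserts require.

Eigen::Vector4d origin(0.0, 0.0, 0.0, 1.0);     // point: 4th coord must be 1
Eigen::Vector4d direction(1.0, 2.0, 2.0, 0.0);  // direction: 4th coord must be 0
direction.normalize();                          // asserts demand a unit direction
Ray ray(origin, direction);                     // fromObj defaults to ID_AIR
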
Example #4
void randomUnitQuaternion(Eigen::Vector4d &quat) {
    static boost::random::mt19937 rng(time(NULL));
    static boost::random::normal_distribution<> normal;

    do {
        quat(0) = normal(rng);
        quat(1) = normal(rng);
        quat(2) = normal(rng);
        quat(3) = normal(rng);
    } while (quat.norm() < 0.00001);
    quat.normalize();
}
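
Normalizing an isotropic 4D Gaussian sample yields rotations uniformly distributed over the unit quaternions, which is why the loop only has to reject near-zero norms. A hypothetical call site, assuming Eigen and <iostream> are included and randomUnitQuaternion above is declared:

Eigen::Vector4d quat;
randomUnitQuaternion(quat);
std::cout << quat.transpose() << "  norm = " << quat.norm() << std::endl;  // norm == 1
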
Example #5
bool WHeadPositionCorrection::checkMovementThreshold( const WLEMDHPI::TransformationT& trans )
{
    const Eigen::Vector4d diffTrans = m_transExc.data().col( 3 ) - trans.data().col( 3 );
    if( diffTrans.norm() > m_movementThreshold )
    {
        m_transExc = trans;
        return true;
    }
    // TODO(pieloth): check rotation, e.g. with 1 or more key points
    // initial: pick 6 key points from dipoles min/max (x,0,0); (0,y,0); (0,0,z)
    // compare max distance
    // keyPoint 3x6
    // (trans * keyPoints).colwise().norm() - (m_transExc * keyPoints).colwise().norm()).maxCoeff(), mind homog. coord.
    return false;
}
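
The TODO above sketches a rotation check via key-point displacement. A minimal standalone version of that idea, using hypothetical names and plain Eigen::Matrix4d transforms instead of WLEMDHPI::TransformationT, could look like this:

#include <Eigen/Dense>

// Hypothetical helper: true if any of the 6 homogeneous key points (w = 1)
// moves farther than 'threshold' between the old and the new transformation.
bool exceedsKeyPointThreshold( const Eigen::Matrix4d& transOld,
                               const Eigen::Matrix4d& transNew,
                               const Eigen::Matrix< double, 4, 6 >& keyPoints,
                               double threshold )
{
    const Eigen::Matrix< double, 4, 6 > diff = transNew * keyPoints - transOld * keyPoints;
    return diff.topRows< 3 >().colwise().norm().maxCoeff() > threshold;
}
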
Example #6
TEST(UncertainHomogeneousPointTestSuite, testUav)
{
  try {
    using namespace sm::kinematics;
    Eigen::Vector4d p;
    p.setRandom();
    p = p/p.norm();
    Eigen::Matrix3d U;
    U = sm::eigen::randomCovariance<3>();
    
    UncertainHomogeneousPoint uhp(p,U);
    
    sm::eigen::assertNear(U, uhp.U_av_form(), 1e-10, SM_SOURCE_FILE_POS, "Checking that I can recover the av/form point uncertainty");
  } 
  catch(std::exception const & e)
    {
      FAIL() << e.what();
    }
}
Example #7
/**
 * @brief Normalize the given quaternion to unit quaternion.
 */
inline void quaternionNormalize(Eigen::Vector4d& q) {
  double norm = q.norm();
  q = q / norm;
  return;
}
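
A hypothetical call site; note that the function divides by the norm unguarded, so a zero quaternion would produce NaNs.

Eigen::Vector4d q(0.0, 0.0, 0.0, 2.0);
quaternionNormalize(q);   // q becomes (0, 0, 0, 1)
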
Example #8
void calculateOverlappingFOV(boost::shared_ptr<CAMERA_1_T> camera1,
                             boost::shared_ptr<CAMERA_1_T> camera2,
                             const sm::kinematics::Transformation& T_cam1_cam2,
                             cameras::ImageMask& outMask1,
                             cameras::ImageMask& outMask2,
                             double& outOverlappingRatio1,
                             double& outOverlappingRatio2,
                             const Eigen::VectorXi& sampleDistances,
                             double scale) {
  // get dimensions
  int width1 = (int) camera1->projection().ru() * scale;
  int height1 = (int) camera1->projection().rv() * scale;
  int width2 = (int) camera2->projection().ru() * scale;
  int height2 = (int) camera2->projection().rv() * scale;

  // clear incoming masks
  cv::Mat outMask1CV = outMask1.getMask();
  cv::Mat outMask2CV = outMask2.getMask();
  outMask1CV = cv::Mat::zeros(height1, width1, CV_8UC1);
  outMask2CV = cv::Mat::zeros(height2, width2, CV_8UC1);
  outMask1.setScale(scale);
  outMask2.setScale(scale);

  Eigen::Vector4d point;

  // build outMask1
  outOverlappingRatio1 = 0;
  for (int x = 0; x < width1; x++) {
    for (int y = 0; y < height1; y++) {
      // if visible
      // TODO: BB: what about rounding mistakes?
      if (camera1->projection().keypointToHomogeneous(
          Eigen::Vector2d((int) x / scale, (int) y / scale), point)) {
        double norm = point.norm();
        int i = 0;
        // check points on the beam until one is found or no other points to check
        while (i < sampleDistances.size()
            && outMask1CV.at<unsigned char>(y, x) == 0) {
          // Send point to distance sampleDistances(i) on the beam
          Eigen::Vector4d tempPoint = point;
          tempPoint(3) = norm / (norm + sampleDistances(i));

          // Project point to other camera frame
          tempPoint = T_cam1_cam2.inverse() * tempPoint;

          Eigen::Vector2d keypointLocation;
          if (camera2->projection().homogeneousToKeypoint(tempPoint,
                                                          keypointLocation)) {
            outMask1CV.at<unsigned char>(y, x) = 255;
            outOverlappingRatio1++;
          }  // if
          i++;
        }  // while
      }  // if
    }  // for
  }  // for
  outOverlappingRatio1 = outOverlappingRatio1 / (width1 * height1);

  // build outMask2
  outOverlappingRatio2 = 0;
  for (int x = 0; x < width2; x++) {
    for (int y = 0; y < height2; y++) {
      // if visible
      // TODO: BB: what about rounding mistakes?
      if (camera2->projection().keypointToHomogeneous(
          Eigen::Vector2d((int) x / scale, (int) y / scale), point)) {
        double norm = point.norm();
        int i = 0;
        // check points on the beam until one is found or no other points to check
        while (i < sampleDistances.size()
            && outMask2CV.at<unsigned char>(y, x) == 0) {
          // Send point to distance sampleDistances(i) on the beam
          Eigen::Vector4d tempPoint = point;
          tempPoint(3) = norm / (norm + sampleDistances(i));

          // Project point to other camera frame
          tempPoint = T_cam1_cam2 * tempPoint;

          Eigen::Vector2d keypointLocation;
          if (camera1->projection().homogeneousToKeypoint(tempPoint,
                                                          keypointLocation)) {
            outMask2CV.at<unsigned char>(y, x) = 255;
            outOverlappingRatio2++;
          }  // if
          i++;
        }  // while
      }  // if
    }  // for
  }  // for
  outOverlappingRatio2 = outOverlappingRatio2 / (width2 * height2);
}
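
The core trick in both loops is the assignment tempPoint(3) = norm / (norm + sampleDistances(i)): keeping x, y, z fixed while shrinking the homogeneous coordinate w to n/(n+d) scales the dehomogenized point by (n+d)/n, i.e. it pushes the point farther out along the same viewing ray before projecting it into the other camera. A standalone sketch (not part of the camera API) illustrating the scaling:

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::Vector4d point(0.6, 0.0, 0.8, 1.0);   // homogeneous point, w = 1
  const double n = point.norm();               // 4D norm, as in the loops above
  const double d = 10.0;                       // extra distance along the beam

  Eigen::Vector4d sample = point;
  sample(3) = n / (n + d);                     // same assignment as tempPoint(3)

  Eigen::Vector3d before = point.head<3>() / point(3);
  Eigen::Vector3d after = sample.head<3>() / sample(3);
  std::cout << after.norm() / before.norm() << std::endl;  // prints (n + d) / n
  return 0;
}
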