Example #1
// Converts Eigen Quaternion to a Vector3f of Euler Angles
// adapted from sixense_math file of the sixense SDK
Vector3f xen_rift::getEulerAnglesFromQuat(Quaternionf& input){
    //printf("Input: %f %f %f %f\n", input.w(), input.x(), input.y(), input.z());
    Vector3f retval;
    Matrix3f cols = input.toRotationMatrix();
    float h, p, r;
    float A, B;

    B = cols(1, 2);
    p = asinf( B );
    A = cosf( p );


    if( fabs( A ) > 0.005f ) {
        h = atan2f( -cols(0, 2)/A, cols(2, 2)/A ); // atan2( D, C )
        r = atan2f( -cols(1, 0)/A, cols(1, 1)/A ); // atan2( F, E )
    } else {
        h = 0;
        r = atan2f( cols(2, 1), cols(2, 0) ); // atan2( F, E ) when B=0, D=1
    }

    retval[1] = -h;
    retval[0] = -p;
    retval[2] = -r;

    //printf("output: %f %f %f\n", retval[0], retval[1], retval[2]);
    return retval;
}
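For comparison, Eigen can extract Euler angles from the rotation matrix directly. A minimal standalone sketch (not part of the example above; Eigen's Z-Y-X ordering differs in order and sign from the sixense-style conversion):

// Minimal sketch: Eigen's built-in Euler extraction.
#include <Eigen/Geometry>
#include <cstdio>

int main() {
    Eigen::Quaternionf q(Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitY()));
    Eigen::Matrix3f R = q.toRotationMatrix();
    // eulerAngles(2, 1, 0) returns the angles about Z, Y, X (yaw, pitch, roll).
    Eigen::Vector3f ea = R.eulerAngles(2, 1, 0);
    std::printf("yaw=%f pitch=%f roll=%f\n", ea[0], ea[1], ea[2]);
    return 0;
}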
Example #2
void Camera::updateViewMatrix(void) const
{
    if(!mViewIsUptodate)
    {
        Quaternionf q = orientation().conjugate();
        mViewMatrix.linear() = q.toRotationMatrix();
        mViewMatrix.translation() = - (mViewMatrix.linear() * position());

        mViewIsUptodate = true;
    }
}
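A minimal standalone sketch of the same idea, assuming the camera stores an Eigen::Affine3f view matrix and exposes its orientation as a Quaternionf and its position as a Vector3f (the function name here is illustrative, not from the original):

#include <Eigen/Geometry>

// Builds a world-to-camera transform from a camera pose.
Eigen::Affine3f makeViewMatrix(const Eigen::Quaternionf &orientation,
                               const Eigen::Vector3f &position) {
    Eigen::Affine3f view = Eigen::Affine3f::Identity();
    // The view rotation is the inverse (conjugate) of the camera orientation.
    Eigen::Quaternionf q = orientation.conjugate();
    view.linear() = q.toRotationMatrix();
    // Translate by the rotated, negated camera position.
    view.translation() = -(view.linear() * position);
    return view;
}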
Example #3
void YAW_EST::imuCallback(const sensor_msgs::Imu &msg)
{
	double q0,q1,q2,q3;
	q0 = msg.orientation.w;
	q1 = msg.orientation.x;
	q2 = msg.orientation.y;
	q3 = msg.orientation.z;
	// Note: Eigen's Quaternionf constructor takes (w, x, y, z).
	Q = Quaternionf(q0, q1, q2, q3);
	R = Q.toRotationMatrix(); 

	pitch = atan2(2*(q2*q3-q0*q1), q0*q0-q1*q1-q2*q2+q3*q3);
	roll = asin(-2*(q0*q2+q1*q3));
	yaw = atan2(2*(q1*q2-q0*q3), q0*q0+q1*q1-q2*q2-q3*q3);
	//ROS_INFO("roll: %f, pitch: %f, yaw: %f", roll*57.3, pitch*57.3, yaw*57.3);
}
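Since the callback already computes R = Q.toRotationMatrix(), the yaw can also be read from the matrix entries. A minimal sketch using the standard Z-Y-X convention (signs and axis conventions may differ from those used in the callback above):

#include <Eigen/Geometry>
#include <cmath>

// Yaw (rotation about z) from a quaternion, standard Z-Y-X convention.
float yawFromQuaternion(const Eigen::Quaternionf &q) {
    Eigen::Matrix3f R = q.toRotationMatrix();
    return std::atan2(R(1, 0), R(0, 0));
}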
Example #4
void computeSensorOffsetAndK(Eigen::Isometry3f &sensorOffset, Eigen::Matrix3f &cameraMatrix, ParameterCamera *cameraParam, int reduction) {
  sensorOffset = Isometry3f::Identity();
  cameraMatrix.setZero();
      
  int cmax = 4;
  int rmax = 3;
  for (int c=0; c<cmax; c++){
    for (int r=0; r<rmax; r++){
      sensorOffset.matrix()(r, c) = cameraParam->offset()(r, c);
      if (c < 3)
        cameraMatrix(r, c) = cameraParam->Kcam()(r, c);
    }
  }
  sensorOffset.translation() = Vector3f(0.15f, 0.0f, 0.05f);
  // Fixed rotation mapping the camera optical frame (z forward, x right, y down)
  // to the robot frame (x forward, y left, z up); constructor order is (w, x, y, z).
  Quaternionf quat = Quaternionf(0.5f, -0.5f, 0.5f, -0.5f);
  sensorOffset.linear() = quat.toRotationMatrix();
  sensorOffset.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;
	
  float scale = 1./reduction;
  cameraMatrix *= scale;
  cameraMatrix(2,2) = 1;
}
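The fixed quaternion (0.5, -0.5, 0.5, -0.5) used above appears to be the usual camera-optical-to-robot-frame rotation. A minimal sketch (illustration only) that prints the resulting matrix:

#include <Eigen/Geometry>
#include <iostream>

int main() {
    // Constructor order is (w, x, y, z).
    Eigen::Quaternionf quat(0.5f, -0.5f, 0.5f, -0.5f);
    // Expected output:
    //  0  0  1
    // -1  0  0
    //  0 -1  0
    // i.e. camera z (forward) -> x, camera x (right) -> -y, camera y (down) -> -z.
    std::cout << quat.toRotationMatrix() << std::endl;
    return 0;
}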
Example #5
int main(int argc, char **argv) {
  float pointSize;
  float pointStep;
  float normalLenght;
  float normalStep;
  float alphaColor;
  int initialSensorOffset;
  string filename;

  // Input parameters handling.
  g2o::CommandArgs arg;
  arg.param("pointSize", pointSize, 1.0f, "Size of the points");
  arg.param("normalLenght", normalLenght, 0, "Lenght of the normals");
  arg.param("alpha", alphaColor, 1.0f, "Alpha channel for points");
  arg.param("pointStep", pointStep, 1, "Step of the points");
  arg.param("normalStep", normalStep, 1, "Step of the normals");
  arg.param("initialSensorOffset", initialSensorOffset, 0, "Choose if use the initial sensor offset");
  arg.paramLeftOver("filename", filename, "", "Input depth image or .pwn file", true);
 
  // Set parser input.
  arg.parseArgs(argc, argv);

  Isometry3f sensorOffsetInit = Isometry3f::Identity();
  if(initialSensorOffset) {
    sensorOffsetInit.translation() = Vector3f(0.15f, 0.0f, 0.05f);
    Quaternionf quaternion = Quaternionf(0.5f, -0.5f, 0.5f, -0.5f);
    sensorOffsetInit.linear() = quaternion.toRotationMatrix();
  }
  sensorOffsetInit.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

  // Windows name
  static const char CALIBRATE_WINDOW[] = "Calibrate Window";

  // Calibration angles
  int maxTrackBarValue = 1800;
  int alpha = maxTrackBarValue / 2; // Around x
  int beta = maxTrackBarValue / 2;  // Around y
  int theta = maxTrackBarValue / 2; // Around z

  // Create the window and the trackbars
  cv::namedWindow(CALIBRATE_WINDOW);
  cv::moveWindow(CALIBRATE_WINDOW, 0, 0);
  cv::resizeWindow(CALIBRATE_WINDOW, 500, 200);
  cv::createTrackbar("Calibrate X", CALIBRATE_WINDOW, &alpha, maxTrackBarValue);
  cv::createTrackbar("Calibrate Y", CALIBRATE_WINDOW, &beta, maxTrackBarValue);
  cv::createTrackbar("Calibrate Z", CALIBRATE_WINDOW, &theta, maxTrackBarValue);
  
  // Create objects in order to read the input depth image / pwn file
  Eigen::Matrix3f cameraMatrix;
  cameraMatrix << 
    525.0f, 0.0f, 319.5f,
    0.0f, 525.0f, 239.5f,
    0.0f, 0.0f, 1.0f;

  string extension;
  extension = filename.substr(filename.rfind(".") + 1);

  Frame frame;
  Isometry3f globalT = Isometry3f::Identity();
  cerr << "Loading " << filename.c_str() << endl;
  if(extension == "pgm") {
    PinholePointProjector projector;
    projector.setCameraMatrix(cameraMatrix);
    StatsCalculator statsCalculator;
    PointInformationMatrixCalculator pointInformationMatrixCalculator;
    NormalInformationMatrixCalculator normalInformationMatrixCalculator;
    DepthImageConverter depthImageConverter(&projector, &statsCalculator,
					    &pointInformationMatrixCalculator,
					    &normalInformationMatrixCalculator);
    
    DepthImage inputImage;
    inputImage.load(filename.c_str(),true);
    
    cerr << "Computing stats... ";
    depthImageConverter.compute(frame, inputImage);
    cerr << " done" << endl;
  }
  else if(extension == "pwn") {
    frame.load(globalT, filename.c_str());
  }
  else {
    cerr << "File extension nor recognized, quitting." << endl;
    return(0);
  }

  Isometry3f oldSensorOffset = Isometry3f::Identity();
  oldSensorOffset.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

  QApplication application(argc,argv);
  QWidget* mainWindow = new QWidget();
  mainWindow->setWindowTitle("pwn_calibration");
  QHBoxLayout* hlayout = new QHBoxLayout();
  mainWindow->setLayout(hlayout);
  QVBoxLayout* vlayout = new QVBoxLayout();
  hlayout->addItem(vlayout);
  
  PWNQGLViewer* viewer = new PWNQGLViewer(mainWindow);
  vlayout->addWidget(viewer);
  
  viewer->init();
  viewer->show();
  viewer->setAxisIsDrawn(true);
  mainWindow->show();
  
  GLParameterPoints *pointsParams = new GLParameterPoints(pointSize, Eigen::Vector4f(1.0f, 0.0f, 0.0f, 1.0f));
  pointsParams->setStep(pointStep);
  DrawablePoints *drawablePoints = new DrawablePoints(oldSensorOffset, pointsParams, &frame.points(), &frame.normals());
  viewer->addDrawable(drawablePoints);
  
  GLParameterNormals *normalParams = new GLParameterNormals(pointSize, Eigen::Vector4f(0.0f, 0.0f, 1.0f, alphaColor), normalLenght);
  DrawableNormals *drawableNormals = new DrawableNormals(oldSensorOffset, normalParams, &frame.points(), &frame.normals());
  normalParams->setStep(normalStep);
  normalParams->setNormalLength(normalLenght);
  viewer->addDrawable(drawableNormals);

  // Keep cycling; initialize to identity so the value is defined even if the loop never runs.
  Isometry3f sensorOffset = Isometry3f::Identity();
  while(mainWindow->isVisible()) {
    // Map the trackbar values (0..maxTrackBarValue, centered) to angles in radians.
    float alphar = 2.0f * 3.14159265f * ((float)alpha - maxTrackBarValue / 2) / maxTrackBarValue;
    float betar  = 2.0f * 3.14159265f * ((float)beta - maxTrackBarValue / 2) / maxTrackBarValue;
    float thetar = 2.0f * 3.14159265f * ((float)theta - maxTrackBarValue / 2) / maxTrackBarValue;
    
    // Generate rotations
    Quaternion<float> qx, qy, qz; 
    qx = AngleAxis<float>(alphar, Vector3f(1.0f, 0.0f, 0.0f));
    qy = AngleAxis<float>(betar, Vector3f(0.0f, 1.0f, 0.0f));
    qz = AngleAxis<float>(thetar, Vector3f(0.0f, 0.0f, 1.0f));
    
    Matrix3f totalRotation = qz.toRotationMatrix() * qy.toRotationMatrix() * qx.toRotationMatrix();
    sensorOffset = Isometry3f::Identity();
    sensorOffset.translation() = Vector3f(0.0f, 0.0f, 0.0f);
    sensorOffset.linear() = totalRotation * sensorOffsetInit.linear();
    sensorOffset.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;

    drawablePoints->setTransformation(sensorOffset);
    drawableNormals->setTransformation(sensorOffset);

    oldSensorOffset = sensorOffset;

    viewer->updateGL();
    application.processEvents();

    cv::waitKey(33);
  }

  ofstream os("sensorOffset.txt");
  Vector6f offset = t2v(sensorOffset);
  os << offset[0] << " " << offset[1] << " " << offset[2] << " " 
     << offset[3] << " " << offset[4] << " " << offset[5] << endl;

  return(0);
}
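For reference, the rotation composed inside the loop above can be written as a small helper. A minimal sketch (the function name is illustrative, not from the original):

#include <Eigen/Geometry>

// Rotation about x, then y, then z, composed as quaternions (z * y * x),
// matching the composition used in the calibration loop.
Eigen::Matrix3f rotationFromAngles(float rotX, float rotY, float rotZ) {
    Eigen::Quaternionf qx(Eigen::AngleAxisf(rotX, Eigen::Vector3f::UnitX()));
    Eigen::Quaternionf qy(Eigen::AngleAxisf(rotY, Eigen::Vector3f::UnitY()));
    Eigen::Quaternionf qz(Eigen::AngleAxisf(rotZ, Eigen::Vector3f::UnitZ()));
    return (qz * qy * qx).toRotationMatrix();
}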
Example #6
  void ViewerState::init(){
    imageRows = 0;
    imageCols = 0;
    
    ng_worldRadius = 0.1f;
    ng_minImageRadius = 10;
    ng_curvatureThreshold = 1.0f;
    al_innerIterations = 1;
    al_outerIterations = 10;
    vz_step = 5;
    if_curvatureThreshold = 0.1f;
    reduction = 2;


    cameraMatrix <<     
      525.0f, 0.0f, 319.5f,
      0.0f, 525.0f, 239.5f,
      0.0f, 0.0f, 1.0f;
    
    float scale = 1./reduction;
    cameraMatrix*=scale;
    cameraMatrix(2,2) = 1;

    if (0){
      sensorOffset.setIdentity();
    } else {
      sensorOffset.translation() = Vector3f(0.15f, 0.0f, 0.05f);
      Quaternionf quat = Quaternionf(0.5, -0.5, 0.5, -0.5);
      sensorOffset.linear() = quat.toRotationMatrix();
    }

    sensorOffset.matrix().row(3) << 0,0,0,1;

    projector = new PinholePointProjector();
    statsCalculator = new StatsCalculator();

    pointInformationMatrixCalculator = new PointInformationMatrixCalculator();
    normalInformationMatrixCalculator = new NormalInformationMatrixCalculator();
    converter = new DepthImageConverter(projector, statsCalculator, 
					pointInformationMatrixCalculator, normalInformationMatrixCalculator);

    projector->setTransform(Eigen::Isometry3f::Identity());
    projector->setCameraMatrix(cameraMatrix);

    // Creating and setting aligner object.
    //Aligner aligner;
    correspondenceFinder = new CorrespondenceFinder();
    linearizer = new Linearizer();
#ifdef _PWN_USE_CUDA_
    aligner = new CuAligner();
#else
    aligner = new Aligner();
#endif
    
    aligner->setProjector(projector);
    aligner->setLinearizer(linearizer);
    linearizer->setAligner(aligner);
    aligner->setCorrespondenceFinder(correspondenceFinder);

    
    statsCalculator->setWorldRadius(ng_worldRadius);
    //statsFinder->setCurvatureThreshold(ng_curvatureThreshold);
    statsCalculator->setMinPoints(ng_minImageRadius);
    aligner->setInnerIterations(al_innerIterations);
    aligner->setOuterIterations(al_outerIterations);
    converter->_curvatureThreshold = ng_curvatureThreshold;
    pointInformationMatrixCalculator->setCurvatureThreshold(if_curvatureThreshold);
    normalInformationMatrixCalculator->setCurvatureThreshold(if_curvatureThreshold);

    refScn = pwnGMW->scene0();
    currScn = pwnGMW->scene1();

    listWidget = pwnGMW->listWidget;
    drawableFrameParameters = new DrawableFrameParameters();

    typedef BlockSolver< BlockSolverTraits<-1, -1> >  SlamBlockSolver;
    typedef LinearSolverCSparse<SlamBlockSolver::PoseMatrixType> SlamLinearSolver;
    SlamLinearSolver* linearSolver = new SlamLinearSolver();
    linearSolver->setBlockOrdering(false);
    SlamBlockSolver* blockSolver = new SlamBlockSolver(linearSolver);
    OptimizationAlgorithmGaussNewton* solverGauss = new OptimizationAlgorithmGaussNewton(blockSolver);
    graph = new SparseOptimizer();
    graph->setAlgorithm(solverGauss);
    continuousMode = false;
  }
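The camera-matrix handling above (scale by 1/reduction, then restore the homogeneous element) adapts the intrinsics to a subsampled image. A minimal sketch of that step in isolation (the helper name is illustrative):

#include <Eigen/Core>

// Scales pinhole intrinsics for an image subsampled by an integer factor.
Eigen::Matrix3f scaleCameraMatrix(Eigen::Matrix3f K, int reduction) {
    K *= 1.0f / reduction;   // scales fx, fy, cx, cy
    K(2, 2) = 1.0f;          // restore the homogeneous element
    return K;
}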