TEST(TFEigenConversions, tf_eigen_transform)
{
  tf::Transform t;
  tf::Quaternion tq;
  tq[0] = gen_rand(-1.0,1.0);
  tq[1] = gen_rand(-1.0,1.0);
  tq[2] = gen_rand(-1.0,1.0);
  tq[3] = gen_rand(-1.0,1.0);
  tq.normalize();
  t.setOrigin(tf::Vector3(gen_rand(-10,10),gen_rand(-10,10),gen_rand(-10,10)));
  t.setRotation(tq);

  Eigen::Affine3d k;
  transformTFToEigen(t,k);

  for(int i=0; i < 3; i++)
  {
    ASSERT_NEAR(t.getOrigin()[i],k.matrix()(i,3),1e-6);
    for(int j=0; j < 3; j++)
    {      
      ASSERT_NEAR(t.getBasis()[i][j],k.matrix()(i,j),1e-6);
    }
  }
  for (int col = 0 ; col < 3; col ++)
    ASSERT_NEAR(k.matrix()(3, col), 0, 1e-6);
  ASSERT_NEAR(k.matrix()(3,3), 1, 1e-6);
  
}
Example #2
void drawRedDotsPlane()
{
    int planeTilt = (int)factors.at("Tilt");
    int planeSlant = (int)factors.at("Slant");
    double planeDef = factors.at("Def");
    double planeOnset=1;
    bool backprojectionActive=true;
    double theta=0.0;
    Eigen::Affine3d modelTransformation;

    glPushMatrix();
    //glLoadIdentity();
    //glTranslated(0,0,focalDistance);

    glMultMatrixd(stimTransformation.data());
    switch ( planeTilt )
    {
    case 0:
    {
        theta = -acos(exp(-0.346574+0.5*planeDef-planeDef/2*(1-planeOnset*periodicValue/oscillationAmplitude)));
        glRotated(toDegrees(theta),0,1,0);
        if (backprojectionActive)
            glScaled(1/sin(toRadians( -90-planeSlant)),1,1);	//backprojection phase
    }
    break;
    case 90:
    {
        theta = -acos(exp(-0.346574+0.5*planeDef-planeDef/2*(1-planeOnset*periodicValue/oscillationAmplitude)));
        glRotated( toDegrees(theta),1,0,0);
        if (backprojectionActive)
            glScaled(1,1/sin(toRadians( -90-planeSlant )),1); //backprojection phase
    }
    break;
    case 180:
    {
        theta = acos(exp(-0.346574+0.5*planeDef-planeDef/2*(1+planeOnset*periodicValue/oscillationAmplitude)));
        glRotated(toDegrees(theta),0,1,0);
        if (backprojectionActive)
            glScaled(1/sin(toRadians( -90-planeSlant )),1,1); //backprojection phase
    }
    break;
    case 270:
    {
        theta = acos(exp(-0.346574+0.5*planeDef-planeDef/2*(1+planeOnset*periodicValue/oscillationAmplitude)));
        glRotated( toDegrees(theta),1,0,0);
        if (backprojectionActive)
            glScaled(1,1/sin(toRadians( -90-planeSlant )),1); //backprojection phase
    }
    break;
    default:
        cerr << "Error: tilt must be one of {0, 90, 180, 270}" << endl;
    }

    stimDrawer.draw();
    glGetDoublev(GL_MODELVIEW_MATRIX,modelTransformation.matrix().data());
    glPopMatrix();
}
Eigen::Affine3d transformTFToEigen(const tf::Transform &t) {
    Eigen::Affine3d e;
    // treat the Eigen::Affine as a 4x4 matrix:
    for (int i = 0; i < 3; i++) {
        e.matrix()(i, 3) = t.getOrigin()[i]; //copy the origin from tf to Eigen
        for (int j = 0; j < 3; j++) {
            e.matrix()(i, j) = t.getBasis()[i][j]; //and copy 3x3 rotation matrix
        }
    }
    // Fill in identity in last row
    for (int col = 0; col < 3; col++)
        e.matrix()(3, col) = 0;
    e.matrix()(3, 3) = 1;
    return e;
}
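
For a quick sanity check, a minimal round-trip sketch (assuming this overload and the transformEigenToTF from Example #8 below are both in scope) could look like:

    tf::Transform t_in;
    t_in.setOrigin(tf::Vector3(1.0, 2.0, 3.0));
    t_in.setRotation(tf::Quaternion(0, 0, 0, 1)); // identity rotation (x, y, z, w)

    Eigen::Affine3d e = transformTFToEigen(t_in);  // tf -> Eigen
    tf::Transform t_out;
    transformEigenToTF(e, t_out);                  // Eigen -> tf
    // t_out now matches t_in up to floating-point precision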
Example #5
 void TSDFVolumeOctree::getFrustumCulledVoxels (const Eigen::Affine3d& trans,
                                             std::vector<OctreeNode::Ptr> &voxels) const {
     std::vector<OctreeNode::Ptr> voxels_all;
     octree_->getLeaves (voxels_all, max_cell_size_x_, max_cell_size_y_, max_cell_size_z_);
     pcl::PointCloud<pcl::PointXYZ>::Ptr voxel_cloud(new pcl::PointCloud<pcl::PointXYZ> (voxels_all.size(), 1));
     std::vector<int> indices;
     for (size_t i = 0; i < voxel_cloud->size(); ++i) {
         pcl::PointXYZ &pt = voxel_cloud->at (i);
         voxels_all[i]->getCenter (pt.x, pt.y, pt.z);
     }
     pcl::FrustumCulling<pcl::PointXYZ> fc (false);
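     // pcl::FrustumCulling treats +X as the viewing direction, so remap the
     // camera optical frame (Z forward, Y down) before handing the pose over.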
     Eigen::Matrix4f cam2robot;
     cam2robot << 0, 0, 1, 0,
                  0, -1, 0, 0,
                  1, 0, 0, 0,
                  0, 0, 0, 1;
     Eigen::Matrix4f trans_robot = trans.matrix().cast<float> () * cam2robot;
     fc.setCameraPose (trans_robot);
     fc.setHorizontalFOV (70);
     fc.setVerticalFOV (70);
     fc.setNearPlaneDistance (min_sensor_dist_);
     fc.setFarPlaneDistance (max_sensor_dist_);
     fc.setInputCloud (voxel_cloud);
     fc.filter (indices);
     voxels.resize (indices.size());
     for (size_t i = 0; i < indices.size(); ++i) {
         voxels[i] = voxels_all[indices[i]];
     }
 }
	bool srvSetHuboObjectPose(HuboApplication::SetHuboObjectPose::Request &req,
						      HuboApplication::SetHuboObjectPose::Response &res)
	{
		bool response = true;
		Eigen::Affine3d tempPose;
		Eigen::Isometry3d armPose;

		tf::poseMsgToEigen(req.Target, tempPose);
		if (tempPose(0,3) != tempPose(0,3)) // NaN check: NaN != NaN
		{
			res.Success = false;
			return false;
		}

		armPose = tempPose.matrix();

		//std::cerr << tempPose.matrix() << std::endl;

		m_Manip.setControlMode(OBJECT_POSE);
		m_Manip.setAngleMode(QUATERNION);
		m_Manip.setPose(armPose, req.ObjectIndex);
		m_Manip.sendCommand();
		//std::cerr << armPose.matrix() << std::endl;
		res.Success = response;
		return response;
	}
	bool srvSetHuboArmPose(HuboApplication::SetHuboArmPose::Request &req,
						   HuboApplication::SetHuboArmPose::Response &res)
	{
		bool response = true;
		Eigen::Affine3d tempPose;
		Eigen::Isometry3d armPose;

		tf::poseMsgToEigen(req.Target, tempPose);
		if (tempPose(0,3) != tempPose(0,3)) // NaN check: NaN != NaN
		{
			res.Success = false;
			return false;
		}

		//std::cerr << tempPose.matrix() << std::endl;

		armPose = tempPose.matrix();
		//armPose.translation() = tempPose.translation();
		//armPose.linear() = tempPose.rotation();
		//std::cerr << armPose.matrix() << std::endl;

		m_Manip.setControlMode(END_EFFECTOR);
		m_Manip.setAngleMode(QUATERNION);
		m_Manip.setPose(armPose, req.ArmIndex);
		m_Manip.sendCommand();
		//std::cerr << armPose.matrix() << std::endl;
		res.Success = response;
		return response;
	}
Example #8
 void transformEigenToTF(const Eigen::Affine3d &e, tf::Transform &t)
 {
   t.setOrigin(tf::Vector3( e.matrix()(0,3), e.matrix()(1,3), e.matrix()(2,3)));
   t.setBasis(tf::Matrix3x3(e.matrix()(0,0), e.matrix()(0,1),e.matrix()(0,2),
                            e.matrix()(1,0), e.matrix()(1,1),e.matrix()(1,2),
                            e.matrix()(2,0), e.matrix()(2,1),e.matrix()(2,2)));
 }
Example #9
void Data4Viewer::drawGlobalView()
{
	_pKinect->_pRGBCamera->setGLProjectionMatrix(0.1f, 100.f);

	glMatrixMode(GL_MODELVIEW);
	Eigen::Affine3d tmp; tmp.setIdentity();
	Eigen::Matrix4d mMat;
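	// Negate the Y and Z rows to flip from the camera convention
	// (Y down, Z forward) into OpenGL eye coordinates (Y up, Z toward the viewer).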
	mMat.row(0) = tmp.matrix().row(0);
	mMat.row(1) = -tmp.matrix().row(1);
	mMat.row(2) = -tmp.matrix().row(2);
	mMat.row(3) = tmp.matrix().row(3);

	glLoadMatrixd(mMat.data());
	GpuMat rgb(_pKinect->_cvmRGB);
	_pKinect->_pRGBCamera->renderCameraInLocal(rgb, _pGL.get(), false, NULL, 0.2f, true); //render in model coordinate
	//cout<<("drawRGBView");
	return;
}
void visualOdometry::mainloop(){
    FRAME lastFrame;
    std::string file_name_ground =pd_.getData( "file_name_ground");

    readFromFile( file_name_ground,ground_truth_list_);
    lastFrame = readFrameFromGroundTruthList(start_index_ ,ground_truth_list_);

    // We always compare currFrame against lastFrame
    string detector = pd_.getData( "detector" );
    string descriptor = pd_.getData( "descriptor" );
    CAMERA_INTRINSIC_PARAMETERS camera = getDefaultCamera();
    computeKeyPointsAndDesp( lastFrame, detector, descriptor );

    // Whether to visualize the point cloud
    bool visualize = pd_.getData("visualize_pointcloud")==string("true");

    int min_inliers = atoi( pd_.getData("min_inliers").c_str() );
    double max_norm = atof( pd_.getData("max_norm").c_str() );

    for (int currIndex=start_index_+1; currIndex<end_index_; currIndex++ )
    {
        current_index_ = currIndex;
        cout<<"Reading files "<<currIndex<<endl;
        FRAME currFrame = readFrameFromGroundTruthList(currIndex, ground_truth_list_); // read currFrame
        computeKeyPointsAndDesp( currFrame, detector, descriptor );
        // Compare currFrame and lastFrame
        RESULT_OF_PNP result = estimateMotion( lastFrame, currFrame, camera );
        if ( result.inliers < min_inliers ){ // not enough inliers, discard this frame
            std::cout << "not enough inliers: " << result.inliers << std::endl;
            continue;
        }
        //ROS_INFO_STREAM("trans: "<< result.tvec <<" rot: "<< result.rvec);

        cv::Mat rotation_matrix;
        cv::Rodrigues(result.rvec, rotation_matrix);
        Eigen::Matrix3d rotation_eigen_temp;
        cv::cv2eigen(rotation_matrix,rotation_eigen_temp);
        Eigen::AngleAxisd rotation_eigen(rotation_eigen_temp);

        Eigen::Affine3d incrementalAffine = Eigen::Affine3d::Identity();
        incrementalAffine = rotation_eigen;
        incrementalAffine(0,3) = result.tvec.ptr<double>(0)[0];
        incrementalAffine(1,3) = result.tvec.ptr<double>(1)[0];
        incrementalAffine(2,3) = result.tvec.ptr<double>(2)[0];

        //pose_camera_ = incrementalAffine * pose_camera_;

        publishTrajectory();
        publishGroundTruth();
        ROS_INFO_STREAM("RT: "<< incrementalAffine.matrix());
        lastFrame = currFrame;
    }
}
Example #11
 Plane Plane::transform(const Eigen::Affine3d& transform)
 {
   Eigen::Vector4d n;
   n[0] = normal_[0];
   n[1] = normal_[1];
   n[2] = normal_[2];
   n[3] = d_;
   Eigen::Matrix4d m = transform.matrix();
   // Plane coefficients (n, d) transform with the inverse transpose of the point transform.
   Eigen::Vector4d n_d = m.inverse().transpose() * n;
   // Rescale so the normal part is unit length again.
   Eigen::Vector4d n_dd = n_d / n_d.head<3>().norm();
   
   return Plane(Eigen::Vector3d(n_dd[0], n_dd[1], n_dd[2]), n_dd[3]);
 }
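
A note on the inverse transpose used above: a plane with coefficients c = (n, d) satisfies n^T x + d = 0. For transformed points x' = M x we need c'^T x' = 0, and substituting x = M^{-1} x' gives c' = M^{-T} c; dividing by the norm of the transformed normal afterwards restores a unit-length normal.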
TEST(TFEigenConversions, eigen_tf_transform)
{
  tf::Transform t;
  Eigen::Affine3d k;
  k.setIdentity(); // translate()/rotate() compose onto the current value, so start from identity
  Eigen::Quaterniond kq;
  kq.coeffs()(0) = gen_rand(-1.0,1.0);
  kq.coeffs()(1) = gen_rand(-1.0,1.0);
  kq.coeffs()(2) = gen_rand(-1.0,1.0);
  kq.coeffs()(3) = gen_rand(-1.0,1.0);
  kq.normalize();
  k.translate(Eigen::Vector3d(gen_rand(-10,10),gen_rand(-10,10),gen_rand(-10,10)));
  k.rotate(kq);

  transformEigenToTF(k,t);
  for(int i=0; i < 3; i++)
  {
    ASSERT_NEAR(t.getOrigin()[i],k.matrix()(i,3),1e-6);
    for(int j=0; j < 3; j++)
    {      
      ASSERT_NEAR(t.getBasis()[i][j],k.matrix()(i,j),1e-6);
    }
  }
}
Example #13
void idle()
{
    Timer frameTimer; frameTimer.start();
    // Timing things
    timeFrame+=1;

    double oscillationPeriod = factors.at("StimulusDuration")*TIMER_MS;

    switch (stimMotion)
    {
    case SAWTOOTH_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    case TRIANGLE_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
        break;
    case SINUSOIDAL_MOTION:
        periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
        break;
    default:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod); // fall back to sawtooth
    }

    timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    Vector3d fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
    // Projection of view normal on the focal plane
    Eigen::ParametrizedLine<double,3> pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
    projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

    stimTransformation.matrix().setIdentity();
    stimTransformation.translation() <<0,0,focalDistance;

    Timer sleepTimer;
    sleepTimer.sleep((TIMER_MS - frameTimer.getElapsedTimeInMilliSec())/2);
}
Example #14
Eigen::Matrix4d getTranformationMatrix(const string &frameName, const string &coordSys ){
    tf::TransformListener listener;
    ros::Time ts(0) ;
    tf::StampedTransform transform;
    Eigen::Affine3d pose = Eigen::Affine3d::Identity(); // returned unchanged if the lookup throws

    try {
        listener.waitForTransform( coordSys, frameName, ts, ros::Duration(1) );
        listener.lookupTransform( coordSys, frameName, ts, transform);
        tf::TransformTFToEigen(transform, pose);
    }
    catch (tf::TransformException ex){
      ROS_ERROR("%s",ex.what());
    }

    return pose.matrix();
}
Example #15
TEST(TfEigen, ConvertTransform)
{
  Eigen::Matrix4d tm;
  
  double alpha = M_PI/4.0;
  double theta = M_PI/6.0;
  double gamma = M_PI/12.0;
  
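  // The rotation block below is Rx(alpha) * Ry(theta) * Rz(gamma);
  // the last column appends the translation (1, 2, 3).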
  tm << cos(theta)*cos(gamma),-cos(theta)*sin(gamma),sin(theta), 1, //
  cos(alpha)*sin(gamma)+sin(alpha)*sin(theta)*cos(gamma),cos(alpha)*cos(gamma)-sin(alpha)*sin(theta)*sin(gamma),-sin(alpha)*cos(theta), 2, //
  sin(alpha)*sin(gamma)-cos(alpha)*sin(theta)*cos(gamma),cos(alpha)*sin(theta)*sin(gamma)+sin(alpha)*cos(gamma),cos(alpha)*cos(theta), 3, //
  0, 0, 0, 1;
  
  Eigen::Affine3d T(tm);
  
  geometry_msgs::TransformStamped msg = tf2::eigenToTransform(T);
  Eigen::Affine3d Tback = tf2::transformToEigen(msg);
  
  EXPECT_TRUE(T.isApprox(Tback));
  EXPECT_TRUE(tm.isApprox(Tback.matrix()));
    
}
Example #16
  static bool colorize(const drc::map_image_t& iDepthMap,
                       const Eigen::Affine3d& iLocalToCamera,
                       const bot_core::image_t& iImage,
                       const BotCamTrans* iCamTrans,
                       bot_core::image_t& oImage) {
    oImage.utime = iDepthMap.utime;
    oImage.width = iDepthMap.width;
    oImage.height = iDepthMap.height;
    oImage.row_stride = 3*iDepthMap.width;
    oImage.pixelformat = PIXEL_FORMAT_RGB;
    oImage.size = oImage.row_stride * oImage.height;
    oImage.nmetadata = 0;
    oImage.data.resize(oImage.size);

    Eigen::Matrix4d xform;
    for (int i = 0; i < 4; ++i) {
      for (int j = 0; j < 4; ++j) {
        xform(i,j) = iDepthMap.transform[i][j];
      }
    }
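    // Compose: depth-image coordinates -> local frame (inverse of the map
    // transform), then local frame -> camera frame.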
    xform = iLocalToCamera.matrix()*xform.inverse();

    for (int i = 0; i < iDepthMap.height; ++i) {
      for (int j = 0; j < iDepthMap.width; ++j) {
        double z;
        if (iDepthMap.format == drc::map_image_t::FORMAT_GRAY_FLOAT32) {
          z = ((float*)(&iDepthMap.data[0] + i*iDepthMap.row_bytes))[j];
          if (z < -1e10) {
            continue;
          }
        }
        else if (iDepthMap.format == drc::map_image_t::FORMAT_GRAY_UINT8) {
          uint8_t val = iDepthMap.data[i*iDepthMap.row_bytes + j];
          if (val == 0) {
            continue;
          }
          z = val;
        }
        else {
          continue;
        }

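        // Map depth-map pixel (col j, row i, depth z) into the camera frame.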
        Eigen::Vector4d pt = xform*Eigen::Vector4d(j,i,z,1);
        double p[3] = {pt(0)/pt(3),pt(1)/pt(3),pt(2)/pt(3)};
        double pix[3];
        bot_camtrans_project_point(iCamTrans, p, pix);
        if (pix[2] < 0) {
          continue;
        }
        uint8_t r,g,b;
        if (!interpolate(pix[0], pix[1], iImage, r, g, b)) {
          continue;
        }
        int outImageIndex = i*oImage.row_stride + 3*j;
        oImage.data[outImageIndex+0] = r;
        oImage.data[outImageIndex+1] = g;
        oImage.data[outImageIndex+2] = b;
      }
    }

    return true;
  }
Example #17
void idle()
{

    if (totalTimer.getElapsedTimeInSec() > initialAdaptationTimeInSeconds
            && trialMode == INITIALADAPTATION && experimentStarted )
    {
        trialMode = STIMULUSMODE;
        totalTimer.start();
    }

    if (totalTimer.getElapsedTimeInSec() > fixationDurationInSeconds && trialMode == FIXATIONMODE && experimentStarted )
    {
        timeFrame=0.0;
        trialMode=STIMULUSMODE;
        totalTimer.start();
    }

    if (totalTimer.getElapsedTimeInMilliSec() > stimulusDurationInMilliSeconds && trialMode == STIMULUSMODE && experimentStarted )
    {
        timeFrame=0.0;
        trialMode=PROBEMODE;
        totalTimer.start();
    }

    if (!experimentStarted)
        return;
    Timer frameTimer;
    frameTimer.start();
    // Timing things

    double oscillationPeriod = stimulusDurationInMilliSeconds;
    switch (stimMotion)
    {
    case SAWTOOTH_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
        break;
    case TRIANGLE_MOTION:
        periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
        break;
    case SINUSOIDAL_MOTION:
        periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
        break;
    default:
        periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod); // fall back to sawtooth
    }

    timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

    // Simulate head translation
    // Coordinates picker
    markers[1] = Vector3d(0,0,0);
    markers[2] = Vector3d(0,10,0);
    markers[3] = Vector3d(0,0,10);

    headEyeCoords.update(markers[1],markers[2],markers[3]);

    eyeLeft = headEyeCoords.getLeftEye();
    eyeRight = headEyeCoords.getRightEye();

    stimTransformation.matrix().setIdentity();
    stimTransformation.translation() <<0,0,focalDistance;

    Timer sleepTimer;
    sleepTimer.sleep(std::abs(TIMER_MS - frameTimer.getElapsedTimeInMilliSec()));
    timeFrame+=1;

}
Example #18
bool
pcl::EnsensoGrabber::computeCalibrationMatrix (const std::vector<Eigen::Affine3d, Eigen::aligned_allocator<Eigen::Affine3d> > &robot_poses,
                                               std::string &json,
                                               const std::string setup,
                                               const std::string target,
                                               const Eigen::Affine3d &guess_tf,
                                               const bool pretty_format) const
{
  try
  {
    std::vector<std::string> robot_poses_json;
    robot_poses_json.resize (robot_poses.size ());
    for (uint i = 0; i < robot_poses_json.size (); ++i)
    {
      if (!matrixTransformationToJson (robot_poses[i], robot_poses_json[i]))
        return (false);
    }

    // Feed all robot poses into the calibration command
    NxLibCommand calibrate (cmdCalibrateHandEye);
    for (uint i = 0; i < robot_poses_json.size (); ++i)
    {
      // Very weird behaviour here:
      // If you modify this loop, check that all the transformations are still here in the [itmExecute][itmParameters] node
      // because for an unknown reason sometimes the old transformations are erased in the tree ("null" in the tree)
      calibrate.parameters ()[itmTransformations][i].setJson (robot_poses_json[i], false);
    }

    // Set Hand-Eye calibration parameters
    if (boost::iequals (setup, "Fixed"))
      calibrate.parameters ()[itmSetup].set (valFixed);
    else
      calibrate.parameters ()[itmSetup].set (valMoving);

    calibrate.parameters ()[itmTarget].set (target);

    // Set guess matrix
    if (guess_tf.matrix () != Eigen::Matrix4d::Identity ())
    {
      // Matrix > JSON > Angle axis
      NxLibItem tf ("/tmpTF");
      if (!matrixTransformationToJson (guess_tf, json))
        return (false);
      tf.setJson (json);


      // Rotation
      double theta = tf[itmRotation][itmAngle].asDouble ();  // Angle of rotation
      double x = tf[itmRotation][itmAxis][0].asDouble ();   // X component of Euler vector
      double y = tf[itmRotation][itmAxis][1].asDouble ();   // Y component of Euler vector
      double z = tf[itmRotation][itmAxis][2].asDouble ();   // Z component of Euler vector

      (*root_)[itmLink][itmRotation][itmAngle].set (theta);
      (*root_)[itmLink][itmRotation][itmAxis][0].set (x);
      (*root_)[itmLink][itmRotation][itmAxis][1].set (y);
      (*root_)[itmLink][itmRotation][itmAxis][2].set (z);

      // Translation
      (*root_)[itmLink][itmTranslation][0].set (guess_tf.translation ()[0]);
      (*root_)[itmLink][itmTranslation][1].set (guess_tf.translation ()[1]);
      (*root_)[itmLink][itmTranslation][2].set (guess_tf.translation ()[2]);
    }

    calibrate.execute ();  // Might take up to 120 sec (maximum allowed by Ensenso API)

    if (calibrate.successful ())
    {
      json = calibrate.result ().asJson (pretty_format);
      return (true);
    }
    else
      return (false);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "computeCalibrationMatrix");
    return (false);
  }
}
bool Mapper::getBoundedMap(ethzasl_icp_mapper::GetBoundedMap::Request &req, ethzasl_icp_mapper::GetBoundedMap::Response &res)
{
	if (!mapPointCloud)
		return false;

	const float max_x = req.topRightCorner.x;
	const float max_y = req.topRightCorner.y;
	const float max_z = req.topRightCorner.z;

	const float min_x = req.bottomLeftCorner.x;
	const float min_y = req.bottomLeftCorner.y;
	const float min_z = req.bottomLeftCorner.z;

	cerr << "min [" << min_x << ", " << min_y << ", " << min_z << "] " << endl;
	cerr << "max [" << max_x << ", " << max_y << ", " << max_z << "] " << endl;



	tf::StampedTransform stampedTr;
	
	Eigen::Affine3d eigenTr;
	tf::poseMsgToEigen(req.mapCenter, eigenTr);
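	// Invert the center pose so the map is expressed in the bounding-box
	// frame before the per-point min/max test below.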
	Eigen::MatrixXf T = eigenTr.matrix().inverse().cast<float>();
	//const Eigen::MatrixXf T = eigenTr.matrix().cast<float>();

	cerr << "T:" << endl << T << endl;
	T = transformation->correctParameters(T);

		
	// FIXME: do we need a mutex here?
	const DP centeredPointCloud = transformation->compute(*mapPointCloud, T); 
	DP cutPointCloud = centeredPointCloud.createSimilarEmpty();

	cerr << centeredPointCloud.features.topLeftCorner(3, 10) << endl;
	cerr << T << endl;

	int newPtCount = 0;
	for(int i=0; i < centeredPointCloud.features.cols(); i++)
	{
		const float x = centeredPointCloud.features(0,i);
		const float y = centeredPointCloud.features(1,i);
		const float z = centeredPointCloud.features(2,i);
		
		if(x < max_x && x > min_x &&
			 y < max_y && y > min_y &&
		   z < max_z && z > min_z 	)
		{
			cutPointCloud.setColFrom(newPtCount, centeredPointCloud, i);
			newPtCount++;	
		}
	}

	cerr << "Extract " << newPtCount << " points from the map" << endl;
	
	cutPointCloud.conservativeResize(newPtCount);
	cutPointCloud = transformation->compute(cutPointCloud, T.inverse()); 

	
	// Send the resulting point cloud in ROS format
	res.boundedMap = PointMatcher_ros::pointMatcherCloudToRosMsg<float>(cutPointCloud, mapFrame, ros::Time::now());
	return true;
}
Example #20
int main(int argc, char** argv)
{
    // welcome message
    cout << "****************************************************" << endl;
    cout << "* Surface Matching demonstration : demonstrates the use of surface matching"
             " using point pair features." << endl;
    cout << "* The sample loads a model and a scene, where the model lies in a different"
             " pose than the training.\n* It then trains the model and searches for it in the"
             " input scene. The detected poses are further refined by ICP\n* and printed to the "
             " standard output." << endl;
    cout << "****************************************************" << endl;
    
    if (argc < 3)
    {
        help("Not enough input arguments");
        exit(1);
    }
    
#if (defined __x86_64__ || defined _M_X64)
    cout << "Running on 64 bits" << endl;
#else
    cout << "Running on 32 bits" << endl;
#endif
    
#ifdef _OPENMP
    cout << "Running with OpenMP" << endl;
#else
    cout << "Running without OpenMP and without TBB" << endl;
#endif
    
    string modelFileName = (string)argv[1];
    string sceneFileName = (string)argv[2];
    
    Mat pc = cv::ppf_match_3d::loadPLYSimple(modelFileName.c_str(), 1);
    
    // Now train the model
    cout << "Training..." << endl;
    int64 tick1 = cv::getTickCount();
    ppf_match_3d::PPF3DDetector detector(0.025, 0.05);
    detector.trainModel(pc);
    int64 tick2 = cv::getTickCount();
    cout << endl << "Training complete in "
         << (double)(tick2-tick1)/ cv::getTickFrequency()
         << " sec" << endl << "Loading model..." << endl;
         
    // Read the scene
    Mat pcTest = cv::ppf_match_3d::loadPLYSimple(sceneFileName.c_str(), 1);
    
    // Match the model to the scene and get the pose
    cout << endl << "Starting matching..." << endl;
    vector<cv::ppf_match_3d::Pose3DPtr> results;
    tick1 = cv::getTickCount();
    detector.match(pcTest, results, 1.0/40.0, 0.05);
    tick2 = cv::getTickCount();
    cout << endl << "PPF Elapsed Time " <<
         (tick2-tick1)/cv::getTickFrequency() << " sec" << endl;

    //check results size from match call above
    size_t results_size = results.size();
    cout << "Number of matching poses: " << results_size	;
    if (results_size == 0) {
        cout << endl << "No matching poses found. Exiting." << endl;
        exit(0);
    }

    // Get only first N results - but adjust to results size if num of results are less than that specified by N
    size_t N = 8;
    if (results_size < N) {
        cout << endl << "Reducing matching poses to be reported (as specified in code): "
             << N << " to the number of matches found: " << results_size << endl;
        N = results_size;
    }
    vector<cv::ppf_match_3d::Pose3DPtr> resultsSub(results.begin(),results.begin()+N);

    typedef double Scalar;
    typedef Eigen::Matrix<Scalar, 3, Eigen::Dynamic> Vertices;
    
    // Map the OpenCV matrix with Eigen:
    Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>> pcTestEigen(pcTest.ptr<float>(), pcTest.rows, pcTest.cols);
    Eigen::MatrixXf eigenScene = pcTestEigen.transpose();
    Vertices eigenScenePoints = eigenScene.topRows<3>().cast<Scalar>();
    
    
    
    //ICP icp(100, 0.005f, 2.5f, 8);
    int64 t1 = cv::getTickCount();
    
    // Register for all selected poses
    cout << endl << "Performing ICP on " << N << " poses..." << endl;
    std::vector<Vertices> transformedModels;
    for (auto& result: resultsSub)
    {
        cv::Mat pct = cv::ppf_match_3d::transformPCPose(pc, result->pose);
        // Map the OpenCV matrix with Eigen:
        Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>> pcModelEigen(pct.ptr<float>(), pct.rows, pct.cols);
        Vertices eigenModelPoints = pcModelEigen.transpose().topRows<3>().cast<double>();
        Vertices eigenModelNormals = pcModelEigen.transpose().middleRows<3>(3).cast<double>();

        //SICP::point_to_plane(eigenScenePoints, eigenModelPoints, eigenModelNormals);
        SICP::point_to_point(eigenModelPoints, eigenScenePoints);

        transformedModels.push_back(eigenModelPoints);
    }
    //icp.registerModelToScene(pc, pcTest, resultsSub);
    int64 t2 = cv::getTickCount();
    
    cout << endl << "ICP Elapsed Time " <<
         (t2-t1)/cv::getTickFrequency() << " sec" << endl;
         
    cout << "Poses: " << endl;
    
    Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>> pcModelEigen(pc.ptr<float>(), pc.rows, pc.cols);
    Vertices eigenModelPoints = pcModelEigen.transpose().topRows<3>().cast<double>();
    Vertices eigenModelNormals = pcModelEigen.transpose().middleRows<3>(3).cast<double>();
    bool saved = false;
    // print each refined pose and save the first registered model for debugging
    for (auto& transformedModel : transformedModels)
    {
        Eigen::Affine3d transform = RigidMotionEstimator::point_to_point(eigenModelPoints, transformedModel);
        cout << "\nPose Result:\n " << transform.matrix() << endl;
        if (!saved)
        {
            Eigen::MatrixXf floatModel = transformedModel.transpose().cast<float>();
            cv::Mat cvTransformedModel;
            eigen2cv(floatModel, cvTransformedModel);
            cv::ppf_match_3d::writePLY(cvTransformedModel, "transformedModelOut.ply");
            saved = true; // only write the first model
        }
    }
    
    return 0;
    
}
bool CloudMatcher::matchWithGuess(ethzasl_icp_mapper::MatchCloudsWithGuess::Request& req, ethzasl_icp_mapper::MatchCloudsWithGuess::Response& res)
{
	// get and check reference
	const DP referenceCloud(PointMatcher_ros::rosMsgToPointMatcherCloud<float>(req.reference));
	const unsigned referenceGoodCount(referenceCloud.features.cols());
	const unsigned referencePointCount(req.reference.width * req.reference.height);
	const double referenceGoodRatio(double(referenceGoodCount) / double(referencePointCount));
	
	if (referenceGoodCount == 0)
	{
		ROS_ERROR("I found no good points in the reference cloud");
		return false;
	}
	if (referenceGoodRatio < 0.5)
	{
		ROS_WARN_STREAM("Partial reference cloud! Missing " << 100 - referenceGoodRatio*100.0 << "% of the cloud (received " << referenceGoodCount << ")");
	}
	
	// get and check reading
	const DP readingCloud(PointMatcher_ros::rosMsgToPointMatcherCloud<float>(req.readings));
	const unsigned readingGoodCount(readingCloud.features.cols());
	const unsigned readingPointCount(req.readings.width * req.readings.height);
	const double readingGoodRatio(double(readingGoodCount) / double(readingPointCount));
	
	if (readingGoodCount == 0)
	{
		ROS_ERROR("I found no good points in the reading cloud");
		return false;
	}
	if (readingGoodRatio < 0.5)
	{
		ROS_WARN_STREAM("Partial reference cloud! Missing " << 100 - readingGoodRatio*100.0 << "% of the cloud (received " << readingGoodCount << ")");
	}
	
	// check dimensions
	if (referenceCloud.features.rows() != readingCloud.features.rows())
	{
		ROS_ERROR_STREAM("Dimensionality missmatch: reference cloud is " << referenceCloud.features.rows()-1 << " while reading cloud is " << readingCloud.features.rows()-1);
		return false;
	}
	
	// PointMatcher type for the initial guess
	PM::TransformationParameters TInitialGuess;

	// Convert the ROS transform msg into an Eigen transform
	Eigen::Affine3d initialGuessInEigenMatrix;
	tf::transformMsgToEigen(req.initialGuess, initialGuessInEigenMatrix);

	TInitialGuess = initialGuessInEigenMatrix.matrix().cast<float>();
	// call ICP
	try 
	{
		PM::TransformationParameters Ticp;
		Ticp = icp(readingCloud, referenceCloud, TInitialGuess);
		geometry_msgs::Transform TicpMsg;
		tf::transformTFToMsg(PointMatcher_ros::eigenMatrixToTransform<float>(Ticp), TicpMsg);
		res.transform = TicpMsg;
		ROS_INFO_STREAM("match ratio: " << icp.errorMinimizer->getWeightedPointUsedRatio() << endl);
	}
	catch (const PM::ConvergenceError& error)
	{
		ROS_ERROR_STREAM("ICP failed to converge: " << error.what());
		return false;
	}
	
	return true;
}
Example #22
bool
pcl::EnsensoGrabber::computeCalibrationMatrix (const std::vector<Eigen::Affine3d, Eigen::aligned_allocator<Eigen::Affine3d> > &robot_poses,
                                               std::string &json,
                                               const std::string setup,
                                               const std::string target,
                                               const Eigen::Affine3d &guess_tf,
                                               const bool pretty_format) const
{
  if ( (*root_)[itmVersion][itmMajor] < 2 && (*root_)[itmVersion][itmMinor] < 3)
    PCL_WARN ("EnsensoSDK 1.3.x fixes bugs into extrinsic calibration matrix optimization, please update your SDK!\n"
              "http://www.ensenso.de/support/sdk-download/\n");

  try
  {
    std::vector<Eigen::Affine3d, Eigen::aligned_allocator<Eigen::Affine3d> > robot_poses_mm (robot_poses);
    std::vector<std::string> robot_poses_json;
    robot_poses_json.resize (robot_poses.size ());
    for (size_t i = 0; i < robot_poses_json.size (); ++i)
    {
      robot_poses_mm[i].translation () *= 1000.0; // Convert meters to millimeters
      if (!matrixTransformationToJson (robot_poses_mm[i], robot_poses_json[i]))
        return (false);
    }

    NxLibCommand calibrate (cmdCalibrateHandEye);

    // Set Hand-Eye calibration parameters
    if (boost::iequals (setup, "Fixed"))
      calibrate.parameters ()[itmSetup].set (valFixed);
    else
      calibrate.parameters ()[itmSetup].set (valMoving);

    calibrate.parameters ()[itmTarget].set (target);

    // Set guess matrix
    if (guess_tf.matrix () != Eigen::Matrix4d::Identity ())
    {
      // Matrix > JSON > Angle axis
      NxLibItem tf ("/tmpTF");
      if (!matrixTransformationToJson (guess_tf, json))
        return (false);
      tf.setJson (json);

      // Rotation
      double theta = tf[itmRotation][itmAngle].asDouble ();  // Angle of rotation
      double x = tf[itmRotation][itmAxis][0].asDouble ();   // X component of Euler vector
      double y = tf[itmRotation][itmAxis][1].asDouble ();   // Y component of Euler vector
      double z = tf[itmRotation][itmAxis][2].asDouble ();   // Z component of Euler vector
      tf.erase(); // Delete tmpTF node

      calibrate.parameters ()[itmLink][itmRotation][itmAngle].set (theta);
      calibrate.parameters ()[itmLink][itmRotation][itmAxis][0].set (x);
      calibrate.parameters ()[itmLink][itmRotation][itmAxis][1].set (y);
      calibrate.parameters ()[itmLink][itmRotation][itmAxis][2].set (z);

      // Translation
      calibrate.parameters ()[itmLink][itmTranslation][0].set (guess_tf.translation ()[0] * 1000.0);
      calibrate.parameters ()[itmLink][itmTranslation][1].set (guess_tf.translation ()[1] * 1000.0);
      calibrate.parameters ()[itmLink][itmTranslation][2].set (guess_tf.translation ()[2] * 1000.0);
    }

    // Feed all robot poses into the calibration command
    for (size_t i = 0; i < robot_poses_json.size (); ++i)
    {
      // Very weird behavior here:
      // If you modify this loop, check that all the transformations are still here in the [itmExecute][itmParameters] node
      // because for an unknown reason sometimes the old transformations are erased in the tree ("null" in the tree)
      // Ensenso SDK 2.3.348: If not moved after guess calibration matrix, the vector is empty.
      calibrate.parameters ()[itmTransformations][i].setJson (robot_poses_json[i], false);
    }

    calibrate.execute ();  // Might take up to 120 sec (maximum allowed by Ensenso API)

    if (calibrate.successful ())
    {
      json = calibrate.result ().asJson (pretty_format);
      return (true);
    }
    else
    {
      json.clear ();
      return (false);
    }
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "computeCalibrationMatrix");
    return (false);
  }
}
Example #23
cloud_normal_t::Ptr
read_scan_with_normals(e57::Reader& reader, uint32_t scan_index,
                       const Eigen::Vector3d& demean_offset) {
    e57::Data3D header;
    reader.ReadData3D(scan_index, header);
    int64_t nColumn = 0, nRow = 0, nPointsSize = 0, nGroupsSize = 0,
            nCounts = 0;
    bool bColumnIndex = false;
    reader.GetData3DSizes(scan_index, nRow, nColumn, nPointsSize, nGroupsSize,
                          nCounts, bColumnIndex);

    int64_t n_size = (nRow > 0) ? nRow : 1024;

    double* data_x = new double[n_size], * data_y = new double[n_size],
            * data_z = new double[n_size];
    double* nrm_x = new double[n_size], * nrm_y = new double[n_size],
            * nrm_z = new double[n_size];
    int8_t valid_normals = 0;
    auto block_read = reader.SetUpData3DPointsData(
        scan_index, n_size, data_x, data_y, data_z, NULL, NULL, NULL, NULL,
        NULL, NULL, NULL, nrm_x, nrm_y, nrm_z, &valid_normals);

    Eigen::Affine3d rotation;
    rotation =
        Eigen::Quaterniond(header.pose.rotation.w, header.pose.rotation.x,
                           header.pose.rotation.y, header.pose.rotation.z);
    Eigen::Affine3d registration;
    registration = Eigen::Translation<double, 3>(
                       header.pose.translation.x + demean_offset[0],
                       header.pose.translation.y + demean_offset[1],
                       header.pose.translation.z + demean_offset[2]) *
                   rotation;
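    // Normals transform with the inverse transpose of the point transform;
    // for a pure rotation the inverse transpose is the rotation itself.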
    Eigen::Affine3d normal_transformation;
    normal_transformation = rotation.matrix().inverse().transpose();

    unsigned long size = 0;
    cloud_normal_t::Ptr scan(new cloud_normal_t());
    while ((size = block_read.read()) > 0) {
        for (unsigned long i = 0; i < size; i++) {
            point_normal_t p;
            Eigen::Vector3d pos(data_x[i], data_y[i], data_z[i]);
            pos = registration * pos;
            p.x = static_cast<float>(pos[0]);
            p.y = static_cast<float>(pos[1]);
            p.z = static_cast<float>(pos[2]);
            Eigen::Vector3d nrm(nrm_x[i], nrm_y[i], nrm_z[i]);
            nrm = normal_transformation * nrm;
            p.normal[0] = static_cast<float>(nrm[0]);
            p.normal[1] = static_cast<float>(nrm[1]);
            p.normal[2] = static_cast<float>(nrm[2]);
            scan->push_back(p);
        }
    }
    block_read.close();

    delete[] data_x;
    delete[] data_y;
    delete[] data_z;
    delete[] nrm_x;
    delete[] nrm_y;
    delete[] nrm_z;

    // get origin position
    scan->sensor_origin_ =
        Eigen::Vector4f(header.pose.translation.x + demean_offset[0],
                        header.pose.translation.y + demean_offset[1],
                        header.pose.translation.z + demean_offset[2], 1.0);

    return scan;
}
Example #24
// Usage: ./Volumetricd.exe ../../data/monkey.obj 256 4 2 90
int main(int argc, char **argv)
{
	if (argc < 6)
	{
		std::cerr << "Missing parameters. Abort." 
			<< std::endl
			<< "Usage:  ./Volumetricd.exe ../../data/monkey.obj 256 8 2 90"
			<< std::endl;
		return EXIT_FAILURE;
	}
	Timer timer;
	const std::string filepath = argv[1];
	const int vol_size = atoi(argv[2]);
	const int vx_size = atoi(argv[3]);
	const int cloud_count = atoi(argv[4]);
	const int rot_interval = atoi(argv[5]);

	std::pair<std::vector<double>, std::vector<double>> depth_buffer;

	//
	// Projection and Modelview Matrices
	//
	Eigen::Matrix4d K = perspective_matrix(fov_y, aspect_ratio, near_plane, far_plane);
	std::pair<Eigen::Matrix4d, Eigen::Matrix4d>	T(Eigen::Matrix4d::Identity(), Eigen::Matrix4d::Identity());


	//
	// Creating volume
	//
	Eigen::Vector3d voxel_size(vx_size, vx_size, vx_size);
	Eigen::Vector3d volume_size(vol_size, vol_size, vol_size);
	Eigen::Vector3d voxel_count(volume_size.x() / voxel_size.x(), volume_size.y() / voxel_size.y(), volume_size.z() / voxel_size.z());
	//
	Eigen::Affine3d grid_affine = Eigen::Affine3d::Identity();
	grid_affine.translate(Eigen::Vector3d(0, 0, -256));
	grid_affine.scale(Eigen::Vector3d(1, 1, -1));	// z is negative inside of screen


	Grid grid(volume_size, voxel_size, grid_affine.matrix());


	//
	// Importing .obj
	//
	timer.start();
	std::vector<Eigen::Vector3d> points3DOrig;
	import_obj(filepath, points3DOrig);
	timer.print_interval("Importing monkey    : ");
	std::cout << "Monkey point count  : " << points3DOrig.size() << std::endl;

	// 
	// Translating and rotating monkey point cloud 
	std::pair<std::vector<Eigen::Vector3d>, std::vector<Eigen::Vector3d>> cloud;
	//
	Eigen::Affine3d rotate = Eigen::Affine3d::Identity();
	Eigen::Affine3d translate = Eigen::Affine3d::Identity();
	translate.translate(Eigen::Vector3d(0, 0, -256));


	// 
	// Compute first cloud
	//
	for (Eigen::Vector3d p3d : points3DOrig)
	{
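		// rotate then translate in homogeneous coordinates (w stays 1 for affine maps)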
		Eigen::Vector4d rot = translate.matrix() * rotate.matrix() * p3d.homogeneous();
		rot /= rot.w();
		cloud.first.push_back(rot.head<3>());
	}
	//
	// Update grid with first cloud
	//
	timer.start();
	create_depth_buffer<double>(depth_buffer.first, cloud.first, K, Eigen::Matrix4d::Identity(), far_plane);
	timer.print_interval("CPU compute depth   : ");

	timer.start();
	update_volume(grid, depth_buffer.first, K, T.first);
	timer.print_interval("CPU Update volume   : ");

	//
	// Compute next clouds
	Eigen::Matrix4d cloud_mat = Eigen::Matrix4d::Identity();
	Timer iter_timer;
	for (int i = 1; i < cloud_count; ++i)
	{
		std::cout << std::endl << i << " : " << i * rot_interval << std::endl;
		iter_timer.start();

		// Rotation matrix
		rotate = Eigen::Affine3d::Identity();
		rotate.rotate(Eigen::AngleAxisd(DegToRad(i * rot_interval), Eigen::Vector3d::UnitY()));

		cloud.second.clear();
		for (Eigen::Vector3d p3d : points3DOrig)
		{
			Eigen::Vector4d rot = translate.matrix() * rotate.matrix() * p3d.homogeneous();
			rot /= rot.w();
			cloud.second.push_back(rot.head<3>());
		}

		//export_obj("../../data/cloud_cpu_2.obj", cloud.second);

		timer.start();
		create_depth_buffer<double>(depth_buffer.second, cloud.second, K, Eigen::Matrix4d::Identity(), far_plane);
		timer.print_interval("Compute depth buffer: ");

		//export_depth_buffer("../../data/cpu_depth_buffer_2.obj", depth_buffer.second);

		timer.start();
		Eigen::Matrix4d icp_mat;
		ComputeRigidTransform(cloud.first, cloud.second, icp_mat);
		timer.print_interval("Compute rigid transf: ");

		//std::cout << std::fixed << std::endl << "icp_mat " << std::endl << icp_mat << std::endl;

		// accumulate matrix
		cloud_mat = cloud_mat * icp_mat;

		//std::cout << std::fixed << std::endl << "cloud_mat " << std::endl << cloud_mat << std::endl;

		timer.start();
		update_volume(grid, depth_buffer.second, K, cloud_mat.inverse());
		timer.print_interval("Update volume       : ");


		// copy second point cloud to first
		cloud.first = cloud.second;
		//depth_buffer.first = depth_buffer.second;

		iter_timer.print_interval("Iteration time      : ");
	}


	//std::cout << "------- // --------" << std::endl;
	//for (int i = 0; i <  grid.data.size(); ++i)
	//{
	//	const Eigen::Vector3d& point = grid.data[i].point;

	//	std::cout << point.transpose() << "\t\t" << grid.data[i].tsdf << " " << grid.data[i].weight << std::endl;
	//}
	//std::cout << "------- // --------" << std::endl;

//	timer.start();
//	export_volume("../../data/grid_volume_cpu.obj", grid.data);
//	timer.print_interval("Exporting volume    : ");
//	return 0;


	QApplication app(argc, argv);

	//
	// setup opengl viewer
	// 
	GLModelViewer glwidget;
	glwidget.resize(640, 480);
	glwidget.setPerspective(60.0f, 0.1f, 10240.0f);
	glwidget.move(320, 0);
	glwidget.setWindowTitle("Point Cloud");
	glwidget.setWeelSpeed(0.1f);
	glwidget.setDistance(-0.5f);
	glwidget.show();

	
	Eigen::Matrix4d to_origin = Eigen::Matrix4d::Identity();
	to_origin.col(3) << -(volume_size.x() / 2.0), -(volume_size.y() / 2.0), -(volume_size.z() / 2.0), 1.0;	// set translate


	std::vector<Eigen::Vector4f> vertices, colors;

	int i = 0;
	for (int z = 0; z <= volume_size.z(); z += voxel_size.z())
	{
		for (int y = 0; y <= volume_size.y(); y += voxel_size.y())
		{
			for (int x = 0; x <= volume_size.x(); x += voxel_size.x(), i++)
			{
				const float tsdf = grid.data.at(i).tsdf;

				//Eigen::Vector4d p = grid_affine.matrix() * to_origin * Eigen::Vector4d(x, y, z, 1);
				Eigen::Vector4d p = to_origin * Eigen::Vector4d(x, y, z, 1);
				p /= p.w();

				if (tsdf > 0.1)
				{
					vertices.push_back(p.cast<float>());
					colors.push_back(Eigen::Vector4f(0, 1, 0, 1));
				}
				else if (tsdf < -0.1)
				{
					vertices.push_back(p.cast<float>());
					colors.push_back(Eigen::Vector4f(1, 0, 0, 1));
				}
			}
		}
	}




	//
	// setup model
	// 
	std::shared_ptr<GLModel> model(new GLModel);
	model->initGL();
	model->setVertices(&vertices[0][0], vertices.size(), 4);
	model->setColors(&colors[0][0], colors.size(), 4);
	glwidget.addModel(model);


	//
	// setup kinect shader program
	// 
	std::shared_ptr<GLShaderProgram> kinectShaderProgram(new GLShaderProgram);
	if (kinectShaderProgram->build("color.vert", "color.frag"))
		model->setShaderProgram(kinectShaderProgram);

	return app.exec();
}