Example no. 1
void X3DConverter::startShape(const std::array<float, 12>& matrix) {

    // Find axis/angle from the matrix using Eigen for its bulletproof implementation.
    Eigen::Transform<float, 3, Eigen::Affine> t;
    t.setIdentity();
    // The input is a 3x4 matrix stored column-major in a flat array.
    for (unsigned int i = 0; i < 3; i++) {
        for (unsigned int j = 0; j < 4; j++) {
            t(i, j) = matrix[i + j * 3];
        }
    }

    Eigen::Matrix3f rotationMatrix;
    Eigen::Matrix3f scaleMatrix;
    t.computeRotationScaling(&rotationMatrix, &scaleMatrix);
    Eigen::Quaternionf q;
    Eigen::AngleAxisf aa;
    q = rotationMatrix;
    aa = q;

    Eigen::Vector3f scale = scaleMatrix.diagonal();
    Eigen::Vector3f translation = t.translation();

    startNode(ID::Transform);
    m_writers.back()->setSFVec3f(ID::translation, translation.x(), translation.y(), translation.z());
    m_writers.back()->setSFRotation(ID::rotation, aa.axis().x(), aa.axis().y(), aa.axis().z(), aa.angle());
    m_writers.back()->setSFVec3f(ID::scale, scale.x(), scale.y(), scale.z());
    startNode(ID::Shape);
    startNode(ID::Appearance);
    startNode(ID::Material);
    m_writers.back()->setSFColor<vector<float> >(ID::diffuseColor, RVMColorHelper::color(m_materials.back()));
    endNode(ID::Material); // Material
    endNode(ID::Appearance); // Appearance

}
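A minimal, self-contained sketch (not part of the converter) of the same Eigen decomposition used above: compose an affine transform, then recover translation, rotation (as axis/angle) and per-axis scale from it. The sample values are made up for illustration.

#include <Eigen/Geometry>
#include <iostream>

int main() {
    // Build a transform with known translation, rotation and scale.
    Eigen::Transform<float, 3, Eigen::Affine> t = Eigen::Transform<float, 3, Eigen::Affine>::Identity();
    t.translate(Eigen::Vector3f(1.f, 2.f, 3.f));
    t.rotate(Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitZ()));
    t.scale(Eigen::Vector3f(2.f, 2.f, 2.f));

    // Decompose, exactly as startShape() does.
    Eigen::Matrix3f rotation, scaling;
    t.computeRotationScaling(&rotation, &scaling);
    Eigen::AngleAxisf aa(Eigen::Quaternionf(rotation));

    std::cout << "translation: " << t.translation().transpose() << "\n"
              << "axis: " << aa.axis().transpose() << ", angle: " << aa.angle() << "\n"
              << "scale: " << scaling.diagonal().transpose() << "\n";
    return 0;
}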
bool StereoSensorProcessor::computeVariances(
    const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pointCloud,
    const Eigen::Matrix<double, 6, 6>& robotPoseCovariance,
    Eigen::VectorXf& variances)
{
  variances.resize(pointCloud->size());

  // Projection vector (P).
  const Eigen::RowVector3f projectionVector = Eigen::RowVector3f::UnitZ();

  // Sensor Jacobian (J_s).
  const Eigen::RowVector3f sensorJacobian = projectionVector * (rotationMapToBase_.transposed() * rotationBaseToSensor_.transposed()).toImplementation().cast<float>();

  // Robot rotation covariance matrix (Sigma_q).
  Eigen::Matrix3f rotationVariance = robotPoseCovariance.bottomRightCorner(3, 3).cast<float>();

  // Preparations for robot rotation Jacobian (J_q) to minimize computation for every point in point cloud.
  const Eigen::Matrix3f C_BM_transpose = rotationMapToBase_.transposed().toImplementation().cast<float>();
  const Eigen::RowVector3f P_mul_C_BM_transpose = projectionVector * C_BM_transpose;
  const Eigen::Matrix3f C_SB_transpose = rotationBaseToSensor_.transposed().toImplementation().cast<float>();
  const Eigen::Matrix3f B_r_BS_skew = kindr::getSkewMatrixFromVector(Eigen::Vector3f(translationBaseToSensorInBaseFrame_.toImplementation().cast<float>()));

  for (unsigned int i = 0; i < pointCloud->size(); ++i)
  {
    // For every point in point cloud.

    // Preparation.
    pcl::PointXYZRGB point = pointCloud->points[i];
    double disparity = sensorParameters_.at("depth_to_disparity_factor")/point.z;
    Eigen::Vector3f pointVector(point.x, point.y, point.z); // S_r_SP
    float heightVariance = 0.0; // sigma_p

    // Measurement distance.
    float measurementDistance = pointVector.norm();

    // Compute sensor covariance matrix (Sigma_S) with sensor model.
    float varianceNormal = pow(sensorParameters_.at("depth_to_disparity_factor") / pow(disparity, 2), 2)
        * ((sensorParameters_.at("p_5") * disparity + sensorParameters_.at("p_2"))
            * sqrt(pow(sensorParameters_.at("p_3") * disparity + sensorParameters_.at("p_4") - getJ(i), 2)
                    + pow(240 - getI(i), 2)) + sensorParameters_.at("p_1"));
    float varianceLateral = pow(sensorParameters_.at("lateral_factor") * measurementDistance, 2);
    Eigen::Matrix3f sensorVariance = Eigen::Matrix3f::Zero();
    sensorVariance.diagonal() << varianceLateral, varianceLateral, varianceNormal;

    // Robot rotation Jacobian (J_q).
    const Eigen::Matrix3f C_SB_transpose_times_S_r_SP_skew = kindr::getSkewMatrixFromVector(Eigen::Vector3f(C_SB_transpose * pointVector));
    Eigen::RowVector3f rotationJacobian = P_mul_C_BM_transpose * (C_SB_transpose_times_S_r_SP_skew + B_r_BS_skew);

    // Measurement variance for map (error propagation law).
    heightVariance = rotationJacobian * rotationVariance * rotationJacobian.transpose();
    heightVariance += sensorJacobian * sensorVariance * sensorJacobian.transpose();

    // Copy to list.
    variances(i) = heightVariance;
  }

  return true;
}
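For reference, the height variance computed in the loop above is the error propagation law named in the comments. In LaTeX, with the symbols used in the code:

\sigma_p^2 = J_q \,\Sigma_q\, J_q^\top + J_s \,\Sigma_S\, J_s^\top,
\qquad
J_s = P \, C_{BM}^\top C_{SB}^\top,
\qquad
J_q = P \, C_{BM}^\top \left( [\, C_{SB}^\top \, {}_S r_{SP} \,]_\times + [\, {}_B r_{BS} \,]_\times \right)

where P projects onto the map z-axis, \Sigma_q is the robot rotation covariance, and \Sigma_S = \mathrm{diag}(\sigma_l^2, \sigma_l^2, \sigma_n^2) is the sensor covariance built from the disparity-based stereo model above.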
bool LaserSensorProcessor::computeVariances(
		const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pointCloud,
		const Eigen::Matrix<double, 6, 6>& robotPoseCovariance,
		Eigen::VectorXf& variances)
{
	variances.resize(pointCloud->size());

	// Projection vector (P).
	const Eigen::RowVector3f projectionVector = Eigen::RowVector3f::UnitZ();

	// Sensor Jacobian (J_s).
	const Eigen::RowVector3f sensorJacobian = projectionVector * (rotationMapToBase_.transposed() * rotationBaseToSensor_.transposed()).toImplementation().cast<float>();

	// Robot rotation covariance matrix (Sigma_q).
	Eigen::Matrix3f rotationVariance = robotPoseCovariance.bottomRightCorner(3, 3).cast<float>();

	// Preparations for robot rotation Jacobian (J_q) to minimize computation for every point in point cloud.
	const Eigen::Matrix3f C_BM_transpose = rotationMapToBase_.transposed().toImplementation().cast<float>();
	const Eigen::RowVector3f P_mul_C_BM_transpose = projectionVector * C_BM_transpose;
	const Eigen::Matrix3f C_SB_transpose = rotationBaseToSensor_.transposed().toImplementation().cast<float>();
	const Eigen::Matrix3f B_r_BS_skew = kindr::linear_algebra::getSkewMatrixFromVector(Eigen::Vector3f(translationBaseToSensorInBaseFrame_.toImplementation().cast<float>()));

	for (unsigned int i = 0; i < pointCloud->size(); ++i)
	{
		// For every point in point cloud.

		// Preparation.
		auto& point = pointCloud->points[i];
		Eigen::Vector3f pointVector(point.x, point.y, point.z); // S_r_SP
		float heightVariance = 0.0; // sigma_p

		// Measurement distance.
		float measurementDistance = pointVector.norm();

		// Compute sensor covariance matrix (Sigma_S) with sensor model.
		float varianceNormal = pow(sensorParameters_.at("min_radius"), 2);
		float varianceLateral = pow(sensorParameters_.at("beam_constant") + sensorParameters_.at("beam_angle") * measurementDistance, 2);
		Eigen::Matrix3f sensorVariance = Eigen::Matrix3f::Zero();
		sensorVariance.diagonal() << varianceLateral, varianceLateral, varianceNormal;

		// Robot rotation Jacobian (J_q).
		const Eigen::Matrix3f C_SB_transpose_times_S_r_SP_skew = kindr::linear_algebra::getSkewMatrixFromVector(Eigen::Vector3f(C_SB_transpose * pointVector));
		Eigen::RowVector3f rotationJacobian = P_mul_C_BM_transpose * (C_SB_transpose_times_S_r_SP_skew + B_r_BS_skew);

		// Measurement variance for map (error propagation law).
		heightVariance = rotationJacobian * rotationVariance * rotationJacobian.transpose();
		heightVariance += sensorJacobian * sensorVariance * sensorJacobian.transpose();

		// Copy to list.
		variances(i) = heightVariance;
	}

	return true;
}
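The laser variant differs from the stereo one only in the sensor covariance model: the normal variance is a constant and the lateral standard deviation grows linearly with the measurement distance d = \lVert {}_S r_{SP} \rVert. In LaTeX, matching the parameter names in the code:

\sigma_n = \texttt{min\_radius}, \qquad
\sigma_l = \texttt{beam\_constant} + \texttt{beam\_angle} \cdot d, \qquad
\Sigma_S = \mathrm{diag}(\sigma_l^2, \sigma_l^2, \sigma_n^2)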
Example no. 4
  void PwnTracker::processFrame(const DepthImage& depthImage_, 
				const Eigen::Isometry3f& sensorOffset_, 
				const Eigen::Matrix3f& cameraMatrix_,
				const Eigen::Isometry3f& initialGuess){
    int r,c;
    Eigen::Matrix3f scaledCameraMatrix = cameraMatrix_;
    _currentCloudOffset = sensorOffset_;
    _currentCameraMatrix = cameraMatrix_;
    _currentDepthImage = depthImage_;
    _currentCloud = makeCloud(r,c,scaledCameraMatrix,_currentCloudOffset,_currentDepthImage);
    
    bool newFrame = false;
    bool newRelation = false;
    PwnTrackerFrame* currentTrackerFrame=0;
    Eigen::Isometry3d relationMean;
    if (_previousCloud){
      _aligner->setCurrentSensorOffset(_currentCloudOffset);
      _aligner->setCurrentFrame(_currentCloud);
      _aligner->setReferenceSensorOffset(_previousCloudOffset);
      _aligner->setReferenceFrame(_previousCloud);

      _aligner->correspondenceFinder()->setImageSize(r,c);
      PinholePointProjector* alprojector=dynamic_cast<PinholePointProjector*>(_aligner->projector());
      alprojector->setCameraMatrix(scaledCameraMatrix);
      alprojector->setImageSize(r,c);

      Eigen::Isometry3f guess=_previousCloudTransform.inverse()*_globalT*initialGuess;
      _aligner->setInitialGuess(guess);
      double t0, t1;
      t0 = g2o::get_time();
      _aligner->align();

      t1 = g2o::get_time();
      std::cout << "Time: " << t1 - t0 << " seconds " << std::endl;
 
      cerr << "inliers: " << _aligner->inliers() << endl;
      // cerr << "chi2: " << _aligner->error() << endl;
      // cerr << "chi2/inliers: " << _aligner->error()/_aligner->inliers() << endl;
      // cerr << "initialGuess: " << t2v(guess).transpose() << endl;
      // cerr << "transform   : " << t2v(_aligner->T()).transpose() << endl;
      if (_aligner->inliers()>0){
    	_globalT = _previousCloudTransform*_aligner->T();
    	//cerr << "TRANSFORM FOUND" <<  endl;
      } else {
    	//cerr << "FAILURE" <<  endl;
    	_globalT = _globalT*guess;
      }
      convertScalar(relationMean, _aligner->T());
      if (! (_counter%50) ) {
    	// Re-orthonormalize the rotation block to curb numerical drift (see the sketch after this function).
    	Eigen::Matrix3f R = _globalT.linear();
    	Eigen::Matrix3f E = R.transpose() * R;
    	E.diagonal().array() -= 1;
    	_globalT.linear() -= 0.5 * R * E;
      }
      _globalT.matrix().row(3) << 0,0,0,1;
 
      newAlignmentCallbacks(_globalT, _aligner->T(), _aligner->inliers(), _aligner->error());
			   
      int maxInliers = r*c;
      float inliersFraction = (float) _aligner->inliers()/(float) maxInliers;
      cerr << "inliers/maxinliers/fraction: " << _aligner->inliers() << "/" << maxInliers << "/" << inliersFraction << endl;
      if (inliersFraction<_newFrameInliersFraction){
	newFrame = true;
	newRelation = true;

	// char filename[1024];
	// sprintf (filename, "frame-%05d.pwn", _numKeyframes);
	// frame->save(filename,1,true,_globalT);

	_numKeyframes ++;
	if (!_cache) 
	  delete _previousCloud;
	else{
	  _previousCloudHandle.release();
	}
	//_cache->unlock(_previousTrackerFrame);
	// _aligner->setReferenceSensorOffset(_currentCloudOffset);
	// _aligner->setReferenceFrame(_currentCloud);
	_previousCloud = _currentCloud;
	_previousCloudTransform = _globalT;
	// cerr << "new frame added (" << _numKeyframes <<  ")" << endl;
	// cerr << "inliers: " << _aligner->inliers() << endl;
	// cerr << "maxInliers: " << maxInliers << endl;
	// cerr << "chi2: " << _aligner->error() << endl;
	// cerr << "chi2/inliers: " << _aligner->error()/_aligner->inliers() << endl;
	// cerr << "initialGuess: " << t2v(guess).transpose() << endl;
	// cerr << "transform   : " << t2v(_aligner->T()).transpose() << endl;
	// cerr << "globalTransform   : " << t2v(_globalT).transpose() << endl;
      } else { // previous frame but offset is small
	delete _currentCloud;
	_currentCloud = 0;
      }
    } else { // first frame
      //ser.writeObject(*manager);
      newFrame = true;
      // _aligner->setReferenceSensorOffset(_currentCloudOffset);
      // _aligner->setReferenceFrame(_currentCloud);
      _previousCloud = _currentCloud;
      _previousCloudTransform = _globalT;
      _previousCloudOffset = _currentCloudOffset;
      _numKeyframes ++;
      /*Eigen::Isometry3f t = _globalT;
	geometry_msgs::Point p;
	p.x = t.translation().x();
	p.y = t.translation().y();
	p.z = t.translation().z();
	m_odometry.points.push_back(p);
      */
    }
    _counter++;

    if (newFrame) {
      //cerr << "maing new frame, previous: " << _previousTrackerFrame << endl;
      currentTrackerFrame = new PwnTrackerFrame(_manager);
      //currentTrackerFrame->cloud.set(cloud);
      currentTrackerFrame->cloud=_currentCloud;
      currentTrackerFrame->sensorOffset = _currentCloudOffset;
      boss_map::ImageBLOB* depthBLOB=new boss_map::ImageBLOB();
      DepthImage_convert_32FC1_to_16UC1(depthBLOB->cvImage(),_currentDepthImage);
      //depthImage.toCvMat(depthBLOB->cvImage());
      depthBLOB->adjustFormat();
      currentTrackerFrame->depthImage.set(depthBLOB);
      
      boss_map::ImageBLOB* normalThumbnailBLOB=new boss_map::ImageBLOB();
      boss_map::ImageBLOB* depthThumbnailBLOB=new boss_map::ImageBLOB();
      makeThumbnails(depthThumbnailBLOB->cvImage(), normalThumbnailBLOB->cvImage(), _currentCloud, 
		    _currentDepthImage.rows, 
		    _currentDepthImage.cols,
		    _currentCloudOffset,
		    _currentCameraMatrix, 
		    0.1);
      normalThumbnailBLOB->adjustFormat();
      depthThumbnailBLOB->adjustFormat();
      
      currentTrackerFrame->depthThumbnail.set(depthThumbnailBLOB);
      currentTrackerFrame->normalThumbnail.set(normalThumbnailBLOB);
      currentTrackerFrame->cameraMatrix = _currentCameraMatrix;
      currentTrackerFrame->imageRows = _currentDepthImage.rows;
      currentTrackerFrame->imageCols = _currentDepthImage.cols;
      Eigen::Isometry3d _iso;
      convertScalar(_iso, _globalT);
      currentTrackerFrame->setTransform(_iso);
      convertScalar(currentTrackerFrame->sensorOffset, _currentCloudOffset);
      currentTrackerFrame->setSeq(_seq++);
      _manager->addNode(currentTrackerFrame);
      //cerr << "AAAA" << endl;

      if (_cache)
	_currentCloudHandle=_cache->get(currentTrackerFrame);
      //cerr << "BBBB" << endl;
      //_cache->lock(currentTrackerFrame);
      newFrameCallbacks(currentTrackerFrame);
      currentTrackerFrame->depthThumbnail.set(0);
      currentTrackerFrame->normalThumbnail.set(0);
      currentTrackerFrame->depthImage.set(0);
    }

    if (newRelation) {
      PwnTrackerRelation* rel = new PwnTrackerRelation(_manager);
      rel->setTransform(relationMean);
      Matrix6d omega;
      convertScalar(omega, _aligner->omega());
      // The computed omega is immediately discarded in favor of a fixed information matrix.
      omega.setIdentity();
      omega *= 100;
      rel->setTo(currentTrackerFrame);
      rel->setFrom(_previousTrackerFrame);
      //cerr << "saved relation" << _previousTrackerFrame << " " << currentTrackerFrame << endl;
      _manager->addRelation(rel);
      newRelationCallbacks(rel);
      //ser.writeObject(*rel);
    }

    if (currentTrackerFrame) {
      _previousTrackerFrame = currentTrackerFrame;
    }
  }
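The periodic cleanup in processFrame (every 50th frame) is a first-order re-orthonormalization of the rotation block: with E = R^T R - I, the update R <- R - 0.5*R*E removes accumulated numerical drift while leaving an already-orthonormal R unchanged. A standalone sketch of just that step, with made-up drift:

#include <Eigen/Geometry>
#include <iostream>

// First-order re-orthonormalization; assumes R is already close to orthonormal.
static Eigen::Matrix3f reorthonormalize(Eigen::Matrix3f R) {
    Eigen::Matrix3f E = R.transpose() * R;
    E.diagonal().array() -= 1.0f;   // E = R^T R - I
    return R - 0.5f * R * E;
}

int main() {
    Eigen::Matrix3f R = Eigen::AngleAxisf(0.3f, Eigen::Vector3f::UnitY()).toRotationMatrix();
    R(0, 1) += 1e-3f;               // inject a small drift
    Eigen::Matrix3f Rc = reorthonormalize(R);
    std::cout << "orthogonality error before: "
              << (R.transpose() * R - Eigen::Matrix3f::Identity()).norm()
              << ", after: "
              << (Rc.transpose() * Rc - Eigen::Matrix3f::Identity()).norm() << "\n";
    return 0;
}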
Example no. 5
int main(int argc, char **argv){

#if 0
  DOF6::TFLinkvf rot1,rot2;
  Eigen::Matrix4f tf1 = build_random_tflink(rot1,3);
  Eigen::Matrix4f tf2 = build_random_tflink(rot2,3);

  DOF6::DOF6_Source<DOF6::TFLinkvf,DOF6::TFLinkvf,float> abc(rot1.makeShared(), rot2.makeShared());
  abc.getRotation();
  abc.getTranslation();

  std::cout<<"tf1\n"<<tf1<<"\n";
  std::cout<<"tf2\n"<<tf2<<"\n";
  std::cout<<"tf1*2\n"<<tf1*tf2<<"\n";
  std::cout<<"tf2*1\n"<<tf2*tf1<<"\n";

  std::cout<<"tf1\n"<<rot1.getTransformation()<<"\n";
  std::cout<<"tf2\n"<<rot2.getTransformation()<<"\n";
  std::cout<<"tf1*2\n"<<(rot1+rot2).getTransformation()<<"\n";

  rot1.check();
  rot2.check();

  return 0;

  pcl::RotationFromCorrespondences rfc;
  Eigen::Vector3f n, nn, v,n2,n3,z,y,tv;
  float a = 0.1f;

  z.fill(0);y.fill(0);
  z(2)=1;y(1)=1;
  nn.fill(0);
  nn(0) = 1;
  Eigen::AngleAxisf aa(a,nn);

  nn.fill(100);

  n.fill(0);
  n(0) = 1;
  n2.fill(0);
  n2=n;
  n2(1) = 0.2;
  n3.fill(0);
  n3=n;
  n3(2) = 0.2;

  n2.normalize();
  n3.normalize();

  tv.fill(1);
  tv.normalize();

#if 0

#if 0
  rfc.add(n,aa.toRotationMatrix()*n+nn,
          1*n.cross(y),1*n.cross(z),
          1*(aa.toRotationMatrix()*n).cross(y),1*(aa.toRotationMatrix()*n).cross(z),
          1,1/sqrtf(3));
#else
  rfc.add(n,aa.toRotationMatrix()*n,
          0*n.cross(y),0*n.cross(z),
          0*(aa.toRotationMatrix()*n).cross(y),0*(aa.toRotationMatrix()*n).cross(z),
          1,0);
#endif

#if 1
  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          tv,tv,
          tv,tv,
          1,1);
#else
  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          1*n2.cross(y),1*n2.cross(z),
          1*(aa.toRotationMatrix()*n2).cross(y),1*(aa.toRotationMatrix()*n2).cross(z),
          1,1/sqrtf(3));
#endif

#else
  float f=1;
  Eigen::Vector3f cyl;
  cyl.fill(1);
  cyl(0)=1;
  Eigen::Matrix3f cylM;
  cylM.fill(0);
  cylM.diagonal() = cyl;
  rfc.add(n,aa.toRotationMatrix()*n,
          f*n.cross(y),f*n.cross(z),
          f*(aa.toRotationMatrix()*n).cross(y),f*(aa.toRotationMatrix()*n).cross(z),
          1,0);
  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          1*n2.cross(y),1*n2.cross(z),
          1*(aa.toRotationMatrix()*n2).cross(y),1*(aa.toRotationMatrix()*n2).cross(z),
          1,1);
#endif
  rfc.add(n3,aa.toRotationMatrix()*n3+nn,
          //tv,tv,
          //tv,tv,
          n3.cross(y),n3.cross(z),
          1*(aa.toRotationMatrix()*n3).cross(y),1*(aa.toRotationMatrix()*n3).cross(z),
          1,1);

  std::cout<<"comp matrix:\n"<<rfc.getTransformation()<<"\n\n";
  std::cout<<"real matrix:\n"<<aa.toRotationMatrix()<<"\n\n";

  return 0;

  //rfc.covariance_.normalize();
  rfc.covariance_ = (rfc.var_*rfc.covariance_.inverse().transpose()*rfc.covariance_);
  std::cout<<"comp matrix: "<<rfc.getTransformation()<<"\n\n";
  std::cout<<"real matrix: "<<aa.toRotationMatrix()*aa.toRotationMatrix()<<"\n\n";

  return 0;
#endif

  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
void rawCloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloud)
{
	//double timeScanCur = laserCloud->header.stamp.toSec();
	ros::Time timestamp = laserCloud->header.stamp;
	//bool returnBool;

	pcl::fromROSMsg(*laserCloud, *laserCloudIn);

	if (visualizationFlag)
	{
		viewer->removeAllPointClouds(0);
		viewer->removeAllShapes(0);
	}

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudRGB(new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudLabeled(new pcl::PointCloud<pcl::PointXYZRGB>);
	pcl::PointCloud<pcl::PointXYZI>::Ptr cloudRaw(new pcl::PointCloud<pcl::PointXYZI>);
	//	std::vector<std::string> labelsC1;
	//	std::vector<std::string> labelsC2;
	//	pcl::PointCloud<SAFEFeatures>::Ptr featureCloudC1Acc(new pcl::PointCloud<SAFEFeatures>);
	//	pcl::PointCloud<SAFEFeatures>::Ptr featureCloudC2Acc(new pcl::PointCloud<SAFEFeatures>);
	std::vector<std::string> labelsC1;
	std::vector<std::string> labelsC2;
	std::vector<std::string> labelsC3;
	std::vector<std::string> labels;
	pcl::PointCloud<AllFeatures>::Ptr featureCloudAcc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC1Acc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC2Acc(new pcl::PointCloud<AllFeatures>);
	pcl::PointCloud<AllFeatures>::Ptr featureCloudC3Acc(new pcl::PointCloud<AllFeatures>);
	std::vector<Eigen::Matrix3f> confMats;
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr classificationCloud(new pcl::PointCloud<pcl::PointXYZRGB>);

	std::vector<DatasetContainer> dataset;

	std::string folder = "/home/mikkel/catkin_ws/src/trainingSet/";

	pcl::copyPointCloud(*laserCloudIn, *cloudRGB);
	pcl::copyPointCloud(*laserCloudIn, *cloudRaw);

	// Single colored
	if (visualizationFlag)
	{
		pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> color1(cloudRGB, 0, 0, 255);
		viewer->addPointCloud<pcl::PointXYZRGB>(cloudRGB,color1,"cloudRGB",viewP(RawCloud));
	}
	//viewer->addText ("RawCloud", 10, 10, "vPP text", viewP(RawCloud));

	// Ground modeling
	//	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudStat(new pcl::PointCloud<pcl::PointXYZRGB>);
	//	pcl::PointCloud<pcl::PointXYZ>::Ptr planeDistCloud(new pcl::PointCloud<pcl::PointXYZ>);
	//	pcl::copyPointCloud(*cloudRGB,*cloudStat);
	//	pcl::copyPointCloud(*cloudRGB,*planeDistCloud);


	// Remove everything outside the two lines
	pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_filtered(new pcl::PointCloud<pcl::PointXYZI>);
	//pcl::copyPointCloud(*cloudRaw, *cloud_filtered);

	// Rotation
	float thetaZ = theta*PI/180; // The angle of rotation in radians (theta is given in degrees)
	Eigen::Affine3f transform_2 = Eigen::Affine3f::Identity();
	transform_2.rotate (Eigen::AngleAxisf (thetaZ, Eigen::Vector3f::UnitZ()));
	pcl::PointCloud<pcl::PointXYZ>::Ptr transformed_cloud (new pcl::PointCloud<pcl::PointXYZ> ());
	pcl::transformPointCloud (*cloudRaw, *cloud_filtered, transform_2);

	// Passthrough
	pcl::PassThrough<pcl::PointXYZI> passX;
	passX.setInputCloud (cloud_filtered);
	passX.setFilterFieldName ("x");
	passX.setFilterLimits (-0.5, 100.0);
	//pass.setFilterLimitsNegative (true);
	passX.filter (*cloud_filtered);

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filteredRGB(new pcl::PointCloud<pcl::PointXYZRGB>);
	if (receivedHough)
	{
		pcl::PassThrough<pcl::PointXYZI> pass;
		pass.setInputCloud (cloud_filtered);
		pass.setFilterFieldName ("y");
		pass.setFilterLimits (r1+WALL_CLEARANCE, r2-WALL_CLEARANCE);
		//pass.setFilterLimitsNegative (true);
		pass.filter (*cloud_filtered);
	}

	pcl::copyPointCloud(*cloud_filtered, *cloud_filteredRGB);
	if (visualizationFlag)
	{
		pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> color9(cloudRGB, 0, 0, 255);
		viewer->addPointCloud<pcl::PointXYZRGB>(cloud_filteredRGB,color9,"LinesFiltered",viewP(LinesFiltered));
		viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "LinesFiltered");
		viewer->addText ("LinesFiltered", 10, 10, fontsize, 1, 1, 1, "LinesFiltered text", viewP(LinesFiltered));
	}


	// Grid Minimum
//	pcl::PointCloud<pcl::PointXYZI>::Ptr gridCloud(new pcl::PointCloud<pcl::PointXYZI>);
//	pcl::GridMinimum<pcl::PointXYZI> gridm(1.0); // Set grid resolution
//	gridm.setInputCloud(cloud_filtered);
//	gridm.filter(*gridCloud);

	//*** Transform point cloud to adjust for a Ground Plane ***//
	pcl::ModelCoefficients ground_coefficients;
	pcl::PointIndices ground_indices;
	pcl::SACSegmentation<pcl::PointXYZI> ground_finder;
	ground_finder.setOptimizeCoefficients(true);
	ground_finder.setModelType(pcl::SACMODEL_PLANE);
	ground_finder.setMethodType(pcl::SAC_RANSAC);
	ground_finder.setDistanceThreshold(0.15);
	ground_finder.setInputCloud(cloud_filtered);
	ground_finder.segment(ground_indices, ground_coefficients);

	// Convert plane normal vector from type ModelCoefficients to Vector4f
	Eigen::Vector3f np = Eigen::Vector3f(ground_coefficients.values[0],ground_coefficients.values[1],ground_coefficients.values[2]);
	// Find rotation vector, u, and angle, theta
	Eigen::Vector3f u = np.cross(Eigen::Vector3f(0,0,1));
	u.normalize();
	float theta = acos(np.dot(Eigen::Vector3f(0,0,1))); // Note: shadows the file-scope theta used above
	// Construct transformation matrix (rotation matrix from axis and angle)
	Eigen::Affine3f tf = Eigen::Affine3f::Identity();
	float d = ground_coefficients.values[3];
	tf.translation() << d*np[0], d*np[1], d*np[2];
	tf.rotate (Eigen::AngleAxisf (theta, u));
	// Execute the transformation
	pcl::PointCloud<pcl::PointXYZI>::Ptr transformedCloud (new pcl::PointCloud<pcl::PointXYZI> ());
	pcl::transformPointCloud(*cloud_filtered, *transformedCloud, tf);


	// Compute statistical moments for least significant direction in neighborhood
#if (OLD_METHOD)
	pcl::NeighborhoodFeatures<pcl::PointXYZI, AllFeatures> features;
	pcl::PointCloud<AllFeatures>::Ptr featureCloud(new pcl::PointCloud<AllFeatures>);
	features.setInputCloud(transformedCloud);
	pcl::search::KdTree<pcl::PointXYZI>::Ptr search_tree5( new pcl::search::KdTree<pcl::PointXYZI>());
	features.setSearchMethod(search_tree5);
	//principalComponentsAnalysis.setRadiusSearch(0.6);
	//features.setKSearch(30);
	features.setRadiusSearch(0.01); // This value does not do anything! Look inside "NeighborhoodFeatures.h" to see adaptive radius calculation.
	features.compute(*featureCloud);

	//	ros::Time tFeatureCalculation2 = ros::Time::now();
	//	ros::Duration tFeatureCalculation = tFeatureCalculation2-tPreprocessing2;
	//	if (executionTimes==true)
	//		ROS_INFO("Feature calculation time = %i",tFeatureCalculation.nsec);



	visualizeFeature(*cloudRGB, *featureCloud, 0, viewP(GPDistMean), "GPDistMean");
	visualizeFeature(*cloudRGB, *featureCloud, 1, viewP(GPDistMin), "GPDistMin");
	visualizeFeature(*cloudRGB, *featureCloud, 2, viewP(GPDistPoint), "GPDistPoint");
	visualizeFeature(*cloudRGB, *featureCloud, 3, viewP(GPDistVar), "GPDistVar");
	visualizeFeature(*cloudRGB, *featureCloud, 4, viewP(PCA1), "PCA1"); // 4 = PCA1
	visualizeFeature(*cloudRGB, *featureCloud, 5, viewP(PCA2), "PCA2"); // 3 = GPVar
	visualizeFeature(*cloudRGB, *featureCloud, 6, viewP(PCA3), "PCA3"); // 0 = GPMean
	visualizeFeature(*cloudRGB, *featureCloud, 7, viewP(PCANX), "PCANX");
	visualizeFeature(*cloudRGB, *featureCloud, 8, viewP(PCANY), "PCANY");
	visualizeFeature(*cloudRGB, *featureCloud, 9, viewP(PCANZ), "PCANZ"); // 9 = PCANZ
	visualizeFeature(*cloudRGB, *featureCloud, 10, viewP(PointDist), "PointDist");
	visualizeFeature(*cloudRGB, *featureCloud, 11, viewP(RSS), "RSS");
	visualizeFeature(*cloudRGB, *featureCloud, 12, viewP(Reflectance), "Reflectance");

	ROS_INFO("Computed all neighborhood features");

	std::vector<float>* centroid = new std::vector<float>();
	std::vector<float>* stds = new std::vector<float>();

	//*** Testing ***//
	if (training == false)
	{
		Eigen::Matrix3f confMat = Eigen::Matrix3f::Zero();
		if (testdata==true)
		{
			*featureCloud = *featureCloudAcc;
		}

		pcl::copyPointCloud(*cloudRGB,*classificationCloud);

		// Load feature normalization parameters
		std::ifstream input_file((folder + "centroid").c_str());
		std::istream_iterator<float> start(input_file), end;
		std::vector<float> numbers(start, end);
		std::copy(numbers.begin(), numbers.end(), std::back_inserter(*centroid));
		std::ifstream input_file2((folder + "stds").c_str());
		std::istream_iterator<float> start2(input_file2), end2;
		std::vector<float> numbers2(start2, end2);
		std::copy(numbers2.begin(), numbers2.end(), std::back_inserter(*stds));


		// Normalize features
		//		if (pcl::io::savePCDFile ("testFeaturesNonNormalized.pcd", *featureCloud) != 0)
		//			return (false);

		ros::Time tNormalization1 = ros::Time::now();
		normalizeFeatures(*featureCloud,*featureCloud, &centroid, &stds);
		ros::Time tNormalization2 = ros::Time::now();
		ros::Duration tNormalization = tNormalization2-tNormalization1;
		if (executionTimes==true)
			ROS_INFO("Normalization time = %f",(float)(tNormalization.nsec)/1000000);

		pcl::PointIndices::Ptr unclassifiedIndices (new pcl::PointIndices ());
		//		if (pcl::io::savePCDFile ("testFeatures.pcd", *featureCloud) != 0)
		//			return (false);

		CvSVM SVM;
		//int dim = sizeof(featureCloud->points[0])/sizeof(float);
		int dim = useFeatures.size();//sizeof(useFeatures)/sizeof(int);

		SVM.load((folder + "svm").c_str());
		//SVM.load((folder + "svm2014-11-06-11-22-59").c_str());
		//SVM.load((folder + "svm2014-11-07-14-00-31").c_str());
		//SVM.load((folder + "svm2014-11-07-13-16-28").c_str());

		ros::Time tClassification1 = ros::Time::now();
		//int nMissingLabels = 0;
		for (size_t i=0;i<classificationCloud->points.size();i++)
		{
			float dataSVM[dim];
			for (int d=0;d<dim;d++)
			{
				//dataSVM[d] = featureCloud->points[i].data[d];
				dataSVM[d] = featureCloud->points[i].data[useFeatures[d]];
			}
			Mat labelsMat(1, dim, CV_32FC1, dataSVM);

			float response = SVM.predict(labelsMat);

			if (response == 1.0)
				classificationCloud->points[i].b = 255;
			else if (response == 2.0)
				classificationCloud->points[i].g = 255;
			else if (response == 3.0)
				classificationCloud->points[i].r = 255;
			else
				ROS_INFO("Another class label = %f",response);

			if (testdata==true)
			{
				int groundTruth;
				if (labels[i].compare("ground") == 0)
					groundTruth = 1;
				else if (labels[i].compare("vegetation") == 0)
					groundTruth = 2;
				else if (labels[i].compare("object") == 0)
					groundTruth = 3;
				confMat(groundTruth-1,(int)response-1)++;
			}
		}
		ros::Time tClassification2 = ros::Time::now();
		ros::Duration tClassification = tClassification2-tClassification1;
		if (executionTimes==true)
			ROS_INFO("Classification time = %f",(float)(tClassification.nsec)/100000);

		//ROS_INFO("Number of missing class labels = %i", nMissingLabels);

		pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC(classificationCloud);
		viewer->addPointCloud<pcl::PointXYZRGB>(classificationCloud,colorC,"Classification",viewP(Classification));
		viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Classification");
		viewer->addText ("Classification", 10, 10, fontsize, 1, 1, 1, "Classification text", viewP(Classification));

		// Test set - calculate performance statistics
		if (testdata == true)
		{
			std::cout << confMat << std::endl;
			confMats.push_back(confMat);

			if (true)//k==files.size()-1)
			{
				ofstream myfile;
				string resultsFolder = "/home/mikkel/catkin_ws/src/Results/";
				myfile.open ((resultsFolder + "run_number_here.txt").c_str());

				// Distance threshold
				myfile << "DistanceThreshold:" << distThr << ";\n";

				// Neighborhood parameters
				myfile << "Neighborhood:" << rmin << ";" << rmindist << ";" << rmax << ";" << rmaxdist << ";\n";

				// Features
				myfile << "Features:";
				for (int d=0;d<dim;d++)
					myfile << useFeatures[d] << ";";
				myfile << "\n";

				myfile << "ConfusionMatrix:";
				for (size_t i=0;i<confMats.size();i++)
				{
					for (int r=0;r<confMats[i].rows();r++)
						for (int c=0;c<confMats[i].cols();c++)
							myfile << confMats[i](r,c) << ";";
					myfile << "\n";
				}

				myfile << "Accuracy:" << confMat.diagonal().sum()/confMat.sum() << ";\n";


				myfile.close();
			}

		}
		featureCloudAcc->clear();
		labels.clear();

#else

		for (size_t i=0;i<transformedCloud->size();i++)
		{
			pcl::PointXYZI pTmp2 = transformedCloud->points[i];
			pcl::PointXYZRGB pTmp;
			pTmp.x = pTmp2.x;
			pTmp.y = pTmp2.y;
			pTmp.z = pTmp2.z;
			pTmp.r = 255;
			pTmp.g = 255;
			if (pTmp.z > 0.1) // Object
			{
				classificationCloud->push_back(pTmp);
			}
		}

		if (visualizationFlag)
		{
			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC(classificationCloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(classificationCloud,colorC,"Classification",viewP(Classification));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Classification");
			viewer->addText ("Classification", 10, 10, fontsize, 1, 1, 1, "Classification text", viewP(Classification));
		}


#endif





		// REGION GROWING
		//ROS_INFO("region growing 5");
		pcl::PointCloud <pcl::PointXYZRGB>::Ptr objectCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
		//pcl::PointCloud <pcl::PointXYZRGB>::Ptr cloud (new pcl::PointCloud <pcl::PointXYZRGB>);

		for (size_t i=0;i<classificationCloud->size();i++)
		{
			pcl::PointXYZRGB pTmp = classificationCloud->points[i];
			if ((pTmp.r == 255) || (pTmp.g == 255)) // Object
			{
				objectCloud->push_back(pTmp);
			}
		}

		pcl::RegionGrowing<pcl::PointXYZRGB, pcl::Normal> reg;

		reg.setInputCloud (objectCloud);
		//reg.setIndices (indices);
		pcl::search::Search<pcl::PointXYZRGB>::Ptr tree = boost::shared_ptr<pcl::search::Search<pcl::PointXYZRGB> > (new pcl::search::KdTree<pcl::PointXYZRGB>);
		reg.setSearchMethod (tree);
		//reg.setDistanceThreshold (0.0001f);
		//reg.setResidualThreshold(0.01f);
		//ROS_INFO("res = %f",reg.getSmoothnessThreshold());
		//reg.setPointColorThreshold (6);
		//reg.setRegionColorThreshold (5);
		reg.setMinClusterSize (1);
		reg.setResidualThreshold(min_cluster_distance);
		reg.setCurvatureTestFlag(false);
		//		reg.setResidualTestFlag(true);
		//		reg.setNormalTestFlag(false);
		//		reg.setSmoothModeFlag(false);
		std::vector <pcl::PointIndices> clusters;
		reg.extract (clusters);

		//ROS_INFO("clusters = %d", clusters.size());

		//std::cout << "testefter" << std::endl;

		pcl::PointCloud <pcl::PointXYZRGB>::Ptr colored_cloud = reg.getColoredCloud ();

		pcl::PointCloud <pcl::PointXYZRGB>::Ptr clustersFilteredCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
		std::vector <pcl::PointIndices> clustersFiltered;
		//pcl::PointXYZRGB min_pt, max_pt;
		Eigen::Vector4f min_pt, max_pt;
		obstacle_detection::boundingbox bbox;
		obstacle_detection::boundingboxes bboxes;
		bboxes.header.stamp = timestamp;
		bboxes.header.frame_id = "/velodyne";

		if (visualizationFlag)
			viewer->removeAllShapes(viewP(SegmentsFiltered));


		for (size_t i=0;i<clusters.size();i++)
		{
			//ROS_INFO("cluster size = %d",clusters[i].indices.size());
			if (clusters[i].indices.size() > 100)
			{
				clustersFiltered.push_back(clusters[i]);

				for (size_t pp=0;pp<clusters[i].indices.size();pp++)
				{
					clustersFilteredCloud->push_back(colored_cloud->points[clusters[i].indices[pp]]);
				}

				// Calculate bounding box
				pcl::getMinMax3D(*colored_cloud, clusters[i], min_pt, max_pt);
				bbox.start.x = min_pt[0];
				bbox.start.y = min_pt[1];
				bbox.start.z = min_pt[2];
				bbox.width.x = max_pt[0]-min_pt[0];
				bbox.width.y = max_pt[1]-min_pt[1];
				bbox.width.z = max_pt[2]-min_pt[2];
				bbox.header.stamp = timestamp;
				bbox.header.frame_id = "/velodyne";

				bboxes.boundingboxes.push_back(bbox);

				if (visualizationFlag)
					viewer->addCube (min_pt[0], max_pt[0], min_pt[1], max_pt[1], min_pt[2], max_pt[2], 1.0f, 1.0f, 1.0f, (boost::format("%04d") % i).str().c_str(), viewP(SegmentsFiltered));
			}


		}

		pubBBoxesPointer->publish(bboxes);
		//
		//		//pcl::visualization::CloudViewer viewer ("Cluster viewer");
		//		//viewer.showCloud (colored_cloud);
		//
		if (visualizationFlag)
		{
			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC2(colored_cloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(colored_cloud,colorC2,"Segments",viewP(Segments));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "Segments");
			viewer->addText ("Segments", 10, 10, fontsize, 1, 1, 1, "Segments text", viewP(Segments));

			pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorC3(clustersFilteredCloud);
			viewer->addPointCloud<pcl::PointXYZRGB>(clustersFilteredCloud,colorC3,"SegmentsFiltered",viewP(SegmentsFiltered));
			viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "SegmentsFiltered");
			viewer->addText ("SegmentsFiltered", 10, 10, fontsize, 1, 1, 1, "SegmentsFiltered text", viewP(SegmentsFiltered));
		}
		//
		//
		//		// BOUNDING BOXES

		//		viewer->addCube (min_pt.x, max_pt.x, min_pt.y, max_pt.y, min_pt.z, max_pt.z);

		//		std_msgs::Float64 testF;
		//		//testF.data = 10;
		//		testF.data = 10;
		//
		// Compute principal directions
		//		Eigen::Vector4f pcaCentroid;
		//		pcl::compute3DCentroid(*clustersFilteredCloud, pcaCentroid);
		//		Eigen::Matrix3f covariance;
		//		computeCovarianceMatrixNormalized(*clustersFilteredCloud, pcaCentroid, covariance);
		//		Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> eigen_solver(covariance, Eigen::ComputeEigenvectors);
		//		Eigen::Matrix3f eigenVectorsPCA = eigen_solver.eigenvectors();
		//		eigenVectorsPCA.col(2) = eigenVectorsPCA.col(0).cross(eigenVectorsPCA.col(1));
		//
		//		// Transform the original cloud to the origin where the principal components correspond to the axes.
		//		Eigen::Matrix4f projectionTransform(Eigen::Matrix4f::Identity());
		//		projectionTransform.block<3,3>(0,0) = eigenVectorsPCA.transpose();
		//		projectionTransform.block<3,1>(0,3) = -1.f * (projectionTransform.block<3,3>(0,0) * pcaCentroid.head<3>());
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudPointsProjected (new pcl::PointCloud<pcl::PointXYZRGB>);
		//		pcl::transformPointCloud(*clustersFilteredCloud, *cloudPointsProjected, projectionTransform);
		//		// Get the minimum and maximum points of the transformed cloud.
		//		pcl::PointXYZRGB minPoint, maxPoint;
		//		pcl::getMinMax3D(*cloudPointsProjected, minPoint, maxPoint);
		//		const Eigen::Vector3f meanDiagonal = 0.5f*(maxPoint.getVector3fMap() + minPoint.getVector3fMap());
		//
		//		// Final transform
		//		const Eigen::Quaternionf bboxQuaternion(eigenVectorsPCA); //Quaternions are a way to do rotations https://www.youtube.com/watch?v=mHVwd8gYLnI
		//		const Eigen::Vector3f bboxTransform = eigenVectorsPCA * meanDiagonal + pcaCentroid.head<3>();
		//		viewer->addCube(bboxTransform, bboxQuaternion, maxPoint.x - minPoint.x, maxPoint.y - minPoint.y, maxPoint.z - minPoint.z, "bbox");

		//pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> colorTTF1(featureCloudTest, 0, 255, 0);
		//	pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> colorTTF1(featureCloudTest);
		//	PCL_INFO("featureCloudTest size = %i",featureCloudTest->points.size());
		//	pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> colorTTF2(featureCloudTrain, 255, 0, 0);
		//	viewer->addPointCloud<pcl::PointXYZRGB>(featureCloudTest,colorTTF1,"TestTrainFeatures1",viewP(TestTrainFeatures));
		//	viewer->addPointCloud<pcl::PointXYZRGB>(featureCloudTrain,colorTTF2,"TestTrainFeatures2",viewP(TestTrainFeatures));
		//	viewer->addText ("TestTrainFeatures", 10, 10, "TestTrainFeatures text", viewP(TestTrainFeatures));

		//		sensor_msgs::PointCloud2 processedCloud;
		//		pcl::toROSMsg(*classificationCloud, processedCloud);
		//		processedCloud.header.stamp = ros::Time::now();//processedCloud->header.stamp;
		//		processedCloud.header.frame_id = "/velodyne";
		//		pubProcessedPointCloudPointer->publish(processedCloud);
		//
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr groundCloud2D(new pcl::PointCloud<pcl::PointXYZRGB>);
		//		pcl::PointCloud<pcl::PointXYZRGB>::Ptr objectCloud2D(new pcl::PointCloud<pcl::PointXYZRGB>);
		//
		//		for (size_t i=0;i<classificationCloud->size();i++)
		//		{
		//			pcl::PointXYZRGB pTmp = classificationCloud->points[i];
		//			pTmp.z = 0; // 3D -> 2D
		//			if ((pTmp.r == 255) || (pTmp.g == 255)) // Object
		//			{
		//				objectCloud2D->push_back(pTmp);
		//			}
		//			else if (pTmp.b == 255) // Object
		//			{
		//				groundCloud2D->push_back(pTmp);
		//			}
		//		}
		//
		//		//std::vector<int> indexVector;
		//		unsigned int leafNodeCounter = 0;
		//		unsigned int voxelDensity = 0;
		//		unsigned int totalPoints = 0;
		//		//int occupancyW = OCCUPANCY_WIDTH/OCCUPANCY_GRID_RESOLUTION;
		//		//int occupancyH = OCCUPANCY_DEPTH/OCCUPANCY_GRID_RESOLUTION;
		//		std::vector<unsigned int> ocGroundGrid(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH, 0);
		//		std::vector<unsigned int> ocObjectGrid(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH, 0);
		//		std::vector<signed char> ocGridFinal(OCCUPANCY_WIDTH*OCCUPANCY_DEPTH,-1);
		//
		//		// Ground grid
		//		pcl::octree::OctreePointCloudDensity< pcl::PointXYZRGB > octreeGround (OCCUPANCY_GRID_RESOLUTION);
		//		octreeGround.setInputCloud (groundCloud2D);
		//		pcl::PointXYZ bot(-OCCUPANCY_DEPTH_M/2,-OCCUPANCY_WIDTH_M/2,-0.5);
		//		pcl::PointXYZ top(OCCUPANCY_DEPTH_M/2,OCCUPANCY_WIDTH_M/2,0.5);
		//		octreeGround.defineBoundingBox(bot.x, bot.y, bot.z, top.x, top.y, top.z);    // I have already calculated the BBox of my point cloud
		//		octreeGround.addPointsFromInputCloud ();
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it1;
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it1_end = octreeGround.leaf_end();
		//		int minx = 1000;
		//		int miny = 1000;
		//		int maxx = -1000;
		//		int maxy = -1000;
		//		for (it1 = octreeGround.leaf_begin(); it1 != it1_end; ++it1)
		//		{
		//			pcl::octree::OctreePointCloudDensityContainer& container = it1.getLeafContainer();
		//			voxelDensity = container.getPointCounter();
		//			Eigen::Vector3f min_pt, max_pt;
		//			octreeGround.getVoxelBounds (it1, min_pt, max_pt);
		//			//ROS_INFO("x = %f, y = %f, z = %f, x2 = %f, y2 = %f, z2 = %f",min_pt[0],min_pt[1],min_pt[2],max_pt[0],max_pt[1],max_pt[2]);
		//			totalPoints += voxelDensity;
		//			if (min_pt[0] < minx)
		//				minx = min_pt[0];
		//			if (min_pt[0] > maxx)
		//				maxx = min_pt[0];
		//			if (min_pt[1] < miny)
		//				miny = min_pt[1];
		//			if (min_pt[1] > maxy)
		//				maxy = min_pt[1];
		//
		//			int r = OCCUPANCY_DEPTH-(min_pt[0]+OCCUPANCY_DEPTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			int c = OCCUPANCY_WIDTH-(min_pt[1]+OCCUPANCY_WIDTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//
		//			//			if ((r == 51) || (c==51))
		//			//				ROS_INFO("r = %i,c = %i",r,c);
		//
		//			//if (((int)min_pt[0] == -1) || ((int)min_pt[1]==-1))
		//			//ROS_INFO("min_pt[0] = %f, min_pt[1]=%f",min_pt[0],min_pt[1]);
		//			if (!((r < 0) || (c < 0) || (r > OCCUPANCY_DEPTH-1) || (c > OCCUPANCY_WIDTH-1)))
		//				ocGroundGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("density = %i, r = %i, c=%i",voxelDensity, r, c);
		//			//ROS_INFO("x (%f) -> r (%i), y (%f) -> c (%i)",min_pt[0],r,min_pt[1],c);
		//			leafNodeCounter++;
		//		}
		//
		//
		//		// Object grid
		//		pcl::octree::OctreePointCloudDensity< pcl::PointXYZRGB > octreeObject (OCCUPANCY_GRID_RESOLUTION);
		//		octreeObject.setInputCloud (objectCloud2D);
		//		octreeObject.defineBoundingBox(bot.x, bot.y, bot.z, top.x, top.y, top.z);    // I have already calculated the BBox of my point cloud
		//		octreeObject.addPointsFromInputCloud ();
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it2;
		//		pcl::octree::OctreePointCloudDensity<pcl::PointXYZRGB>::LeafNodeIterator it2_end = octreeObject.leaf_end();
		//		minx = 1000;
		//		miny = 1000;
		//		maxx = -1000;
		//		maxy = -1000;
		//		leafNodeCounter = 0;
		//		voxelDensity = 0;
		//		totalPoints = 0;
		//		int t1 = 0;
		//		int t2 = 0;
		//		int t3 = 0;
		//		for (it2 = octreeObject.leaf_begin(); it2 != it2_end; ++it2)
		//		{
		//			pcl::octree::OctreePointCloudDensityContainer& container = it2.getLeafContainer();
		//			voxelDensity = container.getPointCounter();
		//			Eigen::Vector3f min_pt, max_pt;
		//			octreeObject.getVoxelBounds (it2, min_pt, max_pt);
		//			//ROS_INFO("x = %f, y = %f, z = %f, x2 = %f, y2 = %f, z2 = %f",min_pt[0],min_pt[1],min_pt[2],max_pt[0],max_pt[1],max_pt[2]);
		//			totalPoints += voxelDensity;
		//			if (min_pt[0] < minx)
		//				minx = min_pt[0];
		//			if (min_pt[0] > maxx)
		//				maxx = min_pt[0];
		//			if (min_pt[1] < miny)
		//				miny = min_pt[1];
		//			if (min_pt[1] > maxy)
		//				maxy = min_pt[1];
		//
		//			int r = OCCUPANCY_DEPTH-(min_pt[0]+OCCUPANCY_DEPTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			int c = OCCUPANCY_WIDTH-(min_pt[1]+OCCUPANCY_WIDTH_M/2)/OCCUPANCY_GRID_RESOLUTION;
		//			if (!((r < 0) || (c < 0) || (r > OCCUPANCY_DEPTH-1) || (c > OCCUPANCY_WIDTH-1)))
		//				ocObjectGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("object points = %i -> %i: x (%f) -> r (%i), y (%f) -> c (%i)",voxelDensity,ocGridFinal[r+OCCUPANCY_DEPTH*c],min_pt[0],r,min_pt[1],c);
		//			//ocGrid[r+OCCUPANCY_DEPTH*c] = voxelDensity;
		//			//ROS_INFO("density = %i, r = %i, c=%i",voxelDensity, r, c);
		//
		//			leafNodeCounter++;
		//		}
		//
		//		for (int r=0;r<OCCUPANCY_DEPTH;r++)
		//		{
		//			for (int c=0;c<OCCUPANCY_WIDTH;c++)
		//			{
		//				if ((ocGroundGrid[r+OCCUPANCY_DEPTH*c] == 0) && (ocObjectGrid[r+OCCUPANCY_DEPTH*c] <= 1))
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = -1.0; // 0.5 default likelihood
		//					t1++;
		//				}
		//				else if ((ocObjectGrid[r+OCCUPANCY_DEPTH*c] == 0))// && (ocGroundGrid[r+OCCUPANCY_DEPTH*c] > 0))
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = OCCUPANCY_MIN_PROB;
		//					t2++;
		//				}
		//				else
		//				{
		//					ocGridFinal[r+OCCUPANCY_DEPTH*c] = 50+(OCCUPANCY_MAX_PROB-50)*ocObjectGrid[r+OCCUPANCY_DEPTH*c]/(ocObjectGrid[r+OCCUPANCY_DEPTH*c]+ocGroundGrid[r+OCCUPANCY_DEPTH*c]);
		//					t3++;
		//				}
		//			}
		//		}
		//
		//		nav_msgs::OccupancyGrid occupancyGrid;
		//		occupancyGrid.info.resolution = OCCUPANCY_GRID_RESOLUTION;
		//		occupancyGrid.header.stamp = timestamp;//ros::Time::now();
		//		occupancyGrid.header.frame_id = "/velodyne";
		//		occupancyGrid.info.width = OCCUPANCY_WIDTH;
		//		occupancyGrid.info.height = OCCUPANCY_DEPTH;
		//		//geometry_msgs::Pose lidarPose;
		//		geometry_msgs::Point lidarPoint;
		//		lidarPoint.x = 0;
		//		lidarPoint.y = 0;
		//		lidarPoint.z = 0;
		//		geometry_msgs::Quaternion lidarQuaternion;
		//		lidarQuaternion.w = 1;
		//		lidarQuaternion.x = 0;
		//		lidarQuaternion.y = 0;
		//		lidarQuaternion.z = 0;
		//		occupancyGrid.info.origin.position = lidarPoint;
		//		occupancyGrid.info.origin.orientation = lidarQuaternion;
		//		occupancyGrid.data = ocGridFinal;
		//
		//		//ROS_INFO("total points = %i, total leaves = %i",totalPoints,leafNodeCounter);
		//		//ROS_INFO("minx = %i, maxx = %i, miny = %i, maxy = %i",minx,maxx,miny,maxy);
		//		//ROS_INFO("t1 = %i, t2 = %i, t3 = %i",t1,t2,t3);
		//
		//
		//		pubOccupancyGridPointer->publish(occupancyGrid);


		//		int l = 0;
		//		while (*++itLeafs)
		//		{
		//		    //Iteratively explore only the leaf nodes..
		//		    std::vector<PointXYZ> points;
		//		    itLeafs->getData(points);
		//		    l++;
		//		}
		//
		//		ROS_INFO("l = %i",l);

		//pcl::octree::OctreePointCloudSearch<pcl::PointXYZ> octree (OCCUPANCY_GRID_RESOLUTION);

		//		sensor_msgs::PointCloud2::Ptr cloud_filtered (new sensor_msgs::PointCloud2 ());
		//		pcl::VoxelGrid<sensor_msgs::PointCloud2> sor;
		//		sor.setInputCloud (processedCloud);
		//		sor.setLeafSize (0.01f, 0.01f, 0.01f);
		//		sor.filter (*cloud_filtered);
		if (n==0)
		{
			//viewer->setCameraPosition(-20,0,10,1,0,2,1,0,2,v1);
			//viewer->setCameraPosition(-20,0,10,1,0,2,1,0,2,v2);
			//viewer->loadCameraParameters("pcl_video.cam");
		}
#if (OLD_METHOD)
	}
#endif

	if (visualizationFlag)
		viewer->spinOnce();

	// Save screenshot
//	std::stringstream tmp;
//	tmp << n;
//	viewer->saveScreenshot("/home/mikkel/images/" + (boost::format("%04d") % n).str() + ".png");
//	n++;
}
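The ground-plane adjustment in rawCloudHandler reduces to rotating the RANSAC plane normal n_p onto +Z about the axis u = n_p x z by the angle theta = acos(n_p . z). A minimal sketch of just that rotation, with a hypothetical (made-up) normal:

#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

int main() {
    // Hypothetical plane normal, e.g. from pcl::SACSegmentation coefficients (unit length).
    Eigen::Vector3f np(0.1f, -0.05f, 0.99f);
    np.normalize();

    Eigen::Vector3f z = Eigen::Vector3f::UnitZ();
    Eigen::Vector3f u = np.cross(z);        // rotation axis (degenerate if np is already (anti)parallel to z)
    u.normalize();
    float theta = std::acos(np.dot(z));     // rotation angle

    Eigen::Affine3f tf = Eigen::Affine3f::Identity();
    tf.rotate(Eigen::AngleAxisf(theta, u));

    // The rotated normal is (0, 0, 1) up to numerical noise.
    std::cout << (tf.linear() * np).transpose() << "\n";
    return 0;
}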
  void TwoDepthImageAlignerNode::processNode(MapNode* node_){
   cerr << "START ITERATION" << endl;
   std::vector<Serializable*> createdObjects;
    SensingFrameNode* sensingFrame = dynamic_cast<SensingFrameNode*>(node_);
    if (! sensingFrame)
      return;
    
    PinholeImageData* image = dynamic_cast<PinholeImageData*>(sensingFrame->sensorData(_topic));
    if (! image)
      return;
    cerr << "got image"  << endl;
    
    Eigen::Isometry3d _sensorOffset = _config->sensorOffset(image->baseSensor());
    
    // cerr << "sensorOffset: " << endl;
    // cerr << _sensorOffset.matrix() << endl;

    Eigen::Isometry3f sensorOffset;
    convertScalar(sensorOffset,_sensorOffset);
    sensorOffset.matrix().row(3) << 0,0,0,1;

    Eigen::Matrix3d _cameraMatrix = image->cameraMatrix();
    
    ImageBLOB* blob = image->imageBlob().get();
    
    DepthImage depthImage;
    depthImage.fromCvMat(blob->cvImage());
    int r=depthImage.rows();
    int c=depthImage.cols();
    
    DepthImage scaledImage;
    Eigen::Matrix3f cameraMatrix;
    convertScalar(cameraMatrix,_cameraMatrix);
    
    //computeScaledParameters(r,c,cameraMatrix,_scale);
    PinholePointProjector* projector=dynamic_cast<PinholePointProjector*>(_converter->projector());
    cameraMatrix(2,2)=1;
    projector->setCameraMatrix(cameraMatrix);
    projector->setImageSize(depthImage.rows(), depthImage.cols());
    projector->scale(1.0/_scale);

    DepthImage::scale(scaledImage,depthImage,_scale);
    pwn::Frame* frame = new pwn::Frame;
    _converter->compute(*frame,scaledImage, sensorOffset);
  
    MapNodeBinaryRelation* odom=0;

    std::vector<MapNode*> oneNode(1);
    oneNode[0]=sensingFrame;
    MapNodeUnaryRelation* imu = extractRelation<MapNodeUnaryRelation>(oneNode);
    
    if (_previousFrame){
      _aligner->setReferenceSensorOffset(_aligner->currentSensorOffset());
      _aligner->setCurrentSensorOffset(sensorOffset);
      _aligner->setReferenceFrame(_previousFrame);
      _aligner->setCurrentFrame(frame);
      
      PinholePointProjector* projector=(PinholePointProjector*)(_aligner->projector());
      projector->setCameraMatrix(cameraMatrix);
      projector->setImageSize(depthImage.rows(), depthImage.cols());
      projector->scale(1.0/_scale);
      _aligner->correspondenceFinder()->setImageSize(projector->imageRows(), projector->imageCols());

      /*
	cerr << "correspondenceFinder: "  << r << " " << c << endl; 
	cerr << "sensorOffset" << endl;
	cerr <<_aligner->currentSensorOffset().matrix() << endl;
	cerr <<_aligner->referenceSensorOffset().matrix() << endl;
	cerr << "cameraMatrix" << endl;
	cerr << projector->cameraMatrix() << endl;
      */

      std::vector<MapNode*> twoNodes(2);
      twoNodes[0]=_previousSensingFrameNode;
      twoNodes[1]=sensingFrame;
      odom = extractRelation<MapNodeBinaryRelation>(twoNodes);
      cerr << "odom:" << odom << " imu:" << imu << endl;

      Eigen::Isometry3f guess= Eigen::Isometry3f::Identity();
      _aligner->clearPriors();
      if (odom){
      	Eigen::Isometry3f mean;
      	Eigen::Matrix<float,6,6> info;
      	convertScalar(mean,odom->transform());
	mean.matrix().row(3) << 0,0,0,1;
	convertScalar(info,odom->informationMatrix());
	//cerr << "odom: " << t2v(mean).transpose() << endl;
	_aligner->addRelativePrior(mean,info);
 	//guess = mean;
      } 

      if (imu){
      	Eigen::Isometry3f mean;
      	Eigen::Matrix<float,6,6> info;
      	convertScalar(mean,imu->transform());
      	convertScalar(info,imu->informationMatrix());
	mean.matrix().row(3) << 0,0,0,1;
	//cerr << "imu: " << t2v(mean).transpose() << endl;
	_aligner->addAbsolutePrior(_globalT,mean,info);
      }
      _aligner->setInitialGuess(guess);
      cerr << "Frames: " << _previousFrame << " " << frame << endl;

      
      // projector->setCameraMatrix(cameraMatrix);
      // projector->setTransform(Eigen::Isometry3f::Identity());
      // Eigen::MatrixXi debugIndices(r,c);
      // DepthImage debugImage(r,c);
      // projector->project(debugIndices, debugImage, frame->points());

      _aligner->align();
      
      // sprintf(buf, "img-dbg-%05d.pgm",j);
      // debugImage.save(buf);
      //sprintf(buf, "img-ref-%05d.pgm",j);
      //_aligner->correspondenceFinder()->referenceDepthImage().save(buf);
      //sprintf(buf, "img-cur-%05d.pgm",j);
      //_aligner->correspondenceFinder()->currentDepthImage().save(buf);

      cerr << "inliers: " << _aligner->inliers() << endl;
      cerr << "chi2: " << _aligner->error() << endl;
      cerr << "chi2/inliers: " << _aligner->error()/_aligner->inliers() << endl;
      cerr << "initialGuess: " << t2v(guess).transpose() << endl;
      cerr << "transform   : " << t2v(_aligner->T()).transpose() << endl;
      if (_aligner->inliers()>-1){ // always true as written; the failure branch below is effectively disabled
 	_globalT = _globalT*_aligner->T();
	cerr << "TRANSFORM FOUND" <<  endl;
      } else {
	cerr << "FAILURE" <<  endl;
	_globalT = _globalT*guess;
      }
      if (! (_counter%50) ) {
	Eigen::Matrix3f R = _globalT.linear();
	Eigen::Matrix3f E = R.transpose() * R;
	E.diagonal().array() -= 1;
	_globalT.linear() -= 0.5 * R * E;
      }
      _globalT.matrix().row(3) << 0,0,0,1;
      cerr << "globalTransform   : " << t2v(_globalT).transpose() << endl;

      // char buf[1024];
      // sprintf(buf, "frame-%05d.pwn",_counter);
      // frame->save(buf, 1, true, _globalT);


      cerr << "creating alias" << endl;
      // create an alias for the previous node
      MapNodeAlias* newRoot = new MapNodeAlias(_previousSensingFrameNode,_previousSensingFrameNode->manager());
      _previousSensingFrameNode->manager()->addNode(newRoot);
      
      cerr << "creating alias relation" << endl;
      MapNodeAliasRelation* aliasRel = new MapNodeAliasRelation(newRoot,_previousSensingFrameNode->manager());
      aliasRel->nodes()[0] = newRoot;
      aliasRel->nodes()[1] = _previousSensingFrameNode;
      _previousSensingFrameNode->manager()->addRelation(aliasRel);
      
      cerr << "reparenting old elements" << endl;
      // assign all the used relations to the alias node
      if (imu){
	imu->setOwner(newRoot);
	imu->manager()->addRelation(imu);
      }

      if (odom){
	odom->setOwner(newRoot);
	odom->manager()->addRelation(odom);
      }
      
      cerr << "adding result" << endl;
      
      Relation* newRel = new Relation(_aligner, _converter, 
	       _previousSensingFrameNode, sensingFrame,
	       _topic, _aligner->inliers(), _aligner->error(), _manager);
      newRel->setOwner(newRoot);
      newRel->nodes()[0] = newRoot;
      newRel->nodes()[1] = _previousSensingFrameNode;
      Eigen::Isometry3d iso;
      convertScalar(iso,_aligner->T());
      newRel->setTransform(iso);
      newRel->setInformationMatrix(Eigen::Matrix<double, 6,6>::Identity());
      newRel->manager()->addRelation(newRel);
      

      *os << _globalT.translation().transpose() << endl;

      // create a new alias node for the prior element
      // bind the alias with the prior node

      // add to the alias the relations that have been used to
      // determine the transformation

      // add the alias to the map
      // add the new transformation to the map
      
    } else {
      _aligner->setCurrentSensorOffset(sensorOffset);
      _globalT = Eigen::Isometry3f::Identity();
      if (imu){
      	Eigen::Isometry3f mean;
      	convertScalar(mean,imu->transform());
	_globalT = mean;
      }
    }
    
    cerr << "deleting previous frame" << endl;
    if (_previousFrame)
      delete _previousFrame;
    
    cerr << "deleting blob frame" << endl;
    delete blob;

    cerr << "bookkeeping update" << endl;
    _previousSensingFrameNode = sensingFrame;
    _previousFrame = frame;
    _counter++;
    
    cerr << "END ITERATION" << endl;
  }