bool
VirtualTrackball::mouseMoved ( const MouseEvent* event )
{
        if ( !event->isLeftButtonPressed() ) return false;
        const Eigen::Vector3f p0 = this->_oldp;
        const Eigen::Vector3f p1 = this->project_on_sphere ( event->x(), event->y() );
        this->_oldp = p1;
        if ( ( p0 - p1 ).norm() < this->_eps ) return false; // do nothing
        // Something seemed wrong here, so try this correction (use the half angle for the quaternion)
        float radian = std::acos( p0.dot ( p1 ) )*0.5;
        const float cost = std::cos(radian);
        const float sint = std::sin(radian);
        //const float cost = p0.dot ( p1 );
        //const float sint = std::sqrt ( 1 - cost * cost );
        const Eigen::Vector3f axis = p0.cross ( p1 ).normalized();
        const Eigen::Quaternionf q ( -cost, sint * axis.x(), sint * axis.y(), sint * axis.z() );
        if( ( q.x()!=q.x() )|| ( q.y()!=q.y() )|| ( q.z()!=q.z() )|| ( q.w()!=q.w() ) ) return false;

        /*
        Eigen::Vector3f bmin , bmax;
        Mesh mesh;
        mesh = this->_model.getMesh();
        mesh.getBoundingBox(bmin,bmax);
        Eigen::Vector3f mc = (bmin + bmax)*0.5;
        Eigen::Vector3f c = Eigen::Matrix3f(q) * ( this->_model.getCamera().getCenter() - mc ) + mc ;
        this->_model.setCameraPosition(c.x(),c.y(),c.z());*/

        //this->_model.addRotation ( q );
        return true;
}
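A hedged aside on the rotation built above (illustrative helper, not part of the original class): Eigen can construct the same quaternion from the full angle between the projected points via AngleAxisf, which applies the half-angle internally.

#include <algorithm>
#include <cmath>
#include <Eigen/Geometry>

Eigen::Quaternionf trackballRotation ( const Eigen::Vector3f& p0, const Eigen::Vector3f& p1 )
{
        // clamp the dot product so acos stays defined under floating-point noise
        const float angle = std::acos ( std::max ( -1.0f, std::min ( 1.0f, p0.dot ( p1 ) ) ) );
        const Eigen::Vector3f axis = p0.cross ( p1 ).normalized();
        return Eigen::Quaternionf ( Eigen::AngleAxisf ( angle, axis ) ); // w = cos(angle/2), xyz = sin(angle/2) * axis
}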
Example #2
Eigen::Quaternionf AdafruitWidget::getQuaternion()
{
    Eigen::Quaternionf quat = myIMU->returnPose();
    QString label = QString("%1 / %2 / %3 / %4").arg(quat.w()).arg(quat.x()).arg(quat.y()).arg(quat.z());
    ui->label_Quaternion->setText(label);
    return quat; // reuse the pose already fetched above instead of querying the IMU again
}
void UrdfModelMarker::addInvisibleMeshMarkerControl(visualization_msgs::InteractiveMarker &int_marker, boost::shared_ptr<const Link> link, const std_msgs::ColorRGBA &color){
  visualization_msgs::InteractiveMarkerControl control;
  //    ps.pose = UrdfPose2Pose(link->visual->origin);
  visualization_msgs::Marker marker;

  //if (use_color) marker.color = color;
  marker.type = visualization_msgs::Marker::CYLINDER;
  double scale=0.01;
  marker.scale.x = scale;
  marker.scale.y = scale * 1;
  marker.scale.z = scale * 40;
  marker.color = color;
  //marker.pose = stamped.pose;
  //visualization_msgs::InteractiveMarkerControl control;
  boost::shared_ptr<Joint> parent_joint = link->parent_joint;
  Eigen::Vector3f origin_x(0,0,1);
  Eigen::Vector3f dest_x(parent_joint->axis.x, parent_joint->axis.y, parent_joint->axis.z);
  Eigen::Quaternionf qua;

  qua.setFromTwoVectors(origin_x, dest_x);
  marker.pose.orientation.x = qua.x();
  marker.pose.orientation.y = qua.y();
  marker.pose.orientation.z = qua.z();
  marker.pose.orientation.w = qua.w();

  control.markers.push_back( marker );
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::BUTTON;
  control.always_visible = true;

  int_marker.controls.push_back(control);
  return;
}
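A hedged aside (illustrative helper name, not from the original node): setFromTwoVectors(a, b) returns the quaternion that rotates direction a onto direction b, which is why the cylinder marker above ends up aligned with the joint axis.

#include <Eigen/Geometry>

Eigen::Quaternionf alignZTo(const Eigen::Vector3f &axis){
  Eigen::Quaternionf q;
  q.setFromTwoVectors(Eigen::Vector3f::UnitZ(), axis.normalized()); // rotate +Z onto the given axis
  return q; // afterwards q * Eigen::Vector3f::UnitZ() is approximately axis.normalized()
}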
Example #4
/*
 * Gets the current rotation matrix.
 *
 * Returns a pointer to the consecutive values that are used as the elements
 * of a rotation matrix. The elements are specified in column order. That is,
 * if we have 16 elements and we are specifying a 4 x 4 matrix, then the first
 * 4 elements represent the first column, and so on.
 *
 * The actual form of the rotation matrix is specified in HW3 notes. All we
 * need to do is get the current rotation quaternion and translate it to
 * matrix form.
 */
float *get_rotation_matrix() {
    Eigen::Quaternionf q = get_current_rotation();
    float qs = q.w();
    float qx = q.x();
    float qy = q.y();
    float qz = q.z();

    float *matrix = new float[16];

    MatrixXf m(4, 4);
    m <<
        1 - 2 * qy * qy - 2 * qz * qz, 2 * (qx * qy - qz * qs),
            2 * (qx * qz + qy * qs), 0,
        2 * (qx * qy + qz * qs), 1 - 2 * qx * qx - 2 * qz * qz,
            2 * (qy * qz - qx * qs), 0,
        2 * (qx * qz - qy * qs), 2 * (qy * qz + qx * qs),
            1 - 2 * qx * qx - 2 * qy * qy, 0,
        0, 0, 0, 1;

    // Manually copy eigen data into float array, otherwise we run into
    // memory issues
    int count = 0;
    for (int col = 0; col < 4; col++) {
        for (int row = 0; row < 4; row++) {
            matrix[count] = m(row, col);
            count++;
        }
    }
    return matrix;
}
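Assuming the same Eigen setup, a hedged alternative sketch that lets Eigen expand the quaternion instead of spelling out the formula (not the original homework code):

#include <algorithm>
#include <Eigen/Geometry>

void fill_rotation_matrix(const Eigen::Quaternionf &q, float *out /* 16 floats, column order */) {
    Eigen::Matrix4f m = Eigen::Matrix4f::Identity();
    m.topLeftCorner<3, 3>() = q.normalized().toRotationMatrix(); // same 3x3 block as the manual formula
    std::copy(m.data(), m.data() + 16, out); // Eigen is column-major by default, matching the copy loop above
}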
Example #5
bool CommandSubscriber::getForcedTfFrame(Eigen::Affine3f & result) const
  {
  tf::StampedTransform transform;
  try
    {
    m_tf_listener.lookupTransform(m_forced_tf_frame_reference,m_forced_tf_frame_name,ros::Time(0),transform);
    }
    catch (tf::TransformException ex)
      {
      ROS_ERROR("kinfu: hint was forced by TF, but retrieval failed because: %s",ex.what());
      return false;
      }

  Eigen::Vector3f vec;
  vec.x() = transform.getOrigin().x();
  vec.y() = transform.getOrigin().y();
  vec.z() = transform.getOrigin().z();
  Eigen::Quaternionf quat;
  quat.x() = transform.getRotation().x();
  quat.y() = transform.getRotation().y();
  quat.z() = transform.getRotation().z();
  quat.w() = transform.getRotation().w();

  result.linear() = Eigen::AngleAxisf(quat).matrix();
  result.translation() = vec;
  return true;
  }
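A hedged note on the last two assignments: a translation and a unit quaternion compose directly into an Eigen::Affine3f, so with the same vec and quat the result could also be written in one line:

  result = Eigen::Translation3f(vec) * quat; // linear() = rotation, translation() = vec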
Example #6
void makeJointMarker(std::string jointName)
{
	boost::shared_ptr<const urdf::Joint> targetJoint = huboModel.getJoint(jointName);

	// The marker must be created in the parent frame so you don't get feedback when you move it
	visualization_msgs::InteractiveMarker marker;

	marker.scale = .125;
	marker.name = jointName;
	marker.header.frame_id = targetJoint->parent_link_name;

	geometry_msgs::Pose controlPose = hubo_motion_ros::toPose( targetJoint->parent_to_joint_origin_transform);
	marker.pose = controlPose;

	visualization_msgs::InteractiveMarkerControl control;

	Eigen::Quaternionf jointAxis;
	Eigen::Vector3f axisVector = hubo_motion_ros::toEVector3(targetJoint->axis);
	jointAxis.setFromTwoVectors(Eigen::Vector3f::UnitX(), axisVector);

	control.orientation.w = jointAxis.w();
	control.orientation.x = jointAxis.x();
	control.orientation.y = jointAxis.y();
	control.orientation.z = jointAxis.z();

	control.always_visible = true;
	control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
	control.orientation_mode = visualization_msgs::InteractiveMarkerControl::INHERIT;

	marker.controls.push_back(control);

	gIntServer->insert(marker);
	gIntServer->setCallback(marker.name, &processFeedback);
}
Example #7
bool CObjTreePlugin::srvGetPlane(srs_env_model::GetPlane::Request &req, srs_env_model::GetPlane::Response &res)
{
    const objtree::Object *object = m_octree.object(req.object_id);
    //Object hasn't been found
    if(!object) return true;
    if(object->type() != objtree::Object::PLANE) return true;

    const objtree::Plane *plane = (const objtree::Plane*)object;

    res.plane.id = req.object_id;
    res.plane.pose.position.x = plane->pos().x;
    res.plane.pose.position.y = plane->pos().y;
    res.plane.pose.position.z = plane->pos().z;

    //Quaternion from normal
    Eigen::Vector3f normal(plane->normal().x, plane->normal().y, plane->normal().z);
    Eigen::Quaternionf q;
    q.setFromTwoVectors(upVector, normal.normalized());

    res.plane.pose.orientation.x = q.x();
    res.plane.pose.orientation.y = q.y();
    res.plane.pose.orientation.z = q.z();
    res.plane.pose.orientation.w = q.w();

    res.plane.scale.x = plane->boundingMax().x-plane->boundingMin().x;
    res.plane.scale.y = plane->boundingMax().y-plane->boundingMin().y;
    res.plane.scale.z = plane->boundingMax().z-plane->boundingMin().z;

    return true;
}
Example #8
void EKFGyroAcc::reset(Eigen::Quaternionf att)
{
    this->x.setZero();
    this->x(0, 0) = att.w();
    this->x(1, 0) = att.x();
    this->x(2, 0) = att.y();
    this->x(3, 0) = att.z();
}
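A hedged companion sketch (not part of the original filter; assumes the state is an Eigen float matrix): reading the attitude back out of the state uses the same (w, x, y, z) ordering, which is also the order Eigen's constructor expects.

Eigen::Quaternionf attitude_from_state(const Eigen::MatrixXf &x)
{
    return Eigen::Quaternionf(x(0, 0), x(1, 0), x(2, 0), x(3, 0)).normalized(); // (w, x, y, z)
}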
void PlayWindow::on_pushButton_Quat_clicked()
{
    Eigen::Quaternionf cal = ada->GetQuat();
    ui->label_W->setText(QString::number(cal.w()));
    ui->label_X->setText(QString::number(cal.x()));
    ui->label_Y->setText(QString::number(cal.y()));
    ui->label_Z->setText(QString::number(cal.z()));

    if (ui->checkBox_Save->isChecked())
    {
        QFile file(ui->lineEdit_Path->text().append("Adafruit Results ").append(QDateTime::currentDateTime().toString()));
        file.open(QFile::ReadWrite);
        QString result = QString("%1 %2 %3 %4").arg(cal.w()).arg(cal.x()).arg(cal.y()).arg(cal.z());
        file.write(result.toUtf8());
        file.close();
    }

    return;
}
Example #10
// Callback function for "segment_plans" service
bool segment (segment_plans_objects::PlantopSegmentation::Request  &req,
		         	segment_plans_objects::PlantopSegmentation::Response &res) {		         	
	cout << "Query received !" << endl;
	if (current_cloud->empty()) {
		res.result = 1;
		cout << "Error : segment_plans_objects : no current cloud" << endl;
		return false;
	}
	cout << "Segmenting..." << endl;
	res.result = 4;
  if (segmenter.segment_plans (current_cloud) == 0)
		res.result = 2;
	else if (segmenter.segment_clusters (current_cloud) == 0)
		res.result = 3;
	else if (segmenter.segment_rgb (current_rgb) == 0)
		res.result = 3;
	cout << "Segmentation done." << endl;

	// TODO: fill in the header for the table pose
	Eigen::Vector3f t = segmenter.get_prefered_plan_translation ();
	res.table.pose.header = current_cloud->header;
	res.table.pose.pose.position.x = t(0);
	res.table.pose.pose.position.y = t(1);
	res.table.pose.pose.position.z = t(2);
	Eigen::Quaternionf q = segmenter.get_prefered_plan_orientation ();
	// Be careful here: Eigen's quaternion constructor ordering is w, x, y, z
	// while the ROS message field ordering is x, y, z, w
	res.table.pose.pose.orientation.x = q.x();
	res.table.pose.pose.orientation.y = q.y();
	res.table.pose.pose.orientation.z = q.z();
	res.table.pose.pose.orientation.w = q.w();
	Eigen::Vector4f min, max;
	min = segmenter.get_prefered_plan_min ();
	max = segmenter.get_prefered_plan_max ();
	res.table.x_min = min(0);
	res.table.x_max = max(0);
	res.table.y_min = min(1);
	res.table.y_max = max(1);

	for (size_t i=0; i < segmenter.get_num_clusters(); ++i) {
		sensor_msgs::PointCloud2 tmp;
		pcl::toROSMsg(*(segmenter.get_cluster(i)), tmp);
		res.clusters.push_back(tmp);
	}

	for (size_t i=0; i < segmenter.get_num_rgbs(); ++i) {
		cv_bridge::CvImagePtr cv_ptr (new cv_bridge::CvImage);
		//cv_ptr->header = ; TODO fill the header with current image header
		cv_ptr->encoding = enc::BGR8;
		cv_ptr->image = *(segmenter.get_rgb_cluster(i));
		res.cluster_images.push_back(*cv_ptr->toImageMsg());
	}
	return true;
}
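The ordering caveat noted in the comment above is easy to trip over, so here is a minimal hedged sketch of both conversions (helper names are illustrative): Eigen's constructor takes the scalar part first, while the ROS message is filled field by field.

#include <Eigen/Geometry>
#include <geometry_msgs/Quaternion.h>

geometry_msgs::Quaternion eigenToMsg (const Eigen::Quaternionf &q) {
	geometry_msgs::Quaternion m;
	m.x = q.x(); m.y = q.y(); m.z = q.z(); m.w = q.w(); // member access is unambiguous
	return m;
}

Eigen::Quaternionf msgToEigen (const geometry_msgs::Quaternion &m) {
	return Eigen::Quaternionf (m.w, m.x, m.y, m.z); // constructor order: w, x, y, z
}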
 void imageCallback(const sensor_msgs::ImageConstPtr& image_msg)
 {
   try
   {
     input_bridge_ = cv_bridge::toCvCopy(image_msg, "mono8");
     output_bridge_ = cv_bridge::toCvCopy(image_msg, "bgr8");
   }
   catch (cv_bridge::Exception& ex)
   {
     ROS_ERROR("[calibrate] Failed to convert image");
     return;
   }
 
   Eigen::Vector3f translation;
   Eigen::Quaternionf orientation;
   
   if (!pattern_detector_.detectPattern(input_bridge_->image, translation, orientation, output_bridge_->image))
   {
     ROS_INFO("[calibrate] Couldn't detect checkerboard, make sure it's visible in the image.");
     return;
   }
   
   tf::Transform target_transform;
   tf::StampedTransform base_transform;
   try
   {
     ros::Time acquisition_time = image_msg->header.stamp;
     ros::Duration timeout(1.0 / 30.0);
                                  
     target_transform.setOrigin( tf::Vector3(translation.x(), translation.y(), translation.z()) );
     target_transform.setRotation( tf::Quaternion(orientation.x(), orientation.y(), orientation.z(), orientation.w()) );
     tf_broadcaster_.sendTransform(tf::StampedTransform(target_transform, image_msg->header.stamp, image_msg->header.frame_id, target_frame));
   }
   catch (tf::TransformException& ex)
   {
     ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
     return;
   }
   publishCloud(ideal_points_, target_transform, image_msg->header.frame_id);
   
   overlayPoints(ideal_points_, target_transform, output_bridge_);
   
   // Publish calibration image
   pub_.publish(output_bridge_->toImageMsg());
   
   pcl_ros::transformPointCloud(ideal_points_, image_points_, target_transform);
   
   cout << "Got an image callback!" << endl;
   
   calibrate(image_msg->header.frame_id);
   
   ros::shutdown();
 }
Example #12
// This formula comes from Sebastian O.H. Madgwick's 2010 paper:
// "An efficient orientation filter for inertial and inertial/magnetic sensor arrays"
// https://www.samba.org/tridge/UAV/madgwick_internal_report.pdf
void
psmove_alignment_compute_objective_jacobian(
	const Eigen::Quaternionf &q, const Eigen::Vector3f &d, Eigen::Matrix<float, 4, 3> &J)
{
	/*
	* The Jacobian of a function is a matrix of partial derivatives that relates rates of changes in inputs to outputs
	*
	* In this case the inputs are the components of the quaternion q (qw, qx, qy, and qz)
	* and the outputs are the components of the error vector f(fx, fy, fz)
	* Where f= q^-1*[0 dx dy dz]*q - s, d= initial field vector, s= target field vector
	*
	* Since there are 4 input vars (q1,q2,q3,q4) and 3 output vars (fx, fy, fz)
	* The Jacobian is a 3x4 matrix that looks like this:
	*
	* | df_x/dq_1 df_x/dq_2 df_x/dq_3 df_x/dq_4 |
	* | df_y/dq_1 df_y/dq_2 df_y/dq_3 df_y/dq_4 |
	* | df_z/dq_1 df_z/dq_2 df_z/dq_3 df_z/dq_4 |
	*/

	const float two_dxq1 = 2.f*d.x()*q.w();
	const float two_dxq2 = 2.f*d.x()*q.x();
	const float two_dxq3 = 2.f*d.x()*q.y();
	const float two_dxq4 = 2.f*d.x()*q.z();

	const float two_dyq1 = 2.f*d.y()*q.w();
	const float two_dyq2 = 2.f*d.y()*q.x();
	const float two_dyq3 = 2.f*d.y()*q.y();
	const float two_dyq4 = 2.f*d.y()*q.z();

	const float two_dzq1 = 2.f*d.z()*q.w();
	const float two_dzq2 = 2.f*d.z()*q.x();
	const float two_dzq3 = 2.f*d.z()*q.y();
	const float two_dzq4 = 2.f*d.z()*q.z();

	J(0, 0) = two_dyq4 - two_dzq3;                 J(0, 1) = -two_dxq4 + two_dzq2;                J(0, 2) = two_dxq3 - two_dyq2;
	J(1, 0) = two_dyq3 + two_dzq4;                 J(1, 1) = two_dxq3 - 2.f*two_dyq2 + two_dzq1;  J(1, 2) = two_dxq4 - two_dyq1 - 2.f*two_dzq2;
	J(2, 0) = -2.f*two_dxq3 + two_dyq2 - two_dzq1; J(2, 1) = two_dxq2 + two_dzq4;                 J(2, 2) = two_dxq1 + two_dyq4 - 2.f*two_dzq3;
	J(3, 0) = -2.f*two_dxq4 + two_dyq1 + two_dzq2; J(3, 1) = -two_dxq1 - 2.f*two_dyq4 + two_dzq3; J(3, 2) = two_dxq2 + two_dyq3;
}
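For reference, a hedged sketch of the objective whose Jacobian is computed above; per the comment, f rotates the initial field vector d by q^-1 and subtracts the target field vector s (for a unit quaternion, conjugate() equals the inverse). The helper name is illustrative.

static Eigen::Vector3f
psmove_alignment_objective(const Eigen::Quaternionf &q, const Eigen::Vector3f &d, const Eigen::Vector3f &s)
{
	return q.conjugate() * d - s; // f = q^-1 * [0 d] * q - s
}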
Example #13
int main( int argc, char* argv[] ) {

  //
  Eigen::Vector3f trans = Eigen::Vector3f::Zero();
  Eigen::Quaternionf q = Eigen::Quaternionf::Identity();
  double a = 0, b = 0, c = 0; // defaults in case the corresponding options are not given
  std::string gOriginalFile;

  int v;
  while( (v=getopt(argc,argv,"x:y:z:r:p:q:n:a:b:c:f:h")) != -1 ) {
  switch(v) {
  case 'f': { gOriginalFile.assign(optarg); } break;
  case 'x': { trans(0) = atof(optarg); } break;
  case 'y': { trans(1) = atof(optarg); } break;
  case 'z': { trans(2) = atof(optarg); } break;
  case 'r': { q.x() = atof(optarg); } break;
  case 'p': { q.y() = atof(optarg); } break;
  case 'q': { q.z() = atof(optarg); } break;
  case 'n': { q.w() = atof(optarg); } break;
  case 'a': { a = atof(optarg); } break;
  case 'b': { b = atof(optarg); } break;
  case 'c': { c = atof(optarg); } break;
  }
  }
  

  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("Viz"));
  viewer->initCameraParameters ();
  viewer->addCoordinateSystem(0.2);
  // Add pointcloud
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud( new pcl::PointCloud<pcl::PointXYZRGBA>() );
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr table( new pcl::PointCloud<pcl::PointXYZRGBA>() );
  pcl::io::loadPCDFile<pcl::PointXYZRGBA> (gOriginalFile, *cloud);
  pcl::io::loadPCDFile<pcl::PointXYZRGBA> ("data_july_18/table_points.pcd", *table);
  viewer->addPointCloud( cloud, "cloud_original");
  viewer->addPointCloud( table, "table");
  // Add point
  pcl::PointXYZ cp;
  cp.x = trans(0); cp.y = trans(1); cp.z = trans(2);
  viewer->addSphere( cp, 0.005, "sphere" );
  // Add cube
  viewer->addCube( trans, q, 2*a,2*b,2*c);

  
  while (!viewer->wasStopped ()) {
    viewer->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

}
  void detectCB(const sensor_msgs::ImageConstPtr& image_msg, const sensor_msgs::CameraInfoConstPtr& info_msg)
  {
    // make sure that the action is active                            
    if (!as_.isActive())
      return;

    ROS_INFO("%s: Received image, performing detection", action_name_.c_str());
    // Convert image message

    try
    {
      img_bridge_ = cv_bridge::toCvCopy(image_msg, "mono8");
    }
    catch (cv_bridge::Exception& ex)
    {
      ROS_ERROR("[calibrate] Failed to convert image");
      return;
    }


    ROS_INFO("%s: created cv::Mat", action_name_.c_str());
    cam_model_.fromCameraInfo(info_msg);
    detector_.setCameraMatrices(cam_model_.intrinsicMatrix(), cam_model_.distortionCoeffs());

    Eigen::Vector3f translation;
    Eigen::Quaternionf orientation;

    if (detector_.detectPattern(img_bridge_->image, translation, orientation))
    {
      ROS_INFO("detected fiducial");
      tf::Transform fiducial_transform;
      fiducial_transform.setOrigin( tf::Vector3(translation.x(), translation.y(), translation.z()) );
      fiducial_transform.setRotation( tf::Quaternion(orientation.x(), orientation.y(), orientation.z(), orientation.w()) );

      tf::StampedTransform fiducial_transform_stamped(fiducial_transform, image_msg->header.stamp, cam_model_.tfFrame(), "fiducial_frame");
      tf_broadcaster_.sendTransform(fiducial_transform_stamped);

      tf::Pose fiducial_pose;
      fiducial_pose.setRotation(tf::Quaternion(0, 0, 0, 1.0));
      fiducial_pose = fiducial_transform * fiducial_pose;
      tf::Stamped<tf::Pose> fiducial_pose_stamped(fiducial_pose, image_msg->header.stamp, cam_model_.tfFrame());

      tf::poseStampedTFToMsg(fiducial_pose_stamped, result_.pose);
      as_.setSucceeded(result_);
      pub_timer_.stop();
      sub_.shutdown();
    }

  }
Example #15
void addCoordinateFrame (geometry_msgs::Pose pose) {
	cloud_viewer.addCoordinateSystem (0.5);
	Eigen::Vector3f t;
	t(0) = pose.position.x;
	t(1) = pose.position.y;
	t(2) = -pose.position.z; // PCL seems to have a strange Z axis
	Eigen::Quaternionf q;
	q.x() = pose.orientation.x;
	q.y() = pose.orientation.y;
	q.z() = pose.orientation.z;
	q.w() = pose.orientation.w;
	Eigen::Matrix3f R = q.toRotationMatrix();
	Eigen::Vector3f r = R.eulerAngles(0, 1, 2);
	Eigen::Affine3f A;
	pcl::getTransformation (t(0), t(1), t(2), r(0), r(1), r(2), A);
	cloud_viewer.addCoordinateSystem (0.2, A);
}
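A hedged note on the conversion above: the quaternion -> rotation matrix -> Euler angles -> transform round trip can misbehave near gimbal lock; with the same t and q, the transform can also be built directly:

	Eigen::Affine3f A = Eigen::Translation3f(t) * q.normalized(); // same pose, no Euler round trip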
Example #16
TEST (PCL, WarpPointRigid6DFloat)
{
  WarpPointRigid6D<PointXYZ, PointXYZ, float> warp;
  Eigen::Quaternionf q (0.4455f, 0.9217f, 0.3382f, 0.3656f);
  q.normalize ();
  Eigen::Vector3f t (0.82550f, 0.11697f, 0.44864f);

  Eigen::VectorXf p (6);
  p[0] = t.x (); p[1] = t.y (); p[2] = t.z (); p[3] = q.x (); p[4] = q.y (); p[5] = q.z ();
  warp.setParam (p);

  PointXYZ pin (1, 2, 3), pout;
  warp.warpPoint (pin, pout);
  EXPECT_NEAR (pout.x, 4.15963f, 1e-5);
  EXPECT_NEAR (pout.y, -1.51363f, 1e-5);
  EXPECT_NEAR (pout.z, 0.922648f, 1e-5);
}
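A hedged aside on why only q.x/q.y/q.z enter the parameter vector: for a unit quaternion with a non-negative scalar part, w can be recovered from the vector part, roughly as below (illustrative sketch, not the PCL implementation itself):

#include <algorithm>
#include <cmath>
#include <Eigen/Geometry>

Eigen::Quaternionf fromVectorPart (float qx, float qy, float qz)
{
  const float w = std::sqrt (std::max (0.0f, 1.0f - (qx * qx + qy * qy + qz * qz)));
  return Eigen::Quaternionf (w, qx, qy, qz);
}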
  void ImageMessageListener::cameraInfoCallback(const sensor_msgs::CameraInfoConstPtr& msg) {
    cerr << "got camera matrix for topic: " << _camera_info_topic << endl;
    if (_listener) {
      char buffer[1024];
      buffer[0] = 0;

      tf::StampedTransform transform;
      try{
        _listener->waitForTransform(_base_link_frame_id, msg->header.frame_id, 
                                    msg->header.stamp, 
                                    ros::Duration(0.5) );
        _listener->lookupTransform (_base_link_frame_id, msg->header.frame_id, 
                                    msg->header.stamp, 
                                    transform);
      }
      catch (tf::TransformException ex){
        ROS_ERROR("%s",ex.what());
        return;
      }

      Eigen::Quaternionf q;
      q.x() = transform.getRotation().x();
      q.y() = transform.getRotation().y();
      q.z() = transform.getRotation().z();
      q.w() = transform.getRotation().w();
      _camera_offset.linear()=q.toRotationMatrix();
      _camera_offset.translation()=Eigen::Vector3f(transform.getOrigin().x(),
                                                   transform.getOrigin().y(),
                                                   transform.getOrigin().z());
    }      
    int i = 0; // running index into the row-major K array
    for (int r=0; r<3; r++)
      for (int c=0; c<3; c++, i++)
        _K(r,c) = msg->K[i];
    _K.row(2) << 0,0,1;
    _has_camera_matrix = true;
    _camera_info_subscriber.shutdown();
  }
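As a hedged alternative to the index-based copy, the row-major K array from sensor_msgs::CameraInfo can be mapped in one expression, assuming _K is an Eigen::Matrix3f:

    _K = Eigen::Map<const Eigen::Matrix<double, 3, 3, Eigen::RowMajor> >(&msg->K[0]).cast<float>();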
Example #18
bool Geometry::GetMVBB(const Geometry::VertexSet &vertices, Box &box) {
    ApproxMVBB::Vector3List v;
    for (Geometry::VertexSet::const_iterator it = vertices.begin(); it!=vertices.end();++it){
        pcl::PointXYZ p;
        Vertex2Point(*it,p);
        v.emplace_back(p.x,p.y,p.z);
    }
    ApproxMVBB::Matrix3Dyn points(3,v.size());
    for(int i=0;i<v.size();i++)
        points.col(i)=v[i];
    //NOTE parameters see:https://github.com/gabyx/ApproxMVBB
    ApproxMVBB::OOBB oobb = ApproxMVBB::approximateMVBB(points,0.001,8,5,0,5);
    Eigen::Vector3f centroid(
            (const float &) ((oobb.m_minPoint.x()+oobb.m_maxPoint.x())/2),
            (const float &) ((oobb.m_minPoint.y()+oobb.m_maxPoint.y())/2),
            (const float &) ((oobb.m_minPoint.z()+oobb.m_maxPoint.z())/2));
    //upper/lower point in OOBB frame
    double width = oobb.m_maxPoint.x()-oobb.m_minPoint.x();
    double height = oobb.m_maxPoint.y()-oobb.m_minPoint.y();
    double depth = oobb.m_maxPoint.z()-oobb.m_minPoint.z();
    if (width<=0 || height<=0 ||depth<=0)
        return false;
//    coordinate transformation A_IK matrix from OOBB frame K to world frame I
    Eigen::Quaternionf q;
    q.x()= (float) oobb.m_q_KI.x();
    q.y()= (float) oobb.m_q_KI.y();
    q.z()= (float) oobb.m_q_KI.z();
    q.w()= (float) oobb.m_q_KI.w();
    centroid=q.matrix()*centroid;
//    a translation to apply to the cube from 0,0,0
    box.centroid=centroid;
//    a quaternion-based rotation to apply to the cube
    box.quanternion=q.inverse();
    box.depth=depth;
    box.height=height;
    box.width=width;
    return true;
}
Example #19
void UrdfModelMarker::addMoveMarkerControl(visualization_msgs::InteractiveMarker &int_marker, boost::shared_ptr<const Link> link, bool root){
  visualization_msgs::InteractiveMarkerControl control;
  if(root){
    im_helpers::add6DofControl(int_marker,false);
    for(int i=0; i<int_marker.controls.size(); i++){
      int_marker.controls[i].always_visible = true;
    }
  }else{
    boost::shared_ptr<Joint> parent_joint = link->parent_joint;
    Eigen::Vector3f origin_x(1,0,0);
    Eigen::Vector3f dest_x(parent_joint->axis.x, parent_joint->axis.y, parent_joint->axis.z);
    Eigen::Quaternionf qua;

    qua.setFromTwoVectors(origin_x, dest_x);
    control.orientation.x = qua.x();
    control.orientation.y = qua.y();
    control.orientation.z = qua.z();
    control.orientation.w = qua.w();

    int_marker.scale = 0.5;

    switch(parent_joint->type){
    case Joint::REVOLUTE:
    case Joint::CONTINUOUS:
      control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
      int_marker.controls.push_back(control);
      break;
    case Joint::PRISMATIC:
      control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
      int_marker.controls.push_back(control);
      break;
    default:
      break;
    }
  }
}
/**
 * @brief Callback for feedback subscriber for getting the transformation of moved markers
 *
 * @param feedback subscribed from geometry_map/map/feedback
 */
void ShapeVisualization::setShapePosition(const visualization_msgs::InteractiveMarkerFeedbackConstPtr& feedback)//,const cob_3d_mapping_msgs::Shape& shape)
{

  cob_3d_mapping_msgs::ShapeArray map_msg;
  map_msg.header.frame_id="/map";
  map_msg.header.stamp = ros::Time::now();

  int shape_id,index;
  index=-1;
  stringstream name(feedback->marker_name);

  Eigen::Quaternionf quat;

  Eigen::Matrix3f rotationMat;
  Eigen::MatrixXf rotationMatInit;

  Eigen::Vector3f normalVec;
  Eigen::Vector3f normalVecNew;
  Eigen::Vector3f newCentroid;
  Eigen::Matrix4f transSecondStep;


  if (feedback->marker_name != "Text"){
    name >> shape_id ;
    cob_3d_mapping::Polygon p;

    for(int i=0;i<sha.shapes.size();++i)
    {
      if (sha.shapes[i].id == shape_id)
      {
        index = i;
      }
    }

    // temporary fix:
    // do nothing if the index of the shape is not found;
    // this is not supposed to occur, but apparently it does
    if(index==-1){
      ROS_WARN("shape not in map array");
      return;
    }


    cob_3d_mapping::fromROSMsg (sha.shapes.at(index), p);

    if (feedback->event_type == 2 && feedback->menu_entry_id == 5){
      quatInit.x() = (float)feedback->pose.orientation.x ;           //normalized
      quatInit.y() = (float)feedback->pose.orientation.y ;
      quatInit.z() = (float)feedback->pose.orientation.z ;
      quatInit.w() = (float)feedback->pose.orientation.w ;

      oldCentroid (0) = (float)feedback->pose.position.x ;
      oldCentroid (1) = (float)feedback->pose.position.y ;
      oldCentroid (2) = (float)feedback->pose.position.z ;

      quatInit.normalize() ;

      rotationMatInit = quatInit.toRotationMatrix() ;

      transInit.block(0,0,3,3) << rotationMatInit ;
      transInit.col(3).head(3) << oldCentroid(0) , oldCentroid(1), oldCentroid(2) ;
      transInit.row(3) << 0,0,0,1 ;

      transInitInv = transInit.inverse() ;
      Eigen::Affine3f affineInitFinal (transInitInv) ;
      affineInit = affineInitFinal ;

      std::cout << "transInit : " << "\n"    << affineInitFinal.matrix() << "\n" ;
    }

    if (feedback->event_type == 5){
      /* the name of the marker is arrows_shape_.id, we need to erase the "arrows_" part */
      //string strName(feedback->marker_name);
      //strName.erase(strName.begin(),strName.begin()+7);
//      stringstream name(strName);
	stringstream name(feedback->marker_name);

      name >> shape_id ;
      cob_3d_mapping::Polygon p;
      cob_3d_mapping::fromROSMsg (sha.shapes.at(index), p);

      quat.x() = (float)feedback->pose.orientation.x ;           //normalized
      quat.y() = (float)feedback->pose.orientation.y ;
      quat.z() = (float)feedback->pose.orientation.z ;
      quat.w() = (float)feedback->pose.orientation.w ;

      quat.normalize() ;

      rotationMat = quat.toRotationMatrix() ;

      normalVec << sha.shapes.at(index).params[0],                   //normalized
          sha.shapes.at(index).params[1],
          sha.shapes.at(index).params[2];

      newCentroid << (float)feedback->pose.position.x ,
          (float)feedback->pose.position.y ,
          (float)feedback->pose.position.z ;


      transSecondStep.block(0,0,3,3) << rotationMat ;
      transSecondStep.col(3).head(3) << newCentroid(0) , newCentroid(1), newCentroid(2) ;
      transSecondStep.row(3) << 0,0,0,1 ;

      Eigen::Affine3f affineSecondStep(transSecondStep) ;

      std::cout << "transfrom : " << "\n"    << affineSecondStep.matrix() << "\n" ;

      Eigen::Affine3f affineFinal(affineSecondStep*affineInit) ;
      Eigen::Matrix4f matFinal = (transSecondStep*transInitInv) ;

      normalVecNew    = (matFinal.block(0,0,3,3))* normalVec;
      //      newCentroid  = transFinal *OldCentroid ;


      sha.shapes.at(index).centroid.x = newCentroid(0) ;
      sha.shapes.at(index).centroid.y = newCentroid(1) ;
      sha.shapes.at(index).centroid.z = newCentroid(2) ;


      sha.shapes.at(index).params[0] = normalVecNew(0) ;
      sha.shapes.at(index).params[1] = normalVecNew(1) ;
      sha.shapes.at(index).params[2] = normalVecNew(2) ;


      std::cout << "transfromFinal : " << "\n"    << affineFinal.matrix() << "\n" ;

      pcl::PointCloud<pcl::PointXYZ> pc;
      pcl::PointXYZ pt;
      sensor_msgs::PointCloud2 pc2;

      for(unsigned int j=0; j<p.contours.size(); j++)
      {
        for(unsigned int k=0; k<p.contours[j].size(); k++)
        {
          p.contours[j][k] = affineFinal * p.contours[j][k];
          pt.x = p.contours[j][k][0] ;
          pt.y = p.contours[j][k][1] ;
          pt.z = p.contours[j][k][2] ;
          pc.push_back(pt) ;
        }
      }

      pcl::toROSMsg (pc, pc2);
      sha.shapes.at(index).points.clear() ;
      sha.shapes.at(index).points.push_back (pc2);

      // uncomment when using test_shape_array

      //      for(unsigned int i=0;i<sha.shapes.size();i++){
      //        map_msg.header = sha.shapes.at(i).header ;
      //        map_msg.shapes.push_back(sha.shapes.at(i)) ;
      //      }
      //      shape_pub_.publish(map_msg);

      // end uncomment

      modified_shapes_.shapes.push_back(sha.shapes.at(index));
    }
Example #21
void chatterCallback2(const std_msgs::String::ConstPtr& msg)
{

    ROS_INFO("I heard: [%s]", msg->data.c_str());

    vector<string> sweep_xmls = semantic_map_load_utilties::getSweepXmls<PointType>(msg->data.c_str());

    for (auto sweep_xml : sweep_xmls) {
        int slash_pos = sweep_xml.find_last_of("/");
        std::string sweep_folder = sweep_xml.substr(0, slash_pos) + "/";
        printf("folder: %s\n",sweep_folder.c_str());

        QStringList objectFiles = QDir(sweep_folder.c_str()).entryList(QStringList("*object*.xml"));
        vector<ObjectData> objects = semantic_map_load_utilties::loadAllDynamicObjectsFromSingleSweep<PointType>(sweep_folder+"room.xml");

        int object_id;
        for (unsigned int i=0; i< objects.size(); i++){
            if (objects[i].objectScanIndices.size() != 0){object_id = i;}
        }
        int index = objectFiles[object_id].toStdString().find_last_of(".");
        string object_root_name = objectFiles[object_id].toStdString().substr(0,index);

        for (auto object : objects){
            if (!object.objectScanIndices.size()){continue;}

            printf("%i\n",int(object.vAdditionalViews.size()));

            std::vector<cv::Mat> viewrgbs;
            std::vector<cv::Mat> viewdepths;
            std::vector<cv::Mat> viewmasks;
            std::vector<tf::StampedTransform > viewtfs;
            std::vector<Eigen::Matrix4f> viewposes;
            //std::stringstream ss_pose_file; ss_pose_file<<sweep_folder<<"/object_"<<object_id<<"/";
            char buf [1024];
            sprintf(buf,"%s/object_%i/poses.txt",sweep_folder.c_str(),object_id);
            std::vector<Eigen::Matrix4f> poses = getRegisteredViewPoses(std::string(buf), object.vAdditionalViews.size());
            cout<<"Loaded "<<poses.size()<<" registered poses."<<endl;

            int step = std::max(1,int(0.5+double(object.vAdditionalViews.size())/10.0));
            for (unsigned int i=0; i < 2000 && i<object.vAdditionalViews.size(); i+=step){
                CloudPtr cloud = object.vAdditionalViews[i];

                cv::Mat mask;
                mask.create(cloud->height,cloud->width,CV_8UC1);
                unsigned char * maskdata = (unsigned char *)mask.data;

                cv::Mat rgb;
                rgb.create(cloud->height,cloud->width,CV_8UC3);
                unsigned char * rgbdata = (unsigned char *)rgb.data;

                cv::Mat depth;
                depth.create(cloud->height,cloud->width,CV_16UC1);
                unsigned short * depthdata = (unsigned short *)depth.data;

                unsigned int nr_data = cloud->height * cloud->width;
                for(unsigned int j = 0; j < nr_data; j++){
                    maskdata[j] = 0;
                    PointType p = cloud->points[j];
                    rgbdata[3*j+0]	= p.b;
                    rgbdata[3*j+1]	= p.g;
                    rgbdata[3*j+2]	= p.r;
                    depthdata[j]	= short(5000.0 * p.z);
                }

                cout<<"Processing AV "<<i<<endl;

                stringstream av_ss; av_ss<<object_root_name; av_ss<<"_additional_view_"; av_ss<<i; av_ss<<".txt";
                string complete_av_mask_name = sweep_folder + av_ss.str();
                ifstream av_mask_in(complete_av_mask_name);
                if (!av_mask_in.is_open()){
                    cout<<"COULD NOT FIND AV MASK FILE "<<complete_av_mask_name<<endl;
                    continue;
                }

                CloudPtr av_mask(new Cloud);

                int av_index;
                while (av_mask_in >> av_index){ // loop on successful extraction to avoid reusing the last index at eof
                    av_mask->push_back(object.vAdditionalViews[i]->points[av_index]);
                    maskdata[av_index] = 255;
                }

                viewrgbs.push_back(rgb);
                viewdepths.push_back(depth);
                viewmasks.push_back(mask);
                viewtfs.push_back(object.vAdditionalViewsTransforms[i]);
                viewposes.push_back(poses[i+1]);

                cv::namedWindow("rgbimage",	cv::WINDOW_AUTOSIZE);
                cv::imshow(		"rgbimage",	rgb);
                cv::namedWindow("depthimage",	cv::WINDOW_AUTOSIZE);
                cv::imshow(		"depthimage",	depth);
                cv::namedWindow("mask",	cv::WINDOW_AUTOSIZE);
                cv::imshow(		"mask",	mask);
                cv::waitKey(30);
            }

            if(viewrgbs.size() > 0){
                std::vector<int> fid;
                std::vector<int> fadded;
                for(unsigned int j = 0; j < viewrgbs.size(); j++){
                    cv::Mat rgbimage		= viewrgbs[j];
                    cv::Mat depthimage		= viewdepths[j];
                    tf::StampedTransform tf	= viewtfs[j];
                    Eigen::Matrix4f m		= viewposes[j];
                    geometry_msgs::TransformStamped tfstmsg;
                    tf::transformStampedTFToMsg (tf, tfstmsg);

                    geometry_msgs::Transform tfmsg = tfstmsg.transform;

                    printf("start adding frame %i\n",j);

                    Eigen::Quaternionf q = Eigen::Quaternionf(Eigen::Affine3f(m).rotation());

                    geometry_msgs::Pose		pose;
                    //		  pose.orientation		= tfmsg.rotation;
                    //		  pose.position.x		= tfmsg.translation.x;
                    //		  pose.position.y		= tfmsg.translation.y;
                    //		  pose.position.z		= tfmsg.translation.z;
                    pose.orientation.x	= q.x();
                    pose.orientation.y	= q.y();
                    pose.orientation.z	= q.z();
                    pose.orientation.w	= q.w();
                    pose.position.x		= m(0,3);
                    pose.position.y		= m(1,3);
                    pose.position.z		= m(2,3);


                    cv_bridge::CvImage rgbBridgeImage;
                    rgbBridgeImage.image = rgbimage;
                    rgbBridgeImage.encoding = "bgr8";

                    cv_bridge::CvImage depthBridgeImage;
                    depthBridgeImage.image = depthimage;
                    depthBridgeImage.encoding = "mono16";

                    quasimodo_msgs::index_frame ifsrv;
                    ifsrv.request.frame.capture_time = ros::Time();
                    ifsrv.request.frame.pose		= pose;
                    ifsrv.request.frame.frame_id	= -1;
                    ifsrv.request.frame.rgb			= *(rgbBridgeImage.toImageMsg());
                    ifsrv.request.frame.depth		= *(depthBridgeImage.toImageMsg());

                    if (index_frame_client.call(ifsrv)){//Add frame to model server
                        int frame_id = ifsrv.response.frame_id;
                        fadded.push_back(j);
                        fid.push_back(frame_id);
                        ROS_INFO("frame_id%i", frame_id );
                    }else{ROS_ERROR("Failed to call service index_frame");}
                }

                for(unsigned int j = 0; j < fadded.size(); j++){
                    cv::Mat mask	= viewmasks[fadded[j]];

                    cv_bridge::CvImage maskBridgeImage;
                    maskBridgeImage.image = mask;
                    maskBridgeImage.encoding = "mono8";

                    quasimodo_msgs::model_from_frame mff;
                    mff.request.mask		= *(maskBridgeImage.toImageMsg());
                    mff.request.isnewmodel	= (j == (fadded.size()-1));
                    mff.request.frame_id	= fid[j];

                    if (model_from_frame_client.call(mff)){//Build model from frame
                        int model_id = mff.response.model_id;
                        if(model_id > 0){
                            ROS_INFO("model_id%i", model_id );
                        }
                    }else{ROS_ERROR("Failed to call service index_frame");}
                }
            }
        }
    }
}
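The matrix-to-pose conversion inside the loop above (quaternion from the rotation block, translation from the last column) recurs often enough to pull into a helper; a hedged sketch assuming the same message types:

geometry_msgs::Pose poseFromEigen(const Eigen::Matrix4f &m){
    geometry_msgs::Pose pose;
    Eigen::Quaternionf q = Eigen::Quaternionf(Eigen::Affine3f(m).rotation()); // rotation block -> quaternion
    pose.orientation.x = q.x();
    pose.orientation.y = q.y();
    pose.orientation.z = q.z();
    pose.orientation.w = q.w();
    pose.position.x = m(0,3);
    pose.position.y = m(1,3);
    pose.position.z = m(2,3);
    return pose;
}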
void
ShapeMarker::createMarker (visualization_msgs::InteractiveMarkerControl& im_ctrl)
{
  marker.id = shape_.id;

  marker.header = shape_.header;
  //std::cout << marker.header.frame_id << std::endl;
  //marker.header.stamp = ros::Time::now() ;

  marker.type = visualization_msgs::Marker::TRIANGLE_LIST;
  marker.ns = "shape visualization";
  marker.action = visualization_msgs::Marker::ADD;
  marker.lifetime = ros::Duration ();

  //set color
  marker.color.g = shape_.color.g;
  marker.color.b = shape_.color.b;
  marker.color.r = shape_.color.r;
  if (arrows_ || deleted_){
    marker.color.a = 0.5;
  }
  else
  {
    marker.color.a = shape_.color.a;
    //      marker.color.r = shape_.color.r;
  }

  //set scale
  marker.scale.x = 1;
  marker.scale.y = 1;
  marker.scale.z = 1;

  /* transform shape points to 2d and store 2d point in triangle list */
  TPPLPartition pp;
  list<TPPLPoly> polys, tri_list;

  Eigen::Vector3f v, normal, origin;

  if(shape_.type== cob_3d_mapping_msgs::Shape::CYLINDER)
  {
    cob_3d_mapping::Cylinder c;
    cob_3d_mapping::fromROSMsg (shape_, c);
    c.ParamsFromShapeMsg();
    // make triangulated cylinder strip
    //transform cylinder in local coordinate system
    c.makeCyl2D();
    c.TransformContours(c.transform_from_world_to_plane);
    //c.transform2tf(c.transform_from_world_to_plane);
    //TODO: WATCH OUT, NO HANDLING FOR MULTI-CONTOUR CYLINDERS AND HOLES
    TPPLPoly poly;
    TPPLPoint pt;


    for(size_t j=0;j<c.contours.size();j++){

      poly.Init(c.contours[j].size());
      poly.SetHole (shape_.holes[j]);


      for(size_t i=0;i<c.contours[j].size();++i){

        pt.x=c.contours[j][i][0];
        pt.y=c.contours[j][i][1];

        poly[i]=pt;

      }
      if (shape_.holes[j])
        poly.SetOrientation (TPPL_CW);
      else
        poly.SetOrientation (TPPL_CCW);
      polys.push_back(poly);
    }
    // triangulation into monotone triangles
    pp.Triangulate_EC (&polys, &tri_list);

    transformation_inv_ = c.transform_from_world_to_plane.inverse();
    // optional refinement step
    list<TPPLPoly> refined_tri_list;
    triangle_refinement(tri_list,refined_tri_list);
    tri_list=refined_tri_list;

  }
  if(shape_.type== cob_3d_mapping_msgs::Shape::POLYGON)
  {
    cob_3d_mapping::Polygon p;

    if (shape_.params.size () == 4)
    {
      cob_3d_mapping::fromROSMsg (shape_, p);
      normal (0) = shape_.params[0];
      normal (1) = shape_.params[1];
      normal (2) = shape_.params[2];
      origin (0) = shape_.centroid.x;
      origin (1) = shape_.centroid.y;
      origin (2) = shape_.centroid.z;
      v = normal.unitOrthogonal ();

      pcl::getTransformationFromTwoUnitVectorsAndOrigin (v, normal, origin, transformation_);
      transformation_inv_ = transformation_.inverse ();
    }

    for (size_t i = 0; i < shape_.points.size (); i++)
    {
      pcl::PointCloud<pcl::PointXYZ> pc;
      TPPLPoly poly;
      pcl::fromROSMsg (shape_.points[i], pc);
      poly.Init (pc.points.size ());
      poly.SetHole (shape_.holes[i]);

      for (size_t j = 0; j < pc.points.size (); j++)
      {
        poly[j] = msgToPoint2D (pc[j]);
      }
      if (shape_.holes[i])
        poly.SetOrientation (TPPL_CW);
      else
        poly.SetOrientation (TPPL_CCW);

      polys.push_back (poly);
    }
    pp.Triangulate_EC (&polys, &tri_list);

  }//Polygon

  if(tri_list.size()==0)
  {
    ROS_WARN("Could not triangulate, will not display this shape! (ID: %d)", shape_.id);
  }
  //ROS_INFO(" creating markers for this shape.....");

  marker.points.resize (/*it->GetNumPoints ()*/tri_list.size()*3);
  TPPLPoint pt;
  int ctr=0;
  for (std::list<TPPLPoly>::iterator it = tri_list.begin (); it != tri_list.end (); it++)
  {

    //draw each triangle
    switch(shape_.type)
    {
      case(cob_3d_mapping_msgs::Shape::POLYGON):
      {
        for (long i = 0; i < it->GetNumPoints (); i++)
        {
          pt = it->GetPoint (i);
          marker.points[3*ctr+i].x = pt.x;
          marker.points[3*ctr+i].y = pt.y;
          marker.points[3*ctr+i].z = 0;
          //if(shape_.id == 39) std::cout << pt.x << "," << pt.y << std::endl;
        }
        //std::cout << marker.points.size() << std::endl;
        break; // keep the POLYGON case from falling through into the CYLINDER re-rolling below
      }
      case(cob_3d_mapping_msgs::Shape::CYLINDER):
      {
        for (long i = 0; i < it->GetNumPoints (); i++)
        {
          pt = it->GetPoint(i);

          //apply rerolling of cylinder analogous to cylinder class
          if(shape_.params.size()!=10){
            break;
          }

          double alpha=pt.x/shape_.params[9];


          marker.points[3*ctr+i].x = shape_.params[9]*sin(-alpha);
          marker.points[3*ctr+i].y = pt.y;
          marker.points[3*ctr+i].z = shape_.params[9]*cos(-alpha);

          ////Keep Cylinder flat - Debugging
          //marker.points[i].x = pt.x;
          //marker.points[i].y = pt.y;
          //marker.points[i].z = 0;
        }
      }
    }
    ctr++;
  }
  //set pose
  Eigen::Quaternionf quat (transformation_inv_.rotation ());
  Eigen::Vector3f trans (transformation_inv_.translation ());

  marker.pose.position.x = trans (0);
  marker.pose.position.y = trans (1);
  marker.pose.position.z = trans (2);

  marker.pose.orientation.x = quat.x ();
  marker.pose.orientation.y = quat.y ();
  marker.pose.orientation.z = quat.z ();
  marker.pose.orientation.w = quat.w ();

  im_ctrl.markers.push_back (marker);

  //  if(!arrows_) {
  // Added For displaying the arrows on Marker Position
  marker_.pose.position.x = marker.pose.position.x ;
  marker_.pose.position.y = marker.pose.position.y ;
  marker_.pose.position.z = marker.pose.position.z ;

  marker_.pose.orientation.x = marker.pose.orientation.x ;
  marker_.pose.orientation.y = marker.pose.orientation.y ;
  marker_.pose.orientation.z = marker.pose.orientation.z ;
  marker_.pose.orientation.w = marker.pose.orientation.w ;
  // end
}
  bool SnapIt::processModelCylinder(jsk_pcl_ros::CallSnapIt::Request& req,
                                    jsk_pcl_ros::CallSnapIt::Response& res) {
    pcl::PointCloud<pcl::PointXYZ>::Ptr candidate_points (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
    Eigen::Vector3f n, C_orig;
    if (!extractPointsInsideCylinder(req.request.center,
                                     req.request.direction,
                                     req.request.radius,
                                     req.request.height,
                                     inliers, n, C_orig,
                                     1.3)) {
      return false;
    }
    if (inliers->indices.size() < 3) {
      NODELET_ERROR("not enough points inside of the target_plane");
      return false;
    }
    
    geometry_msgs::PointStamped centroid;
    centroid.point.x = C_orig[0];
    centroid.point.y = C_orig[1];
    centroid.point.z = C_orig[2];
    centroid.header = req.request.header;
    
    
    pcl::ExtractIndices<pcl::PointXYZ> extract;
    extract.setInputCloud(input_);
    extract.setIndices(inliers);
    extract.filter(*candidate_points);
    
    publishPointCloud(debug_candidate_points_pub_, candidate_points);


    // first, to remove plane we estimate the plane
    pcl::ModelCoefficients::Ptr plane_coefficients (new pcl::ModelCoefficients);
    pcl::PointIndices::Ptr plane_inliers (new pcl::PointIndices);
    extractPlanePoints(candidate_points, plane_inliers, plane_coefficients,
                       n, req.request.eps_angle);
    if (plane_inliers->indices.size() == 0) {
      NODELET_ERROR ("plane estimation failed");
      return false;
    }

    // remove the points belonging to the plane
    pcl::PointCloud<pcl::PointXYZ>::Ptr points_inside_pole_wo_plane (new pcl::PointCloud<pcl::PointXYZ>);
    extract.setInputCloud (candidate_points);
    extract.setIndices (plane_inliers);
    extract.setNegative (true);
    extract.filter (*points_inside_pole_wo_plane);

    publishPointCloud(debug_candidate_points_pub2_, points_inside_pole_wo_plane);
    
    // normal estimation
    pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ> ());
    ne.setSearchMethod (tree);
    ne.setInputCloud (points_inside_pole_wo_plane);
    ne.setKSearch (50);
    ne.compute (*cloud_normals);
    
    
    // segmentation
    pcl::ModelCoefficients::Ptr cylinder_coefficients (new pcl::ModelCoefficients);
    pcl::PointIndices::Ptr cylinder_inliers (new pcl::PointIndices);
    // Create the segmentation object
    pcl::SACSegmentationFromNormals<pcl::PointXYZ, pcl::Normal> seg;
    // Optional
    seg.setOptimizeCoefficients (true);
    // Mandatory
    seg.setModelType (pcl::SACMODEL_CYLINDER);
    seg.setMethodType (pcl::SAC_RANSAC);
    seg.setRadiusLimits (0.01, req.request.radius * 1.2);
    seg.setDistanceThreshold (0.05);
    
    seg.setInputCloud(points_inside_pole_wo_plane);
    seg.setInputNormals (cloud_normals);
    seg.setMaxIterations (10000);
    seg.setNormalDistanceWeight (0.1);
    seg.setAxis(n);
    if (req.request.eps_angle != 0.0) {
      seg.setEpsAngle(req.request.eps_angle);
    }
    else {
      seg.setEpsAngle(0.35);
    }
    seg.segment (*cylinder_inliers, *cylinder_coefficients);
    if (cylinder_inliers->indices.size () == 0)
    {
      NODELET_ERROR ("Could not estimate a cylinder model for the given dataset.");
      return false;
    }

    debug_centroid_pub_.publish(centroid);
    
    pcl::PointCloud<pcl::PointXYZ>::Ptr cylinder_points (new pcl::PointCloud<pcl::PointXYZ>);
    extract.setInputCloud (points_inside_pole_wo_plane);
    extract.setIndices (cylinder_inliers);
    extract.setNegative (false);
    extract.filter (*cylinder_points);

    publishPointCloud(debug_candidate_points_pub3_, cylinder_points);

    Eigen::Vector3f n_prime;
    Eigen::Vector3f C_new;
    for (size_t i = 0; i < 3; i++) {
      C_new[i] = cylinder_coefficients->values[i];
      n_prime[i] = cylinder_coefficients->values[i + 3];
    }

    double radius = fabs(cylinder_coefficients->values[6]);
    // in order to compute the centroid, we project all the points onto the center line,
    // and then take the minimum and maximum points in the coordinate system of the center line
    double min_alpha = DBL_MAX;
    double max_alpha = -DBL_MAX;
    for (size_t i = 0; i < cylinder_points->points.size(); i++ ) {
      pcl::PointXYZ q = cylinder_points->points[i];
      double alpha = (q.getVector3fMap() - C_new).dot(n_prime);
      if (alpha < min_alpha) {
        min_alpha = alpha;
      }
      if (alpha > max_alpha) {
        max_alpha = alpha;
      }
    }
    // the center of cylinder
    Eigen::Vector3f C_new_prime = C_new + (max_alpha + min_alpha) / 2.0 * n_prime;
    
    Eigen::Vector3f n_cross = n.cross(n_prime);
    if (n.dot(n_prime)) {
      n_cross = - n_cross;
    }
    double theta = asin(n_cross.norm());
    Eigen::Quaternionf trans (Eigen::AngleAxisf(theta, n_cross.normalized()));
    Eigen::Vector3f C = trans * C_orig;
    Eigen::Affine3f A = Eigen::Translation3f(C) * Eigen::Translation3f(C_new_prime - C) * trans * Eigen::Translation3f(C_orig).inverse();
    tf::poseEigenToMsg((Eigen::Affine3d)A, res.transformation);

    geometry_msgs::PointStamped centroid_transformed;
    centroid_transformed.point.x = C_new_prime[0];
    centroid_transformed.point.y = C_new_prime[1];
    centroid_transformed.point.z = C_new_prime[2];
    centroid_transformed.header = req.request.header;
    debug_centroid_after_trans_pub_.publish(centroid_transformed);

    // publish marker
    visualization_msgs::Marker marker;
    marker.type = visualization_msgs::Marker::CYLINDER;
    marker.scale.x = radius;
    marker.scale.y = radius;
    marker.scale.z = (max_alpha - min_alpha);
    marker.pose.position.x = C_new_prime[0];
    marker.pose.position.y = C_new_prime[1];
    marker.pose.position.z = C_new_prime[2];

    // n_prime -> z
    // n_cross.normalized() -> y
    Eigen::Vector3f z_axis = n_prime.normalized();
    Eigen::Vector3f y_axis = n_cross.normalized();
    Eigen::Vector3f x_axis = (y_axis.cross(z_axis)).normalized();
    Eigen::Matrix3f M;
    for (size_t i = 0; i < 3; i++) {
      M(i, 0) = x_axis[i];
      M(i, 1) = y_axis[i];
      M(i, 2) = z_axis[i];
    }
    
    Eigen::Quaternionf q (M);
    marker.pose.orientation.x = q.x();
    marker.pose.orientation.y = q.y();
    marker.pose.orientation.z = q.z();
    marker.pose.orientation.w = q.w();
    marker.color.g = 1.0;
    marker.color.a = 1.0;
    marker.header = input_header_;
    marker_pub_.publish(marker);
    
    return true;
  }
Example #24
void simulate_callback (const pcl::visualization::KeyboardEvent &event,
                        void* viewer_void)
{
  pcl::visualization::PCLVisualizer::Ptr viewer = *static_cast<pcl::visualization::PCLVisualizer::Ptr *> (viewer_void);
  // I chose 'v' for virtual since 's' for simulate is taken
  if (event.getKeySym () == "v" && event.keyDown ())
  {
    std::cout << "v was pressed => simulate cloud" << std::endl;

    std::vector<pcl::visualization::Camera> cams;
    viewer->getCameras(cams);
    
    if (cams.size() !=1){
      std::cout << "n cams not 1 exiting\n"; // for now in case ...
     return; 
    }
   // cout << "n cams: " << cams.size() << "\n";
    pcl::visualization::Camera cam = cams[0];
    


	      
	      Eigen::Affine3f pose;
	      pose = viewer->getViewerPose() ;
	      
	      
     std::cout << cam.pos[0] << " " 
               << cam.pos[1] << " " 
               << cam.pos[2] << " p\n";	      
	      
	     Eigen::Matrix3f m;
	     m =pose.rotation();
	     // All axes use the right-hand rule: x = red axis, y = green axis, z = blue axis; z points into the screen.
	      
 cout << pose(0,0)  << " " << pose(0,1) << " " << pose(0,2)  << " " << pose(0,3) << " x0\n";
 cout << pose(1,0) << " " << pose(1,1) << " " << pose(1,2) << " " << pose(1,3) << " x1\n";
  cout << pose(2,0) << " " << pose(2,1)  << " " << pose(2,2) << " " << pose(2,3)<< "x2\n";

  double yaw,pitch, roll;
  wRo_to_euler(m,yaw,pitch,roll);
  
 printf("RPY: %f %f %f\n", roll*180/M_PI,pitch*180/M_PI,yaw*180/M_PI);    

// matrix->GetElement(1,0);
  
 cout << m(0,0)  << " " << m(0,1) << " " << m(0,2)  << " "  << " x0\n";
 cout << m(1,0) << " " << m(1,1) << " " << m(1,2) << " "  << " x1\n";
  cout << m(2,0) << " " << m(2,1)  << " " << m(2,2) << " "<< "x2\n\n";
  
  Eigen::Quaternionf rf;
  rf = Eigen::Quaternionf(m);
  
  
   Eigen::Quaterniond r(rf.w(),rf.x(),rf.y(),rf.z());
  
   Eigen::Isometry3d init_pose;
   init_pose.setIdentity();
   init_pose.translation() << cam.pos[0], cam.pos[1], cam.pos[2];  
   //Eigen::Quaterniond m = euler_to_quat(-1.54, 0, 0);
   init_pose.rotate(r);  
//   
  
    std::stringstream ss;
  print_Isometry3d(init_pose,ss);
  std::cout << "init_pose: " << ss.str() << "\n";
  
  
  
  
  viewer->addCoordinateSystem (1.0,pose,"reference");
  
  
  
    double tic = getTime();
    std::stringstream ss2;
    ss2.precision(20);
    ss2 << "simulated_pcl_" << tic << ".pcd";
    capture(init_pose);
    cout << (getTime() -tic) << " sec\n";  
  }
}
Example #25
void simulate_callback (const pcl::visualization::KeyboardEvent &event,
                        void* viewer_void)
{
  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer = *static_cast<boost::shared_ptr<pcl::visualization::PCLVisualizer> *> (viewer_void);
  // I chose 'v' for virtual since 's' for simulate is taken
  if (event.getKeySym () == "v" && event.keyDown ())
  {
    std::cout << "v was pressed => simulate cloud" << std::endl;

    std::vector<pcl::visualization::Camera> cams;
    viewer->getCameras(cams);
    
    if (cams.size() !=1){
      std::cout << "n cams not 1 exiting\n"; // for now in case ...
     return; 
    }
   // cout << "n cams: " << cams.size() << "\n";
    pcl::visualization::Camera cam = cams[0];
    


	      
	      Eigen::Affine3f pose;
	      pose = viewer->getViewerPose() ;
	      
	      
     std::cout << cam.pos[0] << " " 
               << cam.pos[1] << " " 
               << cam.pos[2] << " p\n";	      
	      
	     Eigen::Matrix3f m;
	     m =pose.rotation();
	     // All axes use the right-hand rule: x = red axis, y = green axis, z = blue axis; z points into the screen.
	      
 cout << pose(0,0)  << " " << pose(0,1) << " " << pose(0,2)  << " " << pose(0,3) << " x0\n";
 cout << pose(1,0) << " " << pose(1,1) << " " << pose(1,2) << " " << pose(1,3) << " x1\n";
  cout << pose(2,0) << " " << pose(2,1)  << " " << pose(2,2) << " " << pose(2,3)<< "x2\n";

  double yaw,pitch, roll;
  wRo_to_euler(m,yaw,pitch,roll);
  
 printf("RPY: %f %f %f\n", roll*180/M_PI,pitch*180/M_PI,yaw*180/M_PI);    

// matrix->GetElement(1,0);
  
 cout << m(0,0)  << " " << m(0,1) << " " << m(0,2)  << " "  << " x0\n";
 cout << m(1,0) << " " << m(1,1) << " " << m(1,2) << " "  << " x1\n";
  cout << m(2,0) << " " << m(2,1)  << " " << m(2,2) << " "<< "x2\n\n";
  
  Eigen::Quaternionf rf;
  rf = Eigen::Quaternionf(m);
  
  
   Eigen::Quaterniond r(rf.w(),rf.x(),rf.y(),rf.z());
  
   Eigen::Isometry3d init_pose;
   init_pose.setIdentity();
   init_pose.translation() << cam.pos[0], cam.pos[1], cam.pos[2];  
   //Eigen::Quaterniond m = euler_to_quat(-1.54, 0, 0);
   init_pose.rotate(r);  
//   
  
    std::stringstream ss;
  print_Isometry3d(init_pose,ss);
  std::cout << "init_pose: " << ss.str() << "\n";
  
  
  
  
  viewer->addCoordinateSystem (1.0,pose);
  
  
  
    double tic = getTime();
    std::stringstream ss2;
    ss2.precision(20);
    ss2 << "simulated_pcl_" << tic << ".pcd";
    capture(init_pose,ss2.str());
    cout << (getTime() -tic) << " sec\n";  
  
  
  
  // these three variables determine the position and orientation of
    // the camera.
	      
	      
//     double lookat[3]; - focal location
//     double eye[3]; - my location
//     double up[3]; - updirection
     
	      
	      
//    std::cout << view[0]  << "," << view[1]  << "," << view[2] 
    
//    cameras.back ().view[2] = renderer->GetActiveCamera ()->GetOrientationWXYZ()[2];    
    
//ViewTransform->GetOrientationWXYZ();    
    
  //  Superclass::OnKeyUp ();
    
//       vtkSmartPointer<vtkCamera> cam = event.Interactor->GetRenderWindow ()->GetRenderers ()->GetFirstRenderer ()->GetActiveCamera ();
//       double clip[2], focal[3], pos[3], view[3];
//       cam->GetClippingRange (clip);
/*      cam->GetFocalPoint (focal);
      cam->GetPosition (pos);
      cam->GetViewUp (view);
      int *win_pos = Interactor->GetRenderWindow ()->GetPosition ();
      int *win_size = Interactor->GetRenderWindow ()->GetSize ();
      std::cerr << clip[0]  << "," << clip[1]  << "/" << focal[0] << "," << focal[1] << "," << focal[2] << "/" <<
                   pos[0]   << "," << pos[1]   << "," << pos[2]   << "/" << view[0]  << "," << view[1]  << "," << view[2] << "/" <<
                   cam->GetViewAngle () / 180.0 * M_PI  << "/" << win_size[0] << "," << win_size[1] << "/" << win_pos[0] << "," << win_pos[1]
                << endl;    
  */  
  }
}
bool send2(brio_assembly_vision_new::TrasformStampedRequest  &req, brio_assembly_vision_new::TrasformStampedResponse &res)
{
    request_a_new_cluster =true;
    if(find_cloud_from_kinect_head==true && final ==false)
    {
        brio_assembly_vision_new::Container * cont = new brio_assembly_vision_new::Container();
        while((cont=client_call())==NULL)
        {
            std::this_thread::sleep_for (std::chrono::seconds(1)); //wait for a good response
        }

        if(first_time==true)
        {
            initial_cluster_size=cont->date_container.size();
        }
        first_time=false;

        if(cont->date_container.size()>0) //after moving objects size is decreased
        {
            int i=0; //start with the lowest index
            std::string object_with_shape_requested = model[model_step];
            while(cont->date_container[i].piece_type!=object_with_shape_requested && i<cont->date_container.size()-1) //
            {
                i++;
            }
            //end while when cluster_vector[i] has the requested shape or when the search index is bigger than the size of cluster_vector
            if(i<=cont->date_container.size()-1) // if i<=cluster_vector.size() then we did find requested object at indices i
            {
                Eigen::Vector3d z_axe,x_or_y_axe;
                Eigen::Vector4d translation;
                z_axe=estimate_plane_normals(cloud);
                int i_center = cont->date_container[i].center_index;
                int i_head = cont->date_container[i].head_conn_index;
                x_or_y_axe= obj_axe_2_points_next(i_head,i_center);
                pcl::PointXYZRGB center = cloud->at(cont->date_container[i].center_index);
                translation[0] = center.x;
                translation[1] = center.y;
                translation[2] = center.z;

                calculate_transformation(x_or_y_axe,z_axe ,translation);

                ROS_INFO("Send TransformedPose");
                Eigen::Quaternionf quat;
                quat = createQuaternion();
                res.msg.child_frame_id = "brio_piece_frame";
                res.msg.header.frame_id = "head_mount_kinect_rgb_optical_frame";
                res.msg.transform.translation.x = transformata_finala(0,3);
                res.msg.transform.translation.y = transformata_finala(1,3);
                res.msg.transform.translation.z = transformata_finala(2,3);

                res.msg.transform.rotation.w = (double)quat.w();
                res.msg.transform.rotation.x = (double)quat.x();
                res.msg.transform.rotation.y = (double)quat.y();
                res.msg.transform.rotation.z = (double)quat.z();

                if(model_step < initial_cluster_size-1)
                    model_step++;
                else
                {
                    final = true;
                    //return false;
                }

                return true;
            }
        }
    }
    // NOTE: the closing braces and the fall-through return below were reconstructed;
    // the original snippet was truncated at this point.
    return false;
}
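// Illustrative sketch (hypothetical helper, assuming transformata_finala is a 4x4
// homogeneous transform as used above): packing an Eigen transform into a
// geometry_msgs::Transform in one place would keep the service callback shorter.
geometry_msgs::Transform toTransformMsg(const Eigen::Matrix4f &T)
{
  geometry_msgs::Transform msg;
  msg.translation.x = T(0,3);             // translation is the last column
  msg.translation.y = T(1,3);
  msg.translation.z = T(2,3);
  Eigen::Matrix3f R = T.block<3,3>(0,0);  // rotation is the upper-left 3x3 block
  Eigen::Quaternionf q(R);
  msg.rotation.w = q.w();
  msg.rotation.x = q.x();
  msg.rotation.y = q.y();
  msg.rotation.z = q.z();
  return msg;
}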
  void onGMM(const gaussian_mixture_model::GaussianMixture & mix)
  {
    visualization_msgs::MarkerArray msg;
    ROS_INFO("gmm_rviz_converter: Received message.");

    uint i;

    for (i = 0; i < mix.gaussians.size(); i++)
    {
      visualization_msgs::Marker marker;

      marker.header.frame_id = m_frame_id;
      marker.header.stamp = ros::Time::now();
      marker.ns = m_rviz_namespace;
      marker.id = i;
      marker.type = visualization_msgs::Marker::SPHERE;
      marker.action = visualization_msgs::Marker::ADD;
      marker.lifetime = ros::Duration();

      Eigen::Vector3f coords;
      for (uint ir = 0; ir < NUM_OUTPUT_COORDINATES; ir++)
        coords[ir] = m_conversion_mask[ir]->GetMean(m_conversion_mask,mix.gaussians[i]);
      marker.pose.position.x = coords.x();
      marker.pose.position.y = coords.y();
      marker.pose.position.z = coords.z();

      Eigen::Matrix3f covmat;
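      // Fill the covariance matrix of this Gaussian, then turn it into an ellipsoid:
      // the eigenvectors of the covariance give the ellipsoid orientation and the
      // square roots of its eigenvalues give the semi-axis lengths (weighted and scaled below).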
      for (uint ir = 0; ir < NUM_OUTPUT_COORDINATES; ir++)
        for (uint ic = 0; ic < NUM_OUTPUT_COORDINATES; ic++)
          covmat(ir,ic) = m_conversion_mask[ir]->GetCov(m_conversion_mask,mix.gaussians[i],ic);

      Eigen::EigenSolver<Eigen::Matrix3f> evsolver(covmat);

      Eigen::Matrix3f eigenvectors = evsolver.eigenvectors().real();
      if (eigenvectors.determinant() < 0.0)
        eigenvectors.col(0) = - eigenvectors.col(0);
      Eigen::Matrix3f rotation = eigenvectors;
      Eigen::Quaternionf quat = Eigen::Quaternionf(Eigen::AngleAxisf(rotation));

      marker.pose.orientation.x = quat.x();
      marker.pose.orientation.y = quat.y();
      marker.pose.orientation.z = quat.z();
      marker.pose.orientation.w = quat.w();

      Eigen::Vector3f eigenvalues = evsolver.eigenvalues().real();
      Eigen::Vector3f scale = Eigen::Vector3f(eigenvalues.array().abs().sqrt());
      if (m_normalize)
        scale.normalize();
      marker.scale.x = mix.weights[i] * scale.x() * m_scale;
      marker.scale.y = mix.weights[i] * scale.y() * m_scale;
      marker.scale.z = mix.weights[i] * scale.z() * m_scale;

      marker.color.a = 1.0;
      rainbow(float(i) / float(mix.gaussians.size()),marker.color.r,marker.color.g,marker.color.b);

      msg.markers.push_back(marker);
    }

    // This is a waste of resources, but we need to delete old markers somehow.
    // Someone should add a "clear all" command to rviz
    // (using an expiration time is not an option here).
    for ( ; i < m_max_markers; i++)
    {
      visualization_msgs::Marker marker;
      marker.id = i;
      marker.action = visualization_msgs::Marker::DELETE;
      marker.lifetime = ros::Duration();
      marker.header.frame_id = m_frame_id;
      marker.header.stamp = ros::Time::now();
      marker.ns = m_rviz_namespace;
      msg.markers.push_back(marker);
    }

    m_pub.publish(msg);
    ROS_INFO("gmm_rviz_converter: Sent message.");
  }
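// Sanity-check sketch (not part of the original): the marker above encodes the Gaussian
// as covmat ~= R * diag(eigenvalues) * R^T, where R is the rotation stored in the marker
// orientation and sqrt(eigenvalues) are the unweighted, unscaled semi-axis lengths.
Eigen::Matrix3f reconstructCovariance(const Eigen::Quaternionf &q,
                                      const Eigen::Vector3f &eigenvalues)
{
  const Eigen::Matrix3f R = q.toRotationMatrix();
  return R * eigenvalues.asDiagonal() * R.transpose();
}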
Ejemplo n.º 28
0
std::string Transform::quatToString(Eigen::Quaternionf quat) {
    std::stringstream ss;
    ss << "[x: " << quat.x() << ", y: " << quat.y() << ", z: " << quat.z()
       << ", w: " << quat.w() << "]" << std::endl;
    return ss.str();
}
void callbackClusteredClouds(const clustered_clouds_msgs::ClusteredCloudsConstPtr& msg)
{
#if PUBLISH_TRANSFORM
  static tf::TransformBroadcaster tf_br;
#endif

  g_marker_array.markers.clear();
  g_marker_id = 0;

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_out(new pcl::PointCloud<pcl::PointXYZRGB>);
  std::vector<tf::Vector3> hand_positions, arm_directions;
  mutex_config.lock();
  for(size_t i = 0; i < msg->clouds.size(); i++)
  {
    bool cloud_with_rgb_data = false;
    std::string field_list = pcl::getFieldsList (msg->clouds[i]);    
    if(field_list.rfind("rgb") != std::string::npos)
    {
      cloud_with_rgb_data = true;
    }

    if(cloud_with_rgb_data)
    {
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr hand_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
      //pcl::PointCloud<pcl::PointXYZRGB>::Ptr finger_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
      tf::Vector3 hand_position, arm_position, arm_direction;
      bool found = checkCloud<pcl::PointXYZRGB>(msg->clouds[i],
                                                hand_cloud,
                                                //finger_cloud,
                                                msg->header.frame_id,
                                                hand_position,
                                                arm_position,
                                                arm_direction);
      if(found)
      {
        hand_positions.push_back(hand_position);
        arm_directions.push_back(arm_direction);
        *cloud_out += *hand_cloud;
      }
    }
    //else
    //{
      //checkCloud<pcl::PointXYZ>(msg->clouds[i], msg->header.frame_id);
    //}
  }
  mutex_config.unlock();

  if(hand_positions.size() > g_hand_trackers.size())
  {
    std::vector<tf::Vector3> hand_positions_tmp = hand_positions;
    cv::Mat result;
    for (size_t i = 0; i < g_hand_trackers.size(); i++)
    {
      result = g_hand_trackers[i].first.lastResult();
      tf::Vector3 point1(result.at<float>(0), result.at<float>(1), result.at<float>(2));
      int index = closestPoint(point1, hand_positions_tmp);
      // remove the matched hand position so it cannot be assigned to another tracker
      hand_positions_tmp.erase(hand_positions_tmp.begin() + index);
    }

    //add new tracker
    for(size_t i = 0; i < hand_positions_tmp.size(); i++)
    {
      KalmanFilter3d tracker;
      cv::Point3f point(hand_positions_tmp[i].x(),
                        hand_positions_tmp[i].y(),
                        hand_positions_tmp[i].z());
      tracker.initialize(1.0 / 30.0, point, 1e-2, 1e-1, 1e-1);
      TrackerWithHistory t;
      t.first = tracker;
      g_hand_trackers.push_back(t);
    }
  }

  cv::Mat result;
  std::vector<tf::Vector3> results;
  interaction_msgs::ArmsPtr arms_msg(new interaction_msgs::Arms);
  arms_msg->arms.resize(g_hand_trackers.size());
  for (size_t i = 0; i < g_hand_trackers.size(); i++)
  {
    g_hand_trackers[i].first.predict(result);
    results.push_back(tf::Vector3(result.at<float>(0), result.at<float>(1), result.at<float>(2)));
    arms_msg->arms[i].hand.rotation.w = 1;
  }

  int index = -1;  // stays -1 until a hand position has been matched to a tracker below
  cv::Point3f measurement;
  tf::Quaternion q_hand;
  Eigen::Quaternionf q;
  tf::Quaternion q_rotate;
  q_rotate.setEuler(0, 0, M_PI);
  int last_index = -1;
  for(size_t i = 0; i < hand_positions.size(); i++)
  {
    index = closestPoint(hand_positions[i], results);
    if(last_index == index)
      continue;
    last_index = index;
    measurement.x = hand_positions[i].x();
    measurement.y = hand_positions[i].y();
    measurement.z = hand_positions[i].z();
    g_hand_trackers[index].first.update(measurement, result);
    results[index] = tf::Vector3(result.at<float>(0), result.at<float>(1), result.at<float>(2));

    q.setFromTwoVectors(Eigen::Vector3f(1, 0, 0),
                        Eigen::Vector3f(arm_directions[i].x(), arm_directions[i].y(), arm_directions[i].z()));
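    // setFromTwoVectors() gives the shortest rotation taking the +X axis onto the
    // estimated arm direction; the extra 180-degree rotation in q_rotate (set above)
    // flips that frame before it is published as the hand orientation.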
    tf::Quaternion q_tf(q.x(), q.y(), q.z(), q.w());
    q_hand = q_tf * q_rotate;
    arms_msg->arms[index].hand.rotation.x = q_hand.x();
    arms_msg->arms[index].hand.rotation.y = q_hand.y();
    arms_msg->arms[index].hand.rotation.z = q_hand.z();
    arms_msg->arms[index].hand.rotation.w = q_hand.w();
  }

  for(size_t i = 0; i < results.size(); i++)
  {
    g_hand_trackers[i].first.updateState();
    arms_msg->arms[i].arm_id = g_hand_trackers[i].first.id();
    arms_msg->arms[i].hand.translation.x = results[i].x();
    arms_msg->arms[i].hand.translation.y = results[i].y();
    arms_msg->arms[i].hand.translation.z = results[i].z();

    //prepare markers if needed
    if(g_marker_array_pub.getNumSubscribers() != 0)
    {
      geometry_msgs::Point marker_point;
      marker_point.x = results[i].x();
      marker_point.y = results[i].y();
      marker_point.z = results[i].z();
      g_hand_trackers[i].second.push_back(marker_point);
      if(g_hand_trackers[i].second.size() > HAND_HISTORY_SIZE)
        g_hand_trackers[i].second.pop_front();
    }

#if PUBLISH_TRANSFORM
    tf::Transform hand_tf;
    hand_tf.setOrigin(results[i]);

    if(index != -1)
    {
      hand_tf.setRotation(tf::Quaternion(arms_msg->arms[i].hand.rotation.x,
                                         arms_msg->arms[i].hand.rotation.y,
                                         arms_msg->arms[i].hand.rotation.z,
                                         arms_msg->arms[i].hand.rotation.w));
    }
    else
    {
      hand_tf.setRotation(tf::Quaternion(0, 0, 0, 1));
    }

    std::stringstream ss;
    ss << "hand" << g_hand_trackers[i].first.id();

    tf_br.sendTransform(tf::StampedTransform(hand_tf, ros::Time::now(), msg->header.frame_id, ss.str()));
#endif

  }

  //remove dead tracker
  std::vector<TrackerWithHistory>::iterator it = g_hand_trackers.begin();
  while(it != g_hand_trackers.end())
  {
    if(it->first.getState() == KalmanFilter3d::DIE)
    {
      ROS_DEBUG("remove tracker %d", it->first.id());
      it = g_hand_trackers.erase(it);      
    }
    else
    {
      it++;      
    }
  }

  //prepare markers if needed
  if(g_marker_array_pub.getNumSubscribers() != 0)
  {
    Marker marker;
    marker.type = Marker::LINE_STRIP;
    marker.lifetime = ros::Duration(0.1);
    marker.header.frame_id = msg->header.frame_id;
    marker.scale.x = 0.005;
    marker.pose.position.x = 0;
    marker.pose.position.y = 0;
    marker.pose.position.z = 0;
    marker.pose.orientation.x = 0;
    marker.pose.orientation.y = 0;
    marker.pose.orientation.z = 0;
    marker.pose.orientation.w = 1;
    marker.ns = "point_history";

    for (size_t i = 0; i < g_hand_trackers.size(); i++)
    {
      marker.points.clear();
      if (g_hand_trackers[i].second.size() != 0)
      {
        marker.id = g_marker_id++;
        std::list<geometry_msgs::Point>::iterator it;
        for (it = g_hand_trackers[i].second.begin(); it != g_hand_trackers[i].second.end(); it++)
        {
          marker.points.push_back(*it);
        }
        marker.color.r = 1;
        marker.color.g = 0;
        marker.color.b = 0;
        marker.color.a = 1.0;
        g_marker_array.markers.push_back(marker);
      }
    }
  }


  if(g_arms_pub.getNumSubscribers() != 0)
  {
    arms_msg->header.stamp = ros::Time::now();
    arms_msg->header.frame_id = msg->header.frame_id;
    g_arms_pub.publish(arms_msg);
  }

  if(g_cloud_pub.getNumSubscribers() != 0)
  {
    sensor_msgs::PointCloud2 cloud_out_msg;
    pcl::toROSMsg(*cloud_out, cloud_out_msg);
    cloud_out_msg.header.stamp = ros::Time::now();
    cloud_out_msg.header.frame_id = msg->header.frame_id;
    g_cloud_pub.publish(cloud_out_msg);
  }

  if((g_marker_array_pub.getNumSubscribers() != 0) && (!g_marker_array.markers.empty()))
  {
    g_marker_array_pub.publish(g_marker_array);
  }
}
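// Illustrative sketch (hypothetical -- the real closestPoint() used above is defined
// elsewhere in this file): returns the index of the point in 'points' nearest to 'query'.
int closestPoint_sketch(const tf::Vector3 &query, const std::vector<tf::Vector3> &points)
{
  int best = -1;
  double best_dist = std::numeric_limits<double>::max();
  for (size_t i = 0; i < points.size(); i++)
  {
    const double d = query.distance2(points[i]);  // squared distance is enough for an argmin
    if (d < best_dist)
    {
      best_dist = d;
      best = static_cast<int>(i);
    }
  }
  return best;
}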
  void ImageMessageListener::imageCallback(const sensor_msgs::ImageConstPtr& in_msg) {
    static int counter = 0;     // counter to add periodic line breaks
    _image_deque.push_back(*in_msg);
    if (_image_deque.size()<_deque_length){
      return;
    }
    sensor_msgs::Image msg = _image_deque.front();
    _image_deque.pop_front();
    
    
    if (_listener) {
      tf::StampedTransform transform;
      try{
        _listener->waitForTransform(_odom_frame_id, 
                                    _base_link_frame_id, 
                                    msg.header.stamp, 
                                    ros::Duration(0.5) );
        _listener->lookupTransform (_odom_frame_id, 
                                    _base_link_frame_id, 
                                    msg.header.stamp, 
                                    transform);
      }
      catch (tf::TransformException ex){
        ROS_ERROR("%s",ex.what());
      }
      Eigen::Quaternionf q;
      q.x() = transform.getRotation().x();
      q.y() = transform.getRotation().y();
      q.z() = transform.getRotation().z();
      q.w() = transform.getRotation().w();
      _odom_pose.linear()=q.toRotationMatrix();
      _odom_pose.translation()=Eigen::Vector3f(transform.getOrigin().x(),
                                               transform.getOrigin().y(),
                                               transform.getOrigin().z());
    }      


    cv_bridge::CvImageConstPtr bridge;
    bridge = cv_bridge::toCvCopy(msg,msg.encoding);

  
    if (! _has_camera_matrix){
      cerr << "received: " << _image_topic << " waiting for camera matrix" << endl;
      return;
    }
    _cv_image = bridge->image.clone();
 
  
    PinholeImageMessage* img_msg = new PinholeImageMessage(_image_topic, msg.header.frame_id,  msg.header.seq, msg.header.stamp.toSec());
    img_msg->setImage(_cv_image);
    img_msg->setOffset(_camera_offset);
    img_msg->setCameraMatrix(_K);
    img_msg->setDepthScale(_depth_scale);
  
    if (_imu_interpolator && _listener) {
      Eigen::Isometry3f imu = Eigen::Isometry3f::Identity();
      Eigen::Quaternionf q_imu;
      if (!_imu_interpolator->getOrientation(q_imu, msg.header.stamp.toSec()))
        return;
      imu.linear() = q_imu.toRotationMatrix();
      if (_verbose) {
        cerr << "i";
        counter++;
        if( counter % 1024 == 0 )
          cerr << endl;
      }
      img_msg->setImu(imu);
    } else if (_listener) {
      img_msg->setOdometry(_odom_pose);
      if (_verbose) {
        cerr << "o";
        counter++;
        if( counter % 1024 == 0 )
          cerr << endl;
      }
    } else {
      if (_verbose) {
        cerr << "x";
        counter++;
        if( counter % 1024 == 0 )
          cerr << endl;
      }
    }
    _sorter->insertMessage(img_msg);
  }
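// Illustrative sketch (hypothetical helper): the tf -> Eigen conversion done inline in
// imageCallback() above can be factored out like this.
Eigen::Isometry3f toEigenIsometry(const tf::StampedTransform &transform)
{
  Eigen::Quaternionf q;
  q.x() = transform.getRotation().x();
  q.y() = transform.getRotation().y();
  q.z() = transform.getRotation().z();
  q.w() = transform.getRotation().w();
  Eigen::Isometry3f pose = Eigen::Isometry3f::Identity();
  pose.linear() = q.toRotationMatrix();                     // rotation from the tf quaternion
  pose.translation() = Eigen::Vector3f(transform.getOrigin().x(),
                                       transform.getOrigin().y(),
                                       transform.getOrigin().z());
  return pose;
}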