Example 1
/*
 * Gets the current rotation matrix.
 *
 * Returns a pointer to the consecutive values that are used as the elements
 * of a rotation matrix. The elements are specified in column order. That is,
 * if we have 16 elements and we are specifying a 4 x 4 matrix, then the first
 * 4 elements represent the first column, and so on.
 *
 * The actual form of the rotation matrix is specified in HW3 notes. All we
 * need to do is get the current rotation quaternion and translate it to
 * matrix form.
 */
float *get_rotation_matrix() {
    Eigen::Quaternionf q = get_current_rotation();
    float qs = q.w();
    float qx = q.x();
    float qy = q.y();
    float qz = q.z();

    float *matrix = new float[16];

    Eigen::MatrixXf m(4, 4);
    m <<
        1 - 2 * qy * qy - 2 * qz * qz, 2 * (qx * qy - qz * qs),
            2 * (qx * qz + qy * qs), 0,
        2 * (qx * qy + qz * qs), 1 - 2 * qx * qx - 2 * qz * qz,
            2 * (qy * qz - qx * qs), 0,
        2 * (qx * qz - qy * qs), 2 * (qy * qz + qx * qs),
            1 - 2 * qx * qx - 2 * qy * qy, 0,
        0, 0, 0, 1;

    // Manually copy the Eigen data into the float array in column order;
    // returning m.data() directly would leave a dangling pointer once m
    // goes out of scope.
    int count = 0;
    for (int col = 0; col < 4; col++) {
        for (int row = 0; row < 4; row++) {
            matrix[count] = m(row, col);
            count++;
        }
    }
    return matrix;
}
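The caller owns the returned buffer, so it has to be freed after use. A minimal usage sketch, assuming an active OpenGL context and the GL headers included (the glMultMatrixf call is one plausible consumer, not something the snippet itself prescribes):

float *rotation = get_rotation_matrix();
glMultMatrixf(rotation);   // column-major layout matches OpenGL's convention
delete[] rotation;         // the caller is responsible for releasing the heap array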
Example 2
 geometry_msgs::PoseStamped SnapIt::alignPose(
   Eigen::Affine3f& pose, jsk_recognition_utils::ConvexPolygon::Ptr convex)
 {
   Eigen::Affine3f aligned_pose(pose);
   Eigen::Vector3f original_point(pose.translation());
   Eigen::Vector3f projected_point;
   convex->project(original_point, projected_point);
   
   Eigen::Vector3f normal = convex->getNormal();
   Eigen::Vector3f old_normal;
   old_normal[0] = pose(0, 2);
   old_normal[1] = pose(1, 2);
   old_normal[2] = pose(2, 2);
   Eigen::Quaternionf rot;
   if (normal.dot(old_normal) < 0) {
     normal = - normal;
   }
   rot.setFromTwoVectors(old_normal, normal);
   aligned_pose = aligned_pose * rot;
   aligned_pose.translation() = projected_point;
   Eigen::Affine3d aligned_posed;
   convertEigenAffine3(aligned_pose, aligned_posed);
   geometry_msgs::PoseStamped ret;
   tf::poseEigenToMsg(aligned_posed, ret.pose);
   return ret;
 }
Example 3
bool CommandSubscriber::getForcedTfFrame(Eigen::Affine3f & result) const
  {
  tf::StampedTransform transform;
  try
    {
    m_tf_listener.lookupTransform(m_forced_tf_frame_reference,m_forced_tf_frame_name,ros::Time(0),transform);
    }
    catch (tf::TransformException& ex)
      {
      ROS_ERROR("kinfu: hint was forced by TF, but retrieval failed because: %s",ex.what());
      return false;
      }

  Eigen::Vector3f vec;
  vec.x() = transform.getOrigin().x();
  vec.y() = transform.getOrigin().y();
  vec.z() = transform.getOrigin().z();
  Eigen::Quaternionf quat;
  quat.x() = transform.getRotation().x();
  quat.y() = transform.getRotation().y();
  quat.z() = transform.getRotation().z();
  quat.w() = transform.getRotation().w();

  result.linear() = Eigen::AngleAxisf(quat).matrix();
  result.translation() = vec;
  return true;
  }
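For reference, the per-component copy above can also be written with Eigen::Quaternionf's (w, x, y, z) constructor. A hedged sketch reusing the transform and result variables from the snippet (illustrative only, not the original author's code):

// Compact variant of the copy above; note the Eigen constructor takes w first,
// while the tf accessors expose x, y, z and w individually.
Eigen::Quaternionf quat(transform.getRotation().w(),
                        transform.getRotation().x(),
                        transform.getRotation().y(),
                        transform.getRotation().z());
result.linear() = quat.toRotationMatrix();
result.translation() = Eigen::Vector3f(transform.getOrigin().x(),
                                       transform.getOrigin().y(),
                                       transform.getOrigin().z());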
Example 4
Eigen::Quaternionf rollcorrect(Eigen::Quaternionf rotation){
    Vector3f local_z_axis = rotation.inverse() * -Vector3f::UnitZ();
    Vector3f local_x_axis = rotation.inverse() * Vector3f::UnitX();
    Vector3f local_y_axis = rotation.inverse() * Vector3f::UnitY();

    Vector3f z_proj_zx = local_z_axis; z_proj_zx(1) = 0; z_proj_zx.normalize();
    Vector3f x_proj_zx = local_x_axis; x_proj_zx(1) = 0; x_proj_zx.normalize();
    Vector3f y_proj_zx = local_y_axis; y_proj_zx(1) = 0; y_proj_zx.normalize();

    double pitch = angle(local_z_axis, z_proj_zx);
    double roll = angle(local_x_axis, x_proj_zx);

    int proj_side = local_x_axis.cross(x_proj_zx).dot(local_z_axis) > 0 ? -1 : 1;
    int side_up = local_y_axis.dot(Vector3f::UnitY()) > 0 ? 1 : -1;

    if(side_up == -1){
        roll = M_PI - roll;
    }

    roll = roll * proj_side * side_up;

    double norm_pitch = normalizeRad(pitch);
    double correction_factor = (M_PI_2 - fabs(norm_pitch)) / M_PI_2;
    correction_factor = pow(correction_factor - 0.5, 1);

    if(pitch > 0.7){
        correction_factor = 0;
    }

    AngleAxis<float> roll_correction(correction_factor*-roll, Vector3f::UnitZ());
    rotation = roll_correction * rotation;

    return rotation;
}
Eigen::Quaternionf AdafruitWidget::getQuaternion()
{
    Eigen::Quaternionf quat = myIMU->returnPose();
    QString label = QString("%1 / %2 / %3 / %4").arg(quat.w()).arg(quat.x()).arg(quat.y()).arg(quat.z());
    ui->label_Quaternion->setText(label);
    return myIMU->returnPose();
}
Example 6
  void DrawableCloud::draw() {
    if(_parameter->show() && _cloud) {
      glPushMatrix();
      glMultMatrixf(_transformation.data());
      if(_drawablePoints)
        _drawablePoints->draw();
      if(_drawableNormals)
        _drawableNormals->draw();
      if(_drawableCovariances)
        _drawableCovariances->draw();
      if(_drawableCorrespondences)
        _drawableCorrespondences->draw();

      glPushMatrix();
      Eigen::Isometry3f sensorOffset;
      sensorOffset.translation() = Eigen::Vector3f(0.0f, 0.0f, 0.0f);
      Eigen::Quaternionf quaternion = Eigen::Quaternionf(-.5f, -0.5f, 0.5f, 0.5f);
      sensorOffset.linear() = quaternion.toRotationMatrix();
      sensorOffset.matrix().row(3) << 0.0f, 0.0f, 0.0f, 1.0f;
      glMultMatrixf(sensorOffset.data());
      glColor4f(1,0,0,0.5);
      glPopMatrix();

      glPopMatrix();
    }
  }
void ConvexPolygon::projectOnPlane(
    const Eigen::Affine3f& pose, Eigen::Affine3f& output)
{
    Eigen::Vector3f p(pose.translation());
    Eigen::Vector3f output_p;
    projectOnPlane(p, output_p);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] p: [%f, %f, %f]",
    //          p[0], p[1], p[2]);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] output_p: [%f, %f, %f]",
    //          output_p[0], output_p[1], output_p[2]);
    Eigen::Quaternionf rot;
    rot.setFromTwoVectors(pose.rotation() * Eigen::Vector3f::UnitZ(),
                          coordinates().rotation() * Eigen::Vector3f::UnitZ());
    Eigen::Quaternionf coords_rot(coordinates().rotation());
    Eigen::Quaternionf pose_rot(pose.rotation());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] rot: [%f, %f, %f, %f]",
    //          rot.x(), rot.y(), rot.z(), rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] coords_rot: [%f, %f, %f, %f]",
    //          coords_rot.x(), coords_rot.y(), coords_rot.z(), coords_rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] pose_rot: [%f, %f, %f, %f]",
    //          pose_rot.x(), pose_rot.y(), pose_rot.z(), pose_rot.w());
    // ROS_INFO("[ConvexPolygon::projectOnPlane] normal: [%f, %f, %f]", normal_[0], normal_[1], normal_[2]);
    // Eigen::Affine3f::Identity() *
    // output.translation() = Eigen::Translation3f(output_p);
    // output.rotation() = rot * pose.rotation();
    //output = Eigen::Translation3f(output_p) * rot * pose.rotation();
    output = Eigen::Affine3f(rot * pose.rotation());
    output.pretranslate(output_p);
    // Eigen::Vector3f projected_point = output * Eigen::Vector3f(0, 0, 0);
    // ROS_INFO("[ConvexPolygon::projectOnPlane] output: [%f, %f, %f]",
    //          projected_point[0], projected_point[1], projected_point[2]);
}
Example 8
void EKFGyroAcc::reset(Eigen::Quaternionf att)
{
    this->x.setZero();
    this->x(0, 0) = att.w();
    this->x(1, 0) = att.x();
    this->x(2, 0) = att.y();
    this->x(3, 0) = att.z();
}
TEST(MEKFGyroAccMagTestGroup, vect_measurement_basis)
{
    Eigen::Vector3f v_i(1, 2, 3); // expectation of v in inertial frame
    Eigen::Quaternionf q = MEKFGyroAccMag::vect_measurement_basis(v_i);
    Eigen::Vector3f v_m = q._transformVector(v_i.normalized());
    // the expectation of the measurement is [1,0,0]' in the meas. frame
    CHECK_TRUE(v_m.isApprox(Eigen::Vector3f(1, 0, 0)));
}
Example 10
 void Plane::project(const Eigen::Affine3f& pose, Eigen::Affine3f& output)
 {
   Eigen::Vector3f p(pose.translation());
   Eigen::Vector3f output_p;
   project(p, output_p);
   Eigen::Quaternionf rot;
   rot.setFromTwoVectors(pose.rotation() * Eigen::Vector3f::UnitZ(),
                         coordinates().rotation() * Eigen::Vector3f::UnitZ());
   output = Eigen::Affine3f::Identity() * Eigen::Translation3f(output_p) * rot;
 }
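Both this snippet and SnapIt::alignPose above rely on setFromTwoVectors, which computes the quaternion rotating its first argument onto its second. A standalone check of that behavior (names here are illustrative, not taken from the snippets):

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Vector3f from = Eigen::Vector3f::UnitZ();
  Eigen::Vector3f to(0.f, 1.f, 1.f);            // arbitrary target direction
  Eigen::Quaternionf r;
  r.setFromTwoVectors(from, to);                // rotates `from` onto the direction of `to`
  std::cout << (r * from).isApprox(to.normalized()) << std::endl;  // prints 1
  return 0;
}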
Example 11
	bool update_map(rm_localization::UpdateMap::Request &req,
			rm_localization::UpdateMap::Response &res) {

		boost::mutex::scoped_lock lock(closest_keyframe_update_mutex);

		Eigen::Vector3f intrinsics;
		memcpy(intrinsics.data(), req.intrinsics.data(), 3 * sizeof(float));

		bool update_intrinsics = intrinsics[0] != 0.0f;

		if (update_intrinsics) {
			ROS_INFO("Updated camera intrinsics");
			this->intrinsics = intrinsics;
			ROS_INFO_STREAM("New intrinsics" << std::endl << this->intrinsics);
		}

		for (size_t i = 0; i < req.idx.size(); i++) {

			Eigen::Quaternionf orientation;
			Eigen::Vector3f position, intrinsics;

			memcpy(orientation.coeffs().data(),
					req.transform[i].unit_quaternion.data(), 4 * sizeof(float));
			memcpy(position.data(), req.transform[i].position.data(),
					3 * sizeof(float));

			Sophus::SE3f new_pos(orientation, position);

			if (req.idx[i] == closest_keyframe_idx) {
				//closest_keyframe_update_mutex.lock();

				camera_position = new_pos
						* keyframes[req.idx[i]]->get_pos().inverse()
						* camera_position;

				keyframes[req.idx[i]]->get_pos() = new_pos;

				//if (update_intrinsics) {
				//	keyframes[req.idx[i]]->get_intrinsics() = intrinsics;
				//}

				//closest_keyframe_update_mutex.unlock();

			} else {
				keyframes[req.idx[i]]->get_pos() = new_pos;

				//if (update_intrinsics) {
				//	keyframes[req.idx[i]]->get_intrinsics() = intrinsics;
				//}

			}
		}

		return true;
	}
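The raw memcpy into orientation.coeffs() above only works because Eigen stores quaternion coefficients as (x, y, z, w) in memory, even though the constructor takes w first; it also assumes req.transform[i].unit_quaternion uses that same ordering. A small standalone illustration of the layout (not part of the snippet):

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Quaternionf q(0.7071f, 0.f, 0.7071f, 0.f);  // constructor order: w, x, y, z
  const float *c = q.coeffs().data();                // memory order: x, y, z, w
  std::cout << c[0] << ' ' << c[1] << ' ' << c[2] << ' ' << c[3] << std::endl;
  // prints: 0 0.7071 0 0.7071
  return 0;
}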
bool
VirtualTrackball::mouseMoved ( const MouseEvent* event )
{
        if ( !event->isLeftButtonPressed() ) return false;
        const Eigen::Vector3f p0 = this->_oldp;
        const Eigen::Vector3f p1 = this->project_on_sphere ( event->x(), event->y() );
        this->_oldp = p1;
        if ( ( p0 - p1 ).norm() < this->_eps ) return false; // do nothing
        // Something seems wrong here, so try this correction instead
        float radian = std::acos( p0.dot ( p1 ) )*0.5;
        const float cost = std::cos(radian);
        const float sint = std::sin(radian);
        //const float cost = p0.dot ( p1 );
        //const float sint = std::sqrt ( 1 - cost * cost );
        const Eigen::Vector3f axis = p0.cross ( p1 ).normalized();
        const Eigen::Quaternionf q ( -cost, sint * axis.x(), sint * axis.y(), sint * axis.z() );
        if( ( q.x()!=q.x() )|| ( q.y()!=q.y() )|| ( q.z()!=q.z() )|| ( q.w()!=q.w() ) ) return false;

        /*
        Eigen::Vector3f bmin , bmax;
        Mesh mesh;
        mesh = this->_model.getMesh();
        mesh.getBoundingBox(bmin,bmax);
        Eigen::Vector3f mc = (bmin + bmax)*0.5;
        Eigen::Vector3f c = Eigen::Matrix3f(q) * ( this->_model.getCamera().getCenter() - mc ) + mc ;
        this->_model.setCameraPosition(c.x(),c.y(),c.z());*/

        //this->_model.addRotation ( q );
        return true;
}
Example 13
void ImuDeadReckon::makeQuaternionFromVector( Eigen::Vector3f& inVec, Eigen::Quaternionf& outQuat )
{
    float phi = inVec.norm();
    if (phi == 0.0f) {               // zero rotation vector: return identity to avoid dividing by zero
        outQuat = Eigen::Quaternionf::Identity();
        return;
    }
    Eigen::Vector3f u = inVec / phi; // u is a unit vector

    outQuat.vec() = u * sin( phi / 2.0 );
    outQuat.w()   =     cos( phi / 2.0 );


    //outQuat = Eigen::Quaternionf( 1.0, 0., 0., 0. );
}
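The helper builds the quaternion for a rotation-vector (axis times angle) input: w = cos(phi/2) and the vector part is the unit axis times sin(phi/2). Eigen::AngleAxisf produces the same rotation, which gives a quick standalone cross-check (illustrative code, not from the snippet):

#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

int main() {
  Eigen::Vector3f rotVec(0.f, 0.f, float(M_PI) / 2.f);   // 90 degrees about +z
  float phi = rotVec.norm();

  Eigen::Quaternionf q;                                   // same math as the helper above
  q.vec() = (rotVec / phi) * std::sin(phi / 2.f);
  q.w()   = std::cos(phi / 2.f);

  Eigen::Quaternionf q_ref(Eigen::AngleAxisf(phi, rotVec.normalized()));
  std::cout << q.isApprox(q_ref) << std::endl;            // prints 1
  return 0;
}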
Example 14
void CPclView::addCameraMesh(const pcl::PointXYZ& position, const Eigen::Quaternionf& rotation,
                             const QString& id)
{
  pcl::PolygonMesh cameraMesh;

  initzializeCameraMeshCloud(&cameraMesh.cloud);

  Eigen::Vector3f cameraShapeVertices[CAMERA_SHAPE_N_VERTICES];

  cameraShapeVertices[0] = Eigen::Vector3f( 0.f,  0.f, 0.f);
  cameraShapeVertices[1] = Eigen::Vector3f(-1.f, -1.f, 1.f);
  cameraShapeVertices[2] = Eigen::Vector3f(-1.f,  1.f, 1.f);
  cameraShapeVertices[3] = Eigen::Vector3f( 1.f, -1.f, 1.f);
  cameraShapeVertices[4] = Eigen::Vector3f( 1.f,  1.f, 1.f);

  Eigen::Vector3f cameraBasePos(position.x, position.y, position.z);
  Eigen::Quaternionf quat = rotation.normalized();
  Eigen::Matrix3f rotationMatrix = quat.toRotationMatrix();

  for(int i = 0; i < CAMERA_SHAPE_N_VERTICES; i++)
  {
    cameraShapeVertices[i] = rotationMatrix * cameraShapeVertices[i];
    cameraShapeVertices[i] += cameraBasePos;

    for(unsigned int j = 0; j < 3; j++)
    {
      reinterpret_cast<float*>(cameraMesh.cloud.data.data())[i * 3 + j] = cameraShapeVertices[i][j];
    }
  }

  pcl::Vertices v;

  v.vertices.push_back(1);
  v.vertices.push_back(2);
  v.vertices.push_back(4);
  v.vertices.push_back(3);
  cameraMesh.polygons.push_back(v);

  v.vertices.clear();
  v.vertices.push_back(0);
  v.vertices.push_back(1);
  v.vertices.push_back(3);
  v.vertices.push_back(0);
  cameraMesh.polygons.push_back(v);

  v.vertices.clear();
  v.vertices.push_back(0);
  v.vertices.push_back(2);
  v.vertices.push_back(4);
  v.vertices.push_back(0);
  cameraMesh.polygons.push_back(v);

  mpPclVisualizer->addPolylineFromPolygonMesh(cameraMesh, id.toStdString());
}
// Callback function for "segment_plans" service
bool segment (segment_plans_objects::PlantopSegmentation::Request  &req,
		         	segment_plans_objects::PlantopSegmentation::Response &res) {		         	
	cout << "Query received !" << endl;
	if (current_cloud->empty()) {
		res.result = 1;
		cout << "Error : segment_plans_objects : no current cloud" << endl;
		return false;
	}
	cout << "Segmenting..." << endl;
	res.result = 4;
	if (segmenter.segment_plans (current_cloud) == 0)
		res.result = 2;
	else if (segmenter.segment_clusters (current_cloud) == 0)
		res.result = 3;
	else if (segmenter.segment_rgb (current_rgb) == 0)
		res.result = 3;
	cout << "Segmentation done." << endl;

	// TODO: fill in the header for the table pose
	Eigen::Vector3f t = segmenter.get_prefered_plan_translation ();
	res.table.pose.header = current_cloud->header;
	res.table.pose.pose.position.x = t(0);
	res.table.pose.pose.position.y = t(1);
	res.table.pose.pose.position.z = t(2);
	Eigen::Quaternionf q = segmenter.get_prefered_plan_orientation ();
	// Be careful here: Eigen's quaternion constructor takes (w, x, y, z),
	// while the ROS message stores (x, y, z, w)
	res.table.pose.pose.orientation.x = q.x();
	res.table.pose.pose.orientation.y = q.y();
	res.table.pose.pose.orientation.z = q.z();
	res.table.pose.pose.orientation.w = q.w();
	Eigen::Vector4f min, max;
	min = segmenter.get_prefered_plan_min ();
	max = segmenter.get_prefered_plan_max ();
	res.table.x_min = min(0);
	res.table.x_max = max(0);
	res.table.y_min = min(1);
	res.table.y_max = max(1);

	for (size_t i=0; i < segmenter.get_num_clusters(); ++i) {
		sensor_msgs::PointCloud2 tmp;
		pcl::toROSMsg(*(segmenter.get_cluster(i)), tmp);
		res.clusters.push_back(tmp);
	}

	for (size_t i=0; i < segmenter.get_num_rgbs(); ++i) {
		cv_bridge::CvImagePtr cv_ptr (new cv_bridge::CvImage);
		//cv_ptr->header = ; TODO fill the header with current image header
		cv_ptr->encoding = enc::BGR8;
		cv_ptr->image = *(segmenter.get_rgb_cluster(i));
		res.cluster_images.push_back(*cv_ptr->toImageMsg());
	}
	return true;
}
 void imageCallback(const sensor_msgs::ImageConstPtr& image_msg)
 {
   try
   {
     input_bridge_ = cv_bridge::toCvCopy(image_msg, "mono8");
     output_bridge_ = cv_bridge::toCvCopy(image_msg, "bgr8");
   }
   catch (cv_bridge::Exception& ex)
   {
     ROS_ERROR("[calibrate] Failed to convert image");
     return;
   }
 
   Eigen::Vector3f translation;
   Eigen::Quaternionf orientation;
   
   if (!pattern_detector_.detectPattern(input_bridge_->image, translation, orientation, output_bridge_->image))
   {
     ROS_INFO("[calibrate] Couldn't detect checkerboard, make sure it's visible in the image.");
     return;
   }
   
   tf::Transform target_transform;
   tf::StampedTransform base_transform;
   try
   {
     ros::Time acquisition_time = image_msg->header.stamp;
     ros::Duration timeout(1.0 / 30.0);
                                  
     target_transform.setOrigin( tf::Vector3(translation.x(), translation.y(), translation.z()) );
     target_transform.setRotation( tf::Quaternion(orientation.x(), orientation.y(), orientation.z(), orientation.w()) );
     tf_broadcaster_.sendTransform(tf::StampedTransform(target_transform, image_msg->header.stamp, image_msg->header.frame_id, target_frame));
   }
   catch (tf::TransformException& ex)
   {
     ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
     return;
   }
   publishCloud(ideal_points_, target_transform, image_msg->header.frame_id);
   
   overlayPoints(ideal_points_, target_transform, output_bridge_);
   
   // Publish calibration image
   pub_.publish(output_bridge_->toImageMsg());
   
   pcl_ros::transformPointCloud(ideal_points_, image_points_, target_transform);
   
   cout << "Got an image callback!" << endl;
   
   calibrate(image_msg->header.frame_id);
   
   ros::shutdown();
 }
Example 17
void BulletWrapper::updateObjectHolding(const SkeletonHand& skeletonHand, BulletHandRepresentation& handRepresentation, float deltaTime)
{
  const float strengthThreshold = 1.8f;
  if (skeletonHand.grabStrength >= strengthThreshold && handRepresentation.m_HeldBody == NULL)
  {
    // Find body to pick
    EigenTypes::Vector3f underHandDirection = -skeletonHand.rotationButNotReally * EigenTypes::Vector3f::UnitY();

    btRigidBody* closestBody = utilFindClosestBody(skeletonHand.getManipulationPoint(), underHandDirection);
    handRepresentation.m_HeldBody = closestBody;
  }

  if (skeletonHand.grabStrength < strengthThreshold)
  {
    // Let body go
    handRepresentation.m_HeldBody = NULL;
  }
  

  if (handRepresentation.m_HeldBody != NULL)
  {
    btRigidBody* body = handRepresentation.m_HeldBody;
    // Control held body
    s_LastHeldBody = body;
    m_FramesTilLastHeldBodyReset = 12;

    //handRepresentation.m_HeldBody->setCollisionFlags(0);

    // apply velocities or apply positions
    btVector3 target = ToBullet(skeletonHand.getManipulationPoint());// - skeletonHand.rotationButNotReally * EigenTypes::Vector3f(0.1f, 0.1f, 0.1f));
    btVector3 current = body->getWorldTransform().getOrigin();
    btVector3 targetVelocity = 0.5f * (target - current) / deltaTime;
    body->setLinearVelocity(targetVelocity);

    // convert from-to quaternions to angular velocity in world space
    {
      Eigen::Quaternionf currentRotation = FromBullet(body->getWorldTransform().getRotation());
      Eigen::Quaternionf targetRotation = Eigen::Quaternionf(skeletonHand.arbitraryRelatedRotation()); // breaks for left hand ???

      Eigen::Quaternionf delta = currentRotation.inverse() * targetRotation;
      Eigen::AngleAxis<float> angleAxis(delta);
      EigenTypes::Vector3f axis = angleAxis.axis();
      float angle = angleAxis.angle();
      const float pi = 3.1415926536f;
      if (angle > pi) angle -= 2.0f * pi;
      if (angle < -pi) angle += 2.0f * pi;

      EigenTypes::Vector3f angularVelocity = currentRotation * (axis * angle * 0.5f / deltaTime);
      body->setAngularVelocity(ToBullet(angularVelocity));
    }
  }
}
bool math_eigen_test_matrix_conversion()
{
	UNIT_TEST_BEGIN("matrix conversion")
		Eigen::Quaternionf q = Eigen::Quaternionf(0.980671287f, 0.177366823f, 0.0705093816f, 0.0430502370f);
		assert_eigen_quaternion_is_normalized(q);

		Eigen::Matrix3f m = eigen_quaternion_to_clockwise_matrix3f(q);
		Eigen::Quaternionf q_copy = eigen_matrix3f_to_clockwise_quaternion(m);

		success &= q.isApprox(q_copy, k_normal_epsilon);
		assert(success);
	UNIT_TEST_COMPLETE()
}
Example 19
int main( int argc, char* argv[] ) {

  //
  Eigen::Vector3f trans = Eigen::Vector3f::Zero();
  Eigen::Quaternionf q = Eigen::Quaternionf::Identity();
  double a = 0.0, b = 0.0, c = 0.0;
  std::string gOriginalFile;  

  int v;
  while( (v=getopt(argc,argv,"x:y:z:r:p:q:n:a:b:c:f:h")) != -1 ) {
  switch(v) {
  case 'f': { gOriginalFile.assign(optarg); } break;
  case 'x': { trans(0) = atof(optarg); } break;
  case 'y': { trans(1) = atof(optarg); } break;
  case 'z': { trans(2) = atof(optarg); } break;
  case 'r': { q.x() = atof(optarg); } break;
  case 'p': { q.y() = atof(optarg); } break;
  case 'q': { q.z() = atof(optarg); } break;
  case 'n': { q.w() = atof(optarg); } break;
  case 'a': { a = atof(optarg); } break;
  case 'b': { b = atof(optarg); } break;
  case 'c': { c = atof(optarg); } break;
  }
  }
  

  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("Viz"));
  viewer->initCameraParameters ();
  viewer->addCoordinateSystem(0.2);
  // Add pointcloud
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud( new pcl::PointCloud<pcl::PointXYZRGBA>() );
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr table( new pcl::PointCloud<pcl::PointXYZRGBA>() );
  pcl::io::loadPCDFile<pcl::PointXYZRGBA> (gOriginalFile, *cloud);
  pcl::io::loadPCDFile<pcl::PointXYZRGBA> ("data_july_18/table_points.pcd", *table);
  viewer->addPointCloud( cloud, "cloud_original");
  viewer->addPointCloud( table, "table");
  // Add point
  pcl::PointXYZ cp;
  cp.x = trans(0); cp.y = trans(1); cp.z = trans(2);
  viewer->addSphere( cp, 0.005, "sphere" );
  // Add cube
  viewer->addCube( trans, q, 2*a,2*b,2*c);

  
  while (!viewer->wasStopped ()) {
    viewer->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

}
  void detectCB(const sensor_msgs::ImageConstPtr& image_msg, const sensor_msgs::CameraInfoConstPtr& info_msg)
  {
    // make sure that the action is active                            
    if (!as_.isActive())
      return;

    ROS_INFO("%s: Received image, performing detection", action_name_.c_str());
    // Convert image message

    try
    {
      img_bridge_ = cv_bridge::toCvCopy(image_msg, "mono8");
    }
    catch (cv_bridge::Exception& ex)
    {
      ROS_ERROR("[calibrate] Failed to convert image");
      return;
    }


    ROS_INFO("%s: created cv::Mat", action_name_.c_str());
    cam_model_.fromCameraInfo(info_msg);
    detector_.setCameraMatrices(cam_model_.intrinsicMatrix(), cam_model_.distortionCoeffs());

    Eigen::Vector3f translation;
    Eigen::Quaternionf orientation;

    if (detector_.detectPattern(img_bridge_->image, translation, orientation))
    {
      ROS_INFO("detected fiducial");
      tf::Transform fiducial_transform;
      fiducial_transform.setOrigin( tf::Vector3(translation.x(), translation.y(), translation.z()) );
      fiducial_transform.setRotation( tf::Quaternion(orientation.x(), orientation.y(), orientation.z(), orientation.w()) );

      tf::StampedTransform fiducial_transform_stamped(fiducial_transform, image_msg->header.stamp, cam_model_.tfFrame(), "fiducial_frame");
      tf_broadcaster_.sendTransform(fiducial_transform_stamped);

      tf::Pose fiducial_pose;
      fiducial_pose.setRotation(tf::Quaternion(0, 0, 0, 1.0));
      fiducial_pose = fiducial_transform * fiducial_pose;
      tf::Stamped<tf::Pose> fiducial_pose_stamped(fiducial_pose, image_msg->header.stamp, cam_model_.tfFrame());

      tf::poseStampedTFToMsg(fiducial_pose_stamped, result_.pose);
      as_.setSucceeded(result_);
      pub_timer_.stop();
      sub_.shutdown();
    }

  }
static bool math_eigen_test_rotation_method_consistency(const Eigen::Quaternionf &q, const Eigen::Vector3f &v)
{
	bool success = true;

	assert_eigen_quaternion_is_normalized(q);

	// This is the same as computing the rotation by computing: q^-1*[0,v]*q, but cheaper
	Eigen::Vector3f v_rotated = eigen_vector3f_clockwise_rotate(q, v);

	// Make sure doing the matrix based rotation performs the same result
	{
		Eigen::Matrix3f m = eigen_quaternion_to_clockwise_matrix3f(q);
		Eigen::Vector3f v_test = m * v;

		success &= v_test.isApprox(v_rotated, k_normal_epsilon);
		assert(success);
	}

	// Make sure the Hamilton product style rotation matches
	{
		Eigen::Quaternionf v_as_quaternion = Eigen::Quaternionf(0.f, v.x(), v.y(), v.z());
		Eigen::Quaternionf q_inv = q.conjugate();
		Eigen::Quaternionf qinv_v = q_inv * v_as_quaternion;
		Eigen::Quaternionf qinv_v_q = qinv_v * q;

		success &=
			is_nearly_equal(qinv_v_q.w(), 0.f, k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.x(), v_rotated.x(), k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.y(), v_rotated.y(), k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.z(), v_rotated.z(), k_normal_epsilon);
		assert(success);
	}

	return success;
}
void generateTF(int trans, Eigen::Quaternionf &R,  Eigen::Vector3f &t, Eigen::Matrix3f &mR)
{
  float strengthR=(trans%3)*0.05;
  trans/=3;
  float strengthT=(trans%3)*0.05;
  trans/=3;

  int temp=trans%8;
  Eigen::Vector3f axis;
  axis(0) = temp&1?1:0;
  axis(1) = temp&2?1:0;
  axis(2) = temp&4?1:0;
  trans/=8;

  Eigen::AngleAxisf aa(strengthR,axis);
  R = aa.toRotationMatrix();
  mR= R.toRotationMatrix();
  R = mR;

  t(0) = temp&1?1:0;
  if(temp&2) t(0)=-t(0);
  t(1) = temp&4?1:0;
  if(temp&8) t(1)=-t(1);
  t(2) = temp&16?1:0;
  if(temp&32) t(2)=-t(2);
  trans/=64;

  t*=strengthT;
}
Example 23
TEST (PCL, WarpPointRigid6DFloat)
{
  WarpPointRigid6D<PointXYZ, PointXYZ, float> warp;
  Eigen::Quaternionf q (0.4455f, 0.9217f, 0.3382f, 0.3656f);
  q.normalize ();
  Eigen::Vector3f t (0.82550f, 0.11697f, 0.44864f);

  Eigen::VectorXf p (6);
  p[0] = t.x (); p[1] = t.y (); p[2] = t.z (); p[3] = q.x (); p[4] = q.y (); p[5] = q.z ();
  warp.setParam (p);

  PointXYZ pin (1, 2, 3), pout;
  warp.warpPoint (pin, pout);
  EXPECT_NEAR (pout.x, 4.15963f, 1e-5);
  EXPECT_NEAR (pout.y, -1.51363f, 1e-5);
  EXPECT_NEAR (pout.z, 0.922648f, 1e-5);
}
Example 24
 void Plane::initializeCoordinates()
 {
   Eigen::Quaternionf rot;
   rot.setFromTwoVectors(Eigen::Vector3f::UnitZ(), normal_);
   double c = normal_[2];
   double z = 0.0;
   // ax + by + cz + d = 0
   // z = - d / c (when x = y = 0)
   if (c == 0.0) {             // degenerate: the plane is parallel to the z axis
     z = 0.0;
   }
   else {
     z = - d_ / c;
   }
   plane_coordinates_
     = Eigen::Affine3f::Identity() * Eigen::Translation3f(0, 0, z) * rot;
 }
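The comments derive the frame origin as the point where the plane crosses the z axis (z = -d/c at x = y = 0). For example, the plane z = 2, written as z - 2 = 0, gives normal_ = (0, 0, 1), d_ = -2, and an origin of (0, 0, 2). A standalone sketch of the same computation (variable names are illustrative):

#include <Eigen/Geometry>
#include <iostream>

int main() {
  Eigen::Vector3f normal(0.f, 0.f, 1.f);   // plane z = 2, i.e. z - 2 = 0
  float d = -2.f;
  float z = (normal.z() != 0.f) ? -d / normal.z() : 0.f;

  Eigen::Quaternionf rot;
  rot.setFromTwoVectors(Eigen::Vector3f::UnitZ(), normal);
  Eigen::Affine3f coords =
      Eigen::Affine3f::Identity() * Eigen::Translation3f(0.f, 0.f, z) * rot;

  std::cout << coords.translation().transpose() << std::endl;  // prints: 0 0 2
  return 0;
}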
Example 25
void QuaternionDemo::paintGL()
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	gluPerspective(60.0, (double)width() / height(), 0.1, 10);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glTranslatef(0, 0, -5);

	glRotatef(-90, 1, 0, 0);
	glRotatef(90, 0, 0, 1);

	Eigen::Transform<float, 3, Eigen::Affine> t_ref(ref.conjugate());
	glMultMatrixf(t_ref.data());
	Eigen::Transform<float, 3, Eigen::Affine> t_q(q);
	glMultMatrixf(t_q.data());

	glEnable(GL_DEPTH_TEST);
	glBegin(GL_QUADS);
		glColor3f(1, 1, 1);
		glVertex3f(-1, -1, -1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(-1, 1, -1);

		glColor3f(1, 0, 0);
		glVertex3f(-1, -1, 1);
		glVertex3f(1, -1, 1);
		glVertex3f(1, 1, 1);
		glVertex3f(-1, 1, 1);

		glColor3f(0, 1, 0);
		glVertex3f(-1, -1, 1);
		glVertex3f(-1, -1, -1);
		glVertex3f(-1, 1, -1);
		glVertex3f(-1, 1, 1);

		glColor3f(1, 0.5f, 0);
		glVertex3f(1, -1, 1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(1, 1, 1);

		glColor3f(0, 0, 1);
		glVertex3f(-1, -1, 1);
		glVertex3f(-1, -1, -1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, -1, 1);

		glColor3f(1, 0, 0.5f);
		glVertex3f(-1, 1, 1);
		glVertex3f(-1, 1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(1, 1, 1);
	glEnd();
}
void transform(pcl::PointCloud<pcl::PointXYZ> &pc, Eigen::Quaternionf &R,  Eigen::Vector3f &t)
{
  Eigen::Matrix4f m = Eigen::Matrix4f::Identity();
  m.topLeftCorner<3, 3>() = R.toRotationMatrix();
  m.col(3).head<3>() = t;

  pcl::transformPointCloud(pc, pc, m);
}
static Eigen::Quaternionf 
angular_velocity_to_quaternion_derivative(
    const Eigen::Quaternionf &current_orientation,
    const Eigen::Vector3f &ang_vel)
{
    Eigen::Quaternionf omega = Eigen::Quaternionf(0.f, ang_vel.x(), ang_vel.y(), ang_vel.z());
    Eigen::Quaternionf quaternion_derivative = Eigen::Quaternionf(current_orientation.coeffs() * 0.5f) *omega;

    return quaternion_derivative;
}
static Eigen::Vector3f
quaternion_derivative_to_angular_velocity(
    const Eigen::Quaternionf &current_orientation,
    const Eigen::Quaternionf &quaternion_derivative)
{
    Eigen::Quaternionf inv_orientation = current_orientation.conjugate();
    auto q_ang_vel = (quaternion_derivative*inv_orientation).coeffs() * 2.f;
    Eigen::Vector3f ang_vel(q_ang_vel.x(), q_ang_vel.y(), q_ang_vel.z());

    return ang_vel;
}
Example 29
void makeJointMarker(std::string jointName)
{
	boost::shared_ptr<const urdf::Joint> targetJoint = huboModel.getJoint(jointName);

	// The marker must be created in the parent frame so you don't get feedback when you move it
	visualization_msgs::InteractiveMarker marker;

	marker.scale = .125;
	marker.name = jointName;
	marker.header.frame_id = targetJoint->parent_link_name;

	geometry_msgs::Pose controlPose = hubo_motion_ros::toPose( targetJoint->parent_to_joint_origin_transform);
	marker.pose = controlPose;

	visualization_msgs::InteractiveMarkerControl control;

	Eigen::Quaternionf jointAxis;
	Eigen::Vector3f axisVector = hubo_motion_ros::toEVector3(targetJoint->axis);
	jointAxis.setFromTwoVectors(Eigen::Vector3f::UnitX(), axisVector);

	control.orientation.w = jointAxis.w();
	control.orientation.x = jointAxis.x();
	control.orientation.y = jointAxis.y();
	control.orientation.z = jointAxis.z();

	control.always_visible = true;
	control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
	control.orientation_mode = visualization_msgs::InteractiveMarkerControl::INHERIT;

	marker.controls.push_back(control);

	gIntServer->insert(marker);
	gIntServer->setCallback(marker.name, &processFeedback);
}
void UrdfModelMarker::addInvisibleMeshMarkerControl(visualization_msgs::InteractiveMarker &int_marker, boost::shared_ptr<const Link> link, const std_msgs::ColorRGBA &color){
  visualization_msgs::InteractiveMarkerControl control;
  //    ps.pose = UrdfPose2Pose(link->visual->origin);
  visualization_msgs::Marker marker;

  //if (use_color) marker.color = color;
  marker.type = visualization_msgs::Marker::CYLINDER;
  double scale=0.01;
  marker.scale.x = scale;
  marker.scale.y = scale * 1;
  marker.scale.z = scale * 40;
  marker.color = color;
  //marker.pose = stamped.pose;
  //visualization_msgs::InteractiveMarkerControl control;
  boost::shared_ptr<Joint> parent_joint = link->parent_joint;
  Eigen::Vector3f origin_x(0,0,1);
  Eigen::Vector3f dest_x(parent_joint->axis.x, parent_joint->axis.y, parent_joint->axis.z);
  Eigen::Quaternionf qua;

  qua.setFromTwoVectors(origin_x, dest_x);
  marker.pose.orientation.x = qua.x();
  marker.pose.orientation.y = qua.y();
  marker.pose.orientation.z = qua.z();
  marker.pose.orientation.w = qua.w();

  control.markers.push_back( marker );
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::BUTTON;
  control.always_visible = true;

  int_marker.controls.push_back(control);
  return;
}