Example #1
tf::Matrix3x3 JPCT_Math::quatToMatrix(tf::Quaternion q) {
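    // Builds a 3x3 rotation matrix from a (possibly non-unit) quaternion: the factor s = 2/|q|^2 makes the formula valid without prior normalization.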
    tf::Matrix3x3 matrix; 
    float norm = magnitudeSquared(q);
    float s = norm > 0.0f ? 2.0f / norm : 0.0f;
    float xs = q.x() * s;
    float ys = q.y() * s;
    float zs = q.z() * s;
    float xx = q.x() * xs;
    float xy = q.x() * ys;
    float xz = q.x() * zs;
    float xw = q.w() * xs;
    float yy = q.y() * ys;
    float yz = q.y() * zs;
    float yw = q.w() * ys;
    float zz = q.z() * zs;
    float zw = q.w() * zs;
    matrix[0][0] = 1.0F - (yy + zz);
    matrix[1][0] = xy - zw;
    matrix[2][0] = xz + yw;
    matrix[0][1] = xy + zw;
    matrix[1][1] = 1.0F - (xx + zz);
    matrix[2][1] = yz - xw;
    matrix[0][2] = xz - yw;
    matrix[1][2] = yz + xw;
    matrix[2][2] = 1.0F - (xx + yy);

    return matrix;
}
Example #2
void Hand_filter::update(tf::Vector3 p, tf::Quaternion& q){
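    // Fill the buffers with the first samples; once full, reject jumps and smooth the pose
    // (slerp towards the new orientation, Kalman filter for the position).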

    if(b_first){
        p_filter_buffer.push_back(p);
        q_filter_buffer.push_back(q);
        if(p_filter_buffer.size() == p_filter_buffer.capacity()){
            b_first = false;
            ROS_INFO("====== hand filter ======");
           // ROS_INFO("buffer full: %d",p_filter_buffer.size());
            ROS_INFO("p: %f %f %f",p.x(),p.y(),p.z());
            ROS_INFO("q: %f %f %f %f",q.x(),q.y(),q.z(),q.w());

            k_position(0) = p.x();
            k_position(1) = p.y();
            k_position(2) = p.z();
            kalman_filter.Init(k_position);

            q_tmp = q;
            p_tmp = p;

        }
    }else{


        /// Orientation filter
        if(jumped(q,q_tmp,q_threashold)){
            ROS_INFO("q jumped !");
            q = q_tmp;
        }

        q_attractor(q,up);
        q = q_tmp.slerp(q,0.1);


       /// Position filter
        if(!jumped(p,p_tmp,p_threashold)){

            k_measurement(0) = p.x();
            k_measurement(1) = p.y();
            k_measurement(2) = p.z();

        }else{
            ROS_INFO("p jumped !");
            k_measurement(0) = p_tmp.x();
            k_measurement(1) = p_tmp.y();
            k_measurement(2) = p_tmp.z();
        }

        kalman_filter.Update(k_measurement,0.001);
        kalman_filter.GetPosition(k_position);
        p.setValue(k_position(0),k_position(1),k_position(2));



        q_tmp = q;
        p_tmp = p;

    }


}
Example #3
/**
 * Convert tf::Quaternion to string
 */
template<> std::string toString<tf::Quaternion>(const tf::Quaternion& p_quat)
{
  std::stringstream ss;

  ss << "(" << p_quat.x() << ", " << p_quat.y() << ", " << p_quat.z() << ", " << p_quat.w() << ")";

  return ss.str();
}
Example #4
void ATTCallback (IvyClientPtr app, void* data , int argc, char **argv)
{
	double att_unit_coef= 0.0139882;
	double phi, theta, yaw;
		
	sscanf(argv[0],"%lf %lf %lf %*d %*d %*d",&phi,&theta,&yaw);
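	// Convert the raw attitude counts to radians (att_unit_coef appears to be a counts-to-degrees factor);
	// theta and yaw are negated to match the target frame's sign convention.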
	phi*=att_unit_coef*DEG2RAD;
	theta*=-att_unit_coef*DEG2RAD;
	yaw*=-att_unit_coef*DEG2RAD;
	
	q.setRPY(phi,theta,yaw);
	
	//ROS_INFO("Phi %f; Theta %f; Yaw %f", phi,theta,yaw);
	//ROS_INFO("q1 %f; q2 %f; q3 %f; q4 %f", q.x(),q.y(),q.z(),q.w());

	imu_data.header.stamp = ros::Time::now();
	imu_data.orientation.x=q.x();
	imu_data.orientation.y=q.y();
	imu_data.orientation.z=q.z();
	imu_data.orientation.w=q.w();
	
	imu_message.publish(imu_data);
	
	//Only temporary until the rates are equal
	att_data.header.stamp = ros::Time::now();
	att_data.orientation.x=q.x();
	att_data.orientation.y=q.y();
	att_data.orientation.z=q.z();
	att_data.orientation.w=q.w();

	att_message.publish(att_data);
}
Example #5
/**
  * Packs the current state into an odom message. Needs a quaternion for the conversion.
  */
void pack_pose(tf::Quaternion& q, nav_msgs::Odometry& odom)
{
    q.setRPY(0, 0, _theta);
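    // Planar robot: only x, y and yaw carry information; z, roll and pitch stay zero.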

    odom.header.stamp = ros::Time::now();

    odom.pose.pose.position.x = _x;
    odom.pose.pose.position.y = _y;

    odom.pose.pose.orientation.x = q.x();
    odom.pose.pose.orientation.y = q.y();
    odom.pose.pose.orientation.z = q.z();
    odom.pose.pose.orientation.w = q.w();
}
Example #6
void MTMHaptics::convert_bodyForcetoSpatialForce(geometry_msgs::WrenchStamped &body_wrench){

    visualize_haptic_force(body_force_pub);
    rot_quat.setX(cur_mtm_pose.orientation.x);
    rot_quat.setY(cur_mtm_pose.orientation.y);
    rot_quat.setZ(cur_mtm_pose.orientation.z);
    rot_quat.setW(cur_mtm_pose.orientation.w);
    F7wrt0.setValue(body_wrench.wrench.force.x, body_wrench.wrench.force.y, body_wrench.wrench.force.z);
    rot_matrix.setRotation(rot_quat);
    F0wrt7 = rot_matrix.transpose() * F7wrt0;
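    // The transpose of the rotation matrix (its inverse) maps the body-frame force into the spatial frame.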
    body_wrench.wrench.force.x = F0wrt7.x();
    body_wrench.wrench.force.y = F0wrt7.y();
    body_wrench.wrench.force.z = F0wrt7.z();
    visualize_haptic_force(spatial_force_pub);

}
Example #7
void Jumps::update(tf::Vector3& origin,tf::Quaternion& orientation){
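    // Fill the buffers first; afterwards compare each new sample with the last accepted one
    // and replace it with that value whenever the change exceeds the configured threshold.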

    if(bFirst){

        origin_buffer.push_back(origin);
        orientation_buffer.push_back(orientation);

        if(origin_buffer.size() == origin_buffer.capacity()){
            bFirst = false;
            ROS_INFO("====== jump filter full ======");
        }

    }else{

        origin_tmp      = origin_buffer[origin_buffer.size()-1];
        orientation_tmp = orientation_buffer[orientation_buffer.size()-1];

        if(bDebug){
            std::cout<< "=== jum debug === " << std::endl;
            std::cout<< "p    : " << origin.x() << "\t" << origin.y() << "\t" << origin.z() << std::endl;
            std::cout<< "p_tmp: " << origin_tmp.x() << "\t" << origin_tmp.y() << "\t" << origin_tmp.z() << std::endl;
            std::cout<< "p_dis: " << origin.distance(origin_tmp) << std::endl;

            std::cout<< "q    : " << orientation.x() << "\t" << orientation.y() << "\t" << orientation.z() <<  "\t" << orientation.w() << std::endl;
            std::cout<< "q_tmp: " << orientation_tmp.x() << "\t" << orientation_tmp.y() << "\t" << orientation_tmp.z() << "\t" << orientation_tmp.w() << std::endl;
            std::cout<< "q_dis: " << dist(orientation,orientation_tmp) << std::endl;
        }

        /// Position jump
        if(jumped(origin,origin_tmp,origin_threashold)){
            ROS_INFO("position jumped !");
            origin = origin_tmp;
           // exit(0);
        }else{
            origin_buffer.push_back(origin);
        }

        /// Orientation jump
        if(jumped(orientation,orientation_tmp,orientation_threashold)){
            ROS_INFO("orientation jumped !");
            orientation = orientation_tmp;
            //exit(0);
        }else{
            orientation_buffer.push_back(orientation);
        }
    }
}
Example #8
void odomCallback(const nav_msgs::Odometry::ConstPtr& msg)
{
    double r, p, y;
    
    lastQuat = tf::Quaternion(msg->pose.pose.orientation.x,
                              msg->pose.pose.orientation.y,
                              msg->pose.pose.orientation.z,
                              msg->pose.pose.orientation.w);
    
    tf::Matrix3x3(lastQuat).getRPY(r, p, y);
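    // Rebuild the quaternion with the same roll and pitch but yaw offset by +90 degrees (frame convention fix).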
    
    lastQuat.setRPY(r, p, y + M_PI / 2.0);
    
    lastOdom = *msg;
}
Example #9
void imuMsgCallback(const sensor_msgs::Imu& imu_msg)
{
  tf::quaternionMsgToTF(imu_msg.orientation, tmp_);

  tfScalar yaw, pitch, roll;
  tf::Matrix3x3(tmp_).getRPY(roll, pitch, yaw);
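  // Keep only roll and pitch from the IMU; yaw is zeroed so the published transform carries no heading.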

  tmp_.setRPY(roll, pitch, 0.0);

  transform_.setRotation(tmp_);

  transform_.stamp_ = imu_msg.header.stamp;

  tfB_->sendTransform(transform_);
}
Example #10
  void LeapMotionListener::leapmotionCallback(const leap_motion::leapros2::ConstPtr& dataHand)
{
      dataHand_=(*dataHand);
      ROS_INFO("ORIENTATION OF THE HAND SET TO \n X: %f\n  Y: %f\n Z: %f\n ",dataHand_.ypr.x,dataHand_.ypr.y,dataHand_.ypr.z);
      ROS_INFO("DIRECTION OF THE HAND SET TO \n X: %f\n  Y: %f\n Z: %f\n ",dataHand_.direction.x,dataHand_.direction.y,dataHand_.direction.z);
      ROS_INFO("NORMAL OF THE HAND SET TO \n X: %f\n  Y: %f\n Z: %f\n ",dataHand_.normal.x,dataHand_.normal.y,dataHand_.normal.z);
      //ROS_INFO("INSIDE CALLBACK");
      //We create the reference values for the first position of our hand
      if (FIRST_VALUE)
        {
        dataLastHand_.palmpos.x=dataHand_.palmpos.x;
        dataLastHand_.palmpos.y=dataHand_.palmpos.y;
        dataLastHand_.palmpos.z=dataHand_.palmpos.z;
        FIRST_VALUE=0;
        Updifferencex=dataLastHand_.palmpos.x+10;
        Downdifferencex=dataLastHand_.palmpos.x-10;
        Updifferencez=dataLastHand_.palmpos.z+10;
        Downdifferencez=dataLastHand_.palmpos.z-20;
        Updifferencey=dataLastHand_.palmpos.y+20;
        Downdifferencey=dataLastHand_.palmpos.y-20;
        //ROS_INFO("ORIGINAL POSITION OF THE HAND SET TO \n X: %f\n  Y: %f\n Z: %f\n ",trajectory_hand.at(i).palmpos.x,trajectory_hand.at(i).palmpos.y,trajectory_hand.at(i).palmpos.z);
         
        //sleep(2);
        }
        else
        {
            //We get the distance between the fingers and transform it into a gripper distance
            rot7=DtA+DtAx*dataHand_.finger_distance;
            rot8=DtA+DtAx*dataHand_.finger_distance;
            joint_msg_leap=jointstate_;
            joint_msg_leap.position[7] = -rot8;
            joint_msg_leap.position[6] = rot7;
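            // Only send a new target pose when the palm has left the dead band around the last accepted position.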
            
            if ((dataHand_.palmpos.x<Downdifferencex)||(dataHand_.palmpos.x>Updifferencex)||(dataHand_.palmpos.y<Downdifferencey)||(dataHand_.palmpos.y>Updifferencey)||(dataHand_.palmpos.z<Downdifferencez)||(dataHand_.palmpos.z>Updifferencez))
              {
                //q.setRPY(0,0,M_PI/2);//Fixed Position for testing..with this pose the robot is oriented to the ground
                q.setRPY(dataHand_.ypr.x,dataHand_.ypr.y,dataHand_.ypr.z);
                pose.orientation.x = q.getAxis().getX();//changed on purpose: getZ()
                pose.orientation.y = q.getAxis().getY();
                pose.orientation.z = q.getAxis().getZ();//changed on purpose: getX()
                pose.orientation.w = q.getW();
                //pose.orientation.w = ;
                //pose.orientation.z=1;
                //pose.orientation.y=0;
                //pose.orientation.x=0;
                //We need to send the correct axis to the robot. Currently they are rotated and x is z
                //ROS_INFO("VALUES OF THE QUATERNION SET TO \n X: %f\n  Y: %f\n Z: %f W: %f\n",pose.orientation.x,pose.orientation.y,pose.orientation.z,pose.orientation.w);
                pose.position.y +=(dataHand_.palmpos.x-dataLastHand_.palmpos.x) ;
                pose.position.z +=(dataHand_.palmpos.y-dataLastHand_.palmpos.y);
                if(pose.position.z>Uplimitez)
                pose.position.z=Uplimitez;
                pose.position.x +=(dataHand_.palmpos.z-dataLastHand_.palmpos.z);
                //Here we instantiate an autogenerated service class 
                srv.request.target = pose ;
                if (pclient->call(srv))
                {
                  //ROS_INFO("Ret: %d", (int)srv.response.ret);
                  dataLastHand_.palmpos.x=dataHand_.palmpos.x;
                  dataLastHand_.palmpos.y=dataHand_.palmpos.y;
                  dataLastHand_.palmpos.z=dataHand_.palmpos.z;
                  // Both limits for x,y,z to avoid small changes
                  Updifferencex=dataLastHand_.palmpos.x+10;//
                  Downdifferencex=dataLastHand_.palmpos.x-10;
                  Updifferencez=dataLastHand_.palmpos.z+10;
                  Downdifferencez=dataLastHand_.palmpos.z-20;
                  Updifferencey=dataLastHand_.palmpos.y+20;
                  Downdifferencey=dataLastHand_.palmpos.y-20;
                  old_pose=pose;
                  ROS_INFO("response %f\n",srv.response.ret);
                }
                else
                {
                  ROS_ERROR("Position out of range");
                  pose=old_pose;
                } 
              }
         //We get the answer of the service and publish it into the joint_msg_leap message
         //joint_msg_leap.position[0] = srv.response.joint1;
         //joint_msg_leap.position[1] = srv.response.joint2;
         //joint_msg_leap.position[2] = srv.response.joint3;
         //joint_msg_leap.position[3] = srv.response.joint4;
         //joint_msg_leap.position[4] = srv.response.joint5;
         //joint_msg_leap.position[5] = srv.response.joint6;
         //robo_pub.publish(joint_msg_leap);
        }
 //ROS_INFO("END EFFECTOR POSITION \n X: %f\n  Y: %f\n Z: %f\n", pose.position.x,pose.position.y,pose.position.z);
 //ROS_INFO("ORIENTATION OF THE HAND SET TO \n X: %f\n  Y: %f\n Z: %f\n ",q.getAxis().getX(),q.getAxis().getY(),q.getAxis().getZ());  
}
Example #11
int main(int argc, char** argv)
{
	ros::init(argc, argv, "test2d");

	ros::NodeHandle node;
	
	tf::TransformListener t(ros::Duration(20));
	tf::StampedTransform tr_o, tr_i;
	
	double a_test(0);
	double b_test(0);
	double theta_test(0);
	double nu_theta(1);
	double nu_trans(1);
	
	ROS_INFO_STREAM("waiting for initial transforms");
	while (node.ok())
	{
		ros::Time now(ros::Time::now());
		//ROS_INFO_STREAM(now);
		if (t.waitForTransform(baseLinkFrame, now, baseLinkFrame, now, odomFrame, ros::Duration(0.1)))
			break;
		//ROS_INFO("wait");
		//ros::Duration(0.1).sleep();
	}
	ROS_INFO_STREAM("got first odom to baseLink");
	while (node.ok())
	{
		ros::Time now(ros::Time::now());
		//ROS_INFO_STREAM(now);
		if (t.waitForTransform(kinectFrame, now, kinectFrame, now, worldFrame, ros::Duration(0.1)))
			break;
		//ROS_INFO("wait");
		//ros::Duration(0.1).sleep();
	}
	ROS_INFO_STREAM("got first world to kinect");
	
	sleep(10);
	
	ros::Rate rate(0.5);
	while (node.ok())
	{
		// sleep
		rate.sleep();
		
		// get parameters from transforms
		ros::Time curTime(ros::Time::now());
		ros::Time lastTime = curTime - ros::Duration(10);
		//ROS_INFO_STREAM("curTime: " << curTime << ", lastTime: " << lastTime);
		t.waitForTransform(baseLinkFrame, curTime, baseLinkFrame, lastTime, odomFrame, ros::Duration(3));
		t.waitForTransform(kinectFrame, curTime, kinectFrame, lastTime, worldFrame, ros::Duration(3));
		t.lookupTransform(baseLinkFrame, curTime, baseLinkFrame, lastTime, odomFrame, tr_o);
		//ROS_INFO_STREAM("odom to baselink: trans: " << tr_o.getOrigin() << ", rot: " << tr_o.getRotation());
		const double alpha_o_tf = tr_o.getOrigin().x();
		const double beta_o_tf = tr_o.getOrigin().y();
		const double theta_o = 2*atan2(tr_o.getRotation().z(), tr_o.getRotation().w());
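		// For a pure yaw rotation q = (0, 0, sin(theta/2), cos(theta/2)) the yaw angle is 2*atan2(z, w).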
		const double alpha_o = cos(theta_o)*alpha_o_tf + sin(theta_o)*beta_o_tf;
		const double beta_o = -sin(theta_o)*alpha_o_tf + cos(theta_o)*beta_o_tf;
		t.lookupTransform(kinectFrame, curTime, kinectFrame, lastTime, worldFrame, tr_i);
		//ROS_INFO_STREAM("world to kinect: trans: " << tr_i.getOrigin() << ", rot: " << tr_i.getRotation());
		const double alpha_i_tf = tr_i.getOrigin().z();
		const double beta_i_tf = -tr_i.getOrigin().x();
		const double theta_i = 2*atan2(-tr_i.getRotation().y(), tr_i.getRotation().w());
		const double alpha_i = cos(theta_i)*alpha_i_tf + sin(theta_i)*beta_i_tf;
		const double beta_i = -sin(theta_i)*alpha_i_tf + cos(theta_i)*beta_i_tf;
		lastTime = curTime;
		
		ROS_WARN_STREAM("Input odom: ("<<alpha_o<<", "<<beta_o<<", "<<theta_o<<"), icp: ("\
				<<alpha_i<<", "<<beta_i<<", "<<theta_i<<")");
		if (abs(theta_i-theta_o) > max_diff_angle)
		{
			ROS_WARN_STREAM("Angle difference too big: " << abs(theta_i-theta_o));
			continue;
		}

		// compute correspondences, check values to prevent numerical instabilities
		const double R_denom = 2 * sin(theta_o);
		/*if (abs(R_denom) < limit_low)
		{
			ROS_WARN_STREAM("magnitude of R_denom too low: " << R_denom);
			continue;
		}*/
		const double kr_1 = (alpha_o * cos(theta_o) + alpha_o + beta_o * sin(theta_o));
		const double kr_2 = (beta_o * cos(theta_o) + beta_o - alpha_o * sin(theta_o));
		const double phi = atan2(kr_2, kr_1);
		const double r_1 = kr_1/R_denom;
		const double r_2 = kr_2/R_denom;
		const double R = sqrt(r_1*r_1 + r_2*r_2);
		const double kC_1 = (beta_i + beta_i * cos(theta_o) + alpha_i * sin(theta_o));
		const double kC_2 = (alpha_i + alpha_i * cos(theta_o) + beta_i * sin(theta_o));
		//const double xi = atan2(kC_2 + R_denom*b_test, kC_1 + R_denom*a_test);
		const double C_1 = kC_1 / R_denom;
		const double C_2 = kC_2 / R_denom;
		double xi(0);
		if (R_denom)
			xi = atan2(C_1 + a_test, C_2 + b_test);
		else
			xi = atan2(kC_1, kC_2);
		
		// compute new values 
		//double tmp_theta = M_PI/2 - phi  - xi;
		double tmp_theta = xi - phi;
		double tmp_a = R * sin(tmp_theta+phi) - C_1;
		double tmp_b = R * cos(tmp_theta+phi) - C_2;
		//tmp_theta = (tmp_theta<-M_PI)?tmp_theta+2*M_PI:((tmp_theta>M_PI)?tmp_theta-2*M_PI:tmp_theta);
		
		//const double V = sqrt((C_1+a_test)*(C_1+a_test)+(C_2+b_test)*(C_2+b_test));
		/*const double err = sqrt((a_test-tmp_a)*(a_test-tmp_a)+(b_test-tmp_b)*(b_test-tmp_b));
		const double err_pred = sqrt(R*R + V*V -2*R*V*cos(tmp_theta+phi-xi));
		if (abs(err-err_pred)>0.00001)
		{
		ROS_WARN_STREAM("Error="<<err<<" Computed="<<err_pred);
		ROS_WARN_STREAM("chosen: ("<<tmp_a<<", "<<tmp_b<<"); rejected: ("
				<<R*sin(tmp_theta+phi+M_PI)-C_1<<", "<<R*cos(tmp_theta+phi+M_PI)-C_2<<")");
		
		ROS_WARN_STREAM("R: "<<R<<", V: "<<V<<", C_1: "<<C_1<<", C_2: "<<C_2\
				<<", phi: "<<phi<<", xi: "<<xi<<", theta: "<<tmp_theta);
		}*/
		if (R>min_R_rot)
		{
			theta_test = atan2((1-nu_theta)*sin(theta_test)+nu_theta*sin(tmp_theta),
					(1-nu_theta)*cos(theta_test)+nu_theta*cos(tmp_theta));
			nu_theta = max(min_nu, 1/(1+1/nu_theta));
		}
		if (R<max_R_trans)
		{
			a_test = (1-nu_trans)*a_test + nu_trans*tmp_a;
			b_test = (1-nu_trans)*b_test + nu_trans*tmp_b;
			nu_trans = max(min_nu, 1/(1+1/nu_trans));
		}
		
		// compute transform
		const tf::Quaternion quat_trans = tf::Quaternion(a_test, b_test, 0, 1);
		const tf::Quaternion quat_test = tf::Quaternion(0, 0, sin(theta_test/2), cos(theta_test / 2));
		const tf::Quaternion quat_axes = tf::Quaternion(-0.5, 0.5, -0.5, 0.5);
		const tf::Quaternion quat_rot = quat_test*quat_axes;
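		// Conjugation trick: quat_rot.inverse() * (a, b, 0, 1) * quat_rot leaves w untouched and applies the
		// inverse rotation to the vector part (a_test, b_test, 0), i.e. expresses the translation in the quat_rot frame.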
		
		tf::Quaternion quat_tmp = quat_rot.inverse()*quat_trans*quat_rot;

		const tf::Vector3 vect_trans = tf::Vector3(quat_tmp.x(), quat_tmp.y(), 0);


		tf::Transform transform;
		transform.setRotation(quat_rot);
		transform.setOrigin(vect_trans);

		ROS_INFO_STREAM("Estimated transform: trans: " <<  a_test << ", " <<
				b_test << ", rot: " << 2*atan2(quat_test.z(), quat_test.w()));
	
		static tf::TransformBroadcaster br;
		br.sendTransform(tf::StampedTransform(transform, ros::Time::now(),
				baseLinkFrame, myKinectFrame));
	}
	
	return 0;
}
Example #12
void InteractiveMarkerArrow::MakeInteractiveMarker(std::string intMarkerName, tf::Quaternion qx_control, tf::Quaternion qy_control, tf::Quaternion qz_control)
{	
    visualization_msgs::InteractiveMarker int_marker;
    int_marker.header.frame_id = "world";
    int_marker.scale = 0.1;
    int_marker.name = intMarkerName;
    
    geometry_msgs::Pose pose;    
    tfSrv->get("world", intMarkerName + "_control", pose);    
    int_marker.pose = pose;

    InteractiveMarkerArrow::MakeControl(int_marker);
    
    int_marker.controls[0].interaction_mode = 7;
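    // (interaction mode 7 corresponds to visualization_msgs::InteractiveMarkerControl::MOVE_3D)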

    visualization_msgs::InteractiveMarkerControl control;
    control.orientation_mode = visualization_msgs::InteractiveMarkerControl::FIXED;
    
    tf::Transform tr;
    tfSrv->get("world", "viceGripRotation", tr);
    
    qx_control = tr.getRotation() * qx_control;
    qy_control = tr.getRotation() * qy_control;
    qz_control = tr.getRotation() * qz_control;

    control.orientation.w = qx_control.getW();
    control.orientation.x = qx_control.getX();
    control.orientation.y = qx_control.getY();
    control.orientation.z = qx_control.getZ();
    control.name = "rotate_x";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
    int_marker.controls.push_back(control);
    control.name = "move_x";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
    int_marker.controls.push_back(control);

    control.orientation.w = qz_control.getW();
    control.orientation.x = qz_control.getX();
    control.orientation.y = qz_control.getY();
    control.orientation.z = qz_control.getZ();
    control.name = "rotate_z";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
    int_marker.controls.push_back(control);
    control.name = "move_z";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
    int_marker.controls.push_back(control);

    control.orientation.w = qy_control.getW();
    control.orientation.x = qy_control.getX();
    control.orientation.y = qy_control.getY();
    control.orientation.z = qy_control.getZ();
    control.name = "rotate_y";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::ROTATE_AXIS;
    int_marker.controls.push_back(control);
    control.name = "move_y";
    control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_AXIS;
    int_marker.controls.push_back(control);
	    
    intMarkerSrv->insert(int_marker);      
    intMarkerSrv->setCallback(int_marker.name, _processFeedBackTemp(boost::bind(&InteractiveMarkerArrow::ProcessFeedback, this, _1)));
    intMarkerSrv->applyChanges();
}
Example #13
		// callback for the complete message
		void complete_message_callback(const homog_track::HomogComplete& msg)
		{
			/********** Begin splitting up the incoming message *********/
			// getting boolean indicating the reference has been set
			reference_set = msg.reference_set;

			// if the reference is set, break out the points
			if (reference_set)
			{
				// initialize temp scalar to zero
				temp_scalar = cv::Mat::zeros(1,1,CV_64F);
			
				// getting the current marker points
				circles_curr = msg.current_points;
				
				// getting the reference marker points
				circles_ref = msg.reference_points;
				
				// setting the current points to the point vector
				curr_red_p.x = circles_curr.red_circle.x;
				curr_green_p.x = circles_curr.green_circle.x;
				curr_cyan_p.x = circles_curr.cyan_circle.x;
				curr_purple_p.x = circles_curr.purple_circle.x;
				curr_red_p.y = circles_curr.red_circle.y;
				curr_green_p.y = circles_curr.green_circle.y;
				curr_cyan_p.y = circles_curr.cyan_circle.y;
				curr_purple_p.y = circles_curr.purple_circle.y;
				curr_points_p.push_back(curr_red_p);
				curr_points_p.push_back(curr_green_p);
				curr_points_p.push_back(curr_cyan_p);
				curr_points_p.push_back(curr_purple_p);
				
				
				
				// converting the points to be the projective coordinates
				for (int ii = 0; ii < curr_points_m.size(); ii++)
				{
					curr_points_m[ii] = K.inv(cv::DECOMP_LU)*curr_points_m[ii];
					std::cout << "currpoints at " << ii << " is: " << curr_points_m[ii] << std::endl;
					
				}
				
				// setting the reference points to the point vector
				ref_red_p.x = circles_ref.red_circle.x;
				ref_green_p.x = circles_ref.green_circle.x;
				ref_cyan_p.x = circles_ref.cyan_circle.x;
				ref_purple_p.x = circles_ref.purple_circle.x;
				ref_red_p.y = circles_ref.red_circle.y;
				ref_green_p.y = circles_ref.green_circle.y;
				ref_cyan_p.y = circles_ref.cyan_circle.y;
				ref_purple_p.y = circles_ref.purple_circle.y;
				ref_points_p.push_back(ref_red_p);
				ref_points_p.push_back(ref_green_p);
				ref_points_p.push_back(ref_cyan_p);
				ref_points_p.push_back(ref_purple_p);
				
				
				// setting the reference points to the matrix vector; don't need to set the last element because it's already 1
				ref_red_m.at<double>(0,0) = ref_red_p.x;
				ref_red_m.at<double>(1,0) = ref_red_p.y;
				ref_green_m.at<double>(0,0) = ref_green_p.x;
				ref_green_m.at<double>(1,0) = ref_green_p.y;
				ref_cyan_m.at<double>(0,0) = ref_cyan_p.x;
				ref_cyan_m.at<double>(1,0) = ref_cyan_p.y;
				ref_purple_m.at<double>(0,0) = ref_purple_p.x;
				ref_purple_m.at<double>(1,0) = ref_purple_p.y;
				ref_points_m.push_back(ref_red_m);
				ref_points_m.push_back(ref_green_m);
				ref_points_m.push_back(ref_cyan_m);
				ref_points_m.push_back(ref_purple_m);
				
				
				// converting the points to be the projective coordinates
				for (int ii = 0; ii < ref_points_m.size(); ii++)
				{
					ref_points_m[ii] = K.inv(cv::DECOMP_LU)*ref_points_m[ii];
					//std::cout << "refpoints at " << ii << " is: " << ref_points_m[ii] << std::endl;
				}
				
				// if any of the points are -1, skip the homography
				if (curr_red_p.x != -1 && curr_green_p.x != -1 && curr_cyan_p.x != -1 && curr_purple_p.x != -1)
				{
					//std::cout << "hi" << std::endl;
					
					// finding the perspective homography
					G = cv::findHomography(curr_points_p,ref_points_p,0);
					//G = cv::findHomography(ref_points_p,ref_points_p,0);
					
					std::cout << "G: " << G << std::endl;
					
					// decomposing the homography into up to four solutions
					// G and K are 3x3; R is a vector of 3x3 rotations,
					// T a vector of 3x1 translations and n a vector of 3x1 plane normals
					// successful_decomp is the number of solutions found
					successful_decomp = cv::decomposeHomographyMat(G,K,R,T,n);
					
					std::cout << "successful_decomp: " << successful_decomp << std::endl;
					
					
					// if the decomp is successful will find the best matching
					if (successful_decomp > 0)
					{
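						// A physically valid solution must keep every feature point in front of the camera,
						// i.e. transpose(m)*R*n > 0 for all four points; solutions failing this are discarded later.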
						
						std::cout << std::endl << std::endl << " begin check for visibility" << std::endl;
						
						// finding the alphas
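						// alpha = 1 / (g31*x + g32*y + g33), with G scaled so that g33 = 1 (the projective scale factor for each reference point)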
						alpha_red.data = 1/(G.at<double>(2,0)*ref_red_p.x + G.at<double>(2,1)*ref_red_p.y + 1);
						alpha_green.data = 1/(G.at<double>(2,0)*ref_green_p.x + G.at<double>(2,1)*ref_green_p.y + 1);
						alpha_cyan.data = 1/(G.at<double>(2,0)*ref_cyan_p.x + G.at<double>(2,1)*ref_cyan_p.y + 1);
						alpha_purple.data = 1/(G.at<double>(2,0)*ref_purple_p.x + G.at<double>(2,1)*ref_purple_p.y + 1);
						
						// finding the solutions that give the positive results
						for (int ii = 0; ii < successful_decomp; ii++)
						{
							
							std::cout << "solution set number " << ii << std::endl;
							
							// performing the operation transpose(m)*R*n to check if greater than 0 later
							// order operating on is red green cyan purple
							for (int jj = 0; jj < 4; jj++)
							{
								
								//std::cout << " T size: " << T[ii].size() << std::endl;
								//std::cout << " T type: " << T[ii].type() << std::endl;
								std::cout << " T value: " << T[ii] << std::endl;
								
								//std::cout << " temp scalar 1 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								temp_scalar = curr_points_m[jj].t();
								
								//std::cout << " temp scalar 2 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								
								//std::cout << " R size: " << R[ii].size() << std::endl;
								//std::cout << " R type: " << R[ii].type() << std::endl;
								//std::cout << " R value: " << R[ii] << std::endl;
								temp_scalar = temp_scalar*R[ii];
								
								//std::cout << " temp scalar 3 " << std::endl;
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								
								//std::cout << " n size: " << n[ii].size() << std::endl;
								//std::cout << " n type: " << n[ii].type() << std::endl;
								std::cout << " n value: " << n[ii] << std::endl;
								temp_scalar = temp_scalar*n[ii];
								
								//std::cout << " temp scalar size: " << temp_scalar.size() << std::endl;
								//std::cout << " temp scalar type: " << temp_scalar.type() << std::endl;
								//std::cout << " temp scalar value " << temp_scalar <<std::endl;
								//std::cout << " temp scalar value at 0,0" << temp_scalar.at<double>(0,0) << std::endl;
								
								scalar_value_check.push_back(temp_scalar.at<double>(0,0));
								
								////std::cout << " scalar value check size: " << scalar_value_check.size() << std::endl;
								//std::cout << " \tthe value for the " << jj << " visibility check is: " << scalar_value_check[4*ii+jj] << std::endl;
								
							}
						}
						
						std::cout << " end check for visibility" << std::endl << std::endl;
						
						// resetting the first- and second-solution-found flags
						first_solution_found = false;
						second_solution_found = false;
						fc_found = false;
						
						// getting the two solutions or only one if there are not two
						for (int ii = 0; ii < successful_decomp; ii++)
						{
							// getting the values onto the temporary vector
							// getting the start and end of the next solution
							temp_solution_start = scalar_value_check.begin() + 4*ii;
							temp_solution_end = scalar_value_check.begin() + 4*ii+4;
							temp_solution.assign(temp_solution_start,temp_solution_end);
							
							// checking if all the values are positive
							all_positive = true;
							current_temp_index = 0;
							while (all_positive && current_temp_index < 4)
							{
								if (temp_solution[current_temp_index] >= 0)
								{
									current_temp_index++;
								}
								else
								{
									all_positive = false;
								}
							}
							
						// if all the values were positive and no first solution has been found yet, assign this one
						// as the first solution; if all positive and a first solution already exists, assign it as
						// the second solution; if not all positive, do nothing
							if (all_positive && first_solution_found && !second_solution_found)
							{
								// setting it to indicate a solution has been found
								second_solution_found = true;
								
								// setting the rotation, translation, and normal to be the second set
								second_R = R[ii];
								second_T = T[ii];
								second_n = n[ii];
								
								// setting the projected values
								second_solution = temp_solution;
							}
							else if (all_positive && !first_solution_found)
							{
								// setting it to indicate a solution has been found
								first_solution_found = true;
								
								// setting the rotation, translation, and normal to be the first set
								first_R = R[ii];
								first_T = T[ii];
								first_n = n[ii];
								
								// setting the projected values
								first_solution = temp_solution;
							}
							
							// erasing all the values from the temp solution
							temp_solution.erase(temp_solution.begin(),temp_solution.end());
						}
						
						// erasing all the scalar values from the check
						scalar_value_check.erase(scalar_value_check.begin(),scalar_value_check.end());
					
					
						// displaying the first solution if it was found
						if (first_solution_found)
						{
							std::cout << std::endl << "first R: " << first_R << std::endl;
							std::cout << "first T: " << first_T << std::endl;
							std::cout << "first n: " << first_n << std::endl;
							for (double ii : first_solution)
							{
								std::cout << ii << " ";
							}
							std::cout << std::endl;
							
						}
						
						// displaying the second solution if it was found
						if (second_solution_found)
						{
							std::cout << std::endl << "second R: " << second_R << std::endl;
							std::cout << "second T: " << second_T << std::endl;
							std::cout << "second n: " << second_n << std::endl;
							for (double ii : second_solution)
							{
								std::cout << ii << " ";
							}
							std::cout << std::endl;
						}
						
						// because the reference is set such that n should have only a z component, the correct
						// choice is the one closest to n_ref = [0,0,1]^T, i.e. the one with the greatest dot product with n_ref
						if (first_solution_found && second_solution_found)
						{
							if (first_n.dot(n_ref) >= second_n.dot(n_ref))
							{
								R_fc = first_R;
								T_fc = first_T;
							}
							else
							{
								R_fc = second_R;
								T_fc = second_T;
							}
							fc_found = true;
						}
						else if(first_solution_found)
						{
							R_fc = first_R;
							T_fc = first_T;
							fc_found = true;
						}
						
						// if a solution was found, publish it
						// (it needs to be converted to a pose message first)
						if (fc_found)
						{
							// converting the rotation from a cv matrix to quaternion, first need it as a matrix3x3
							R_fc_tf[0][0] = R_fc.at<double>(0,0);
							R_fc_tf[0][1] = R_fc.at<double>(0,1);
							R_fc_tf[0][2] = R_fc.at<double>(0,2);
							R_fc_tf[1][0] = R_fc.at<double>(1,0);
							R_fc_tf[1][1] = R_fc.at<double>(1,1);
							R_fc_tf[1][2] = R_fc.at<double>(1,2);
							R_fc_tf[2][0] = R_fc.at<double>(2,0);
							R_fc_tf[2][1] = R_fc.at<double>(2,1);
							R_fc_tf[2][2] = R_fc.at<double>(2,2);
							std::cout << "Final R:\n" << R_fc << std::endl;
							
							// converting the translation to a vector 3
							T_fc_tf.setX(T_fc.at<double>(0,0));
							T_fc_tf.setY(T_fc.at<double>(1,0));
							T_fc_tf.setZ(T_fc.at<double>(2,0));
							std::cout << "Final T :\n" << T_fc << std::endl;
							
							// getting the rotation as a quaternion
							R_fc_tf.getRotation(Q_fc_tf);
							
							std::cout << "current orientation:" << "\n\tx:\t" << Q_fc_tf.getX() 
																<< "\n\ty:\t" << Q_fc_tf.getY() 
																<< "\n\tz:\t" << Q_fc_tf.getZ() 
																<< "\n\tw:\t" << Q_fc_tf.getW() 
																<< std::endl;
				
							std::cout << "norm of quaternion:\t" << Q_fc_tf.length() << std::endl;
							
							// getting the negated version of the quaternion for the check
							Q_fc_tf_negated = tf::Quaternion(-Q_fc_tf.getX(),-Q_fc_tf.getY(),-Q_fc_tf.getZ(),-Q_fc_tf.getW());
							
							std::cout << "negated orientation:" << "\n\tx:\t" << Q_fc_tf_negated.getX() 
																<< "\n\ty:\t" << Q_fc_tf_negated.getY() 
																<< "\n\tz:\t" << Q_fc_tf_negated.getZ() 
																<< "\n\tw:\t" << Q_fc_tf_negated.getW() 
																<< std::endl;
																
							std::cout << "norm of negated quaternion:\t" << Q_fc_tf_negated.length() << std::endl;
							
							// showing the last orientation
							std::cout << "last orientation:" << "\n\tx:\t" << Q_fc_tf_last.getX() 
															 << "\n\ty:\t" << Q_fc_tf_last.getY() 
															 << "\n\tz:\t" << Q_fc_tf_last.getZ() 
															 << "\n\tw:\t" << Q_fc_tf_last.getW() 
															 << std::endl;
																
							std::cout << "norm of last quaternion:\t" << Q_fc_tf_last.length() << std::endl;
							
							// checking if the quaternion has flipped
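							// q and -q represent the same rotation; keep whichever is closer to the previous sample to avoid sign flips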
							Q_norm_current_diff = std::sqrt(std::pow(Q_fc_tf.getX() - Q_fc_tf_last.getX(),2.0)
														  + std::pow(Q_fc_tf.getY() - Q_fc_tf_last.getY(),2.0) 
														  + std::pow(Q_fc_tf.getZ() - Q_fc_tf_last.getZ(),2.0) 
														  + std::pow(Q_fc_tf.getW() - Q_fc_tf_last.getW(),2.0));
							
							std::cout << "current difference:\t" << Q_norm_current_diff << std::endl;
							
							Q_norm_negated_diff = std::sqrt(std::pow(Q_fc_tf_negated.getX() - Q_fc_tf_last.getX(),2.0)
														  + std::pow(Q_fc_tf_negated.getY() - Q_fc_tf_last.getY(),2.0) 
														  + std::pow(Q_fc_tf_negated.getZ() - Q_fc_tf_last.getZ(),2.0) 
														  + std::pow(Q_fc_tf_negated.getW() - Q_fc_tf_last.getW(),2.0));
							
							std::cout << "negated difference:\t" << Q_norm_negated_diff << std::endl;
							
							if (Q_norm_current_diff > Q_norm_negated_diff)
							{
								Q_fc_tf = Q_fc_tf_negated;
							}
							
							// updating the last
							Q_fc_tf_last = Q_fc_tf;
							
							// converting the tf quaternion to a geometry message quaternion
							Q_fc_gm.x = Q_fc_tf.getX();
							Q_fc_gm.y = Q_fc_tf.getY();
							Q_fc_gm.z = Q_fc_tf.getZ();
							Q_fc_gm.w = Q_fc_tf.getW();
							
							// converting the tf vector3 to a point
							P_fc_gm.x = T_fc_tf.getX();
							P_fc_gm.y = T_fc_tf.getY();
							P_fc_gm.z = T_fc_tf.getZ();
							
							// setting the transform with the values
							fc_tf.setOrigin(T_fc_tf);
							fc_tf.setRotation(Q_fc_tf);
							tf_broad.sendTransform(tf::StampedTransform(fc_tf, msg.header.stamp,"f_star","f_current"));
							
							// setting the decomposed message
							pose_fc_gm.position = P_fc_gm;
							pose_fc_gm.orientation = Q_fc_gm;
							decomposed_msg.pose = pose_fc_gm;
							decomposed_msg.header.stamp = msg.header.stamp;
							decomposed_msg.header.frame_id = "current_frame_normalized";
							decomposed_msg.alpha_red = alpha_red;
							decomposed_msg.alpha_green = alpha_green;
							decomposed_msg.alpha_cyan = alpha_cyan;
							decomposed_msg.alpha_purple = alpha_purple;
							homog_decomp_pub.publish(decomposed_msg);
							
							std::cout << "complete message\n" << decomposed_msg << std::endl << std::endl;
							
							// publish the marker
							marker.pose = pose_fc_gm;
							marker_pub.publish(marker);
							
						}
					}
				}

				// erasing all the temporary points
				if (first_solution_found || second_solution_found)
				{
					// erasing all the point vectors and matrix vectors
					curr_points_p.erase(curr_points_p.begin(),curr_points_p.end());
					ref_points_p.erase(ref_points_p.begin(),ref_points_p.end());
					curr_points_m.erase(curr_points_m.begin(),curr_points_m.end());
					ref_points_m.erase(ref_points_m.begin(),ref_points_m.end());
				}
			}
			/********** End splitting up the incoming message *********/
			
		}
Example #14
float JPCT_Math::magnitudeSquared(tf::Quaternion q) {
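    // |q|^2 = w^2 + x^2 + y^2 + z^2; equals 1 for a unit quaternion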
    return q.w() * q.w() + q.x() * q.x() + q.y() * q.y() + q.z() * q.z();
}
Example #15
bool Hand_filter::jumped(const tf::Quaternion& q_current,const tf::Quaternion& q_previous,const float threashold) const {
    tf::Vector3 axis = q_current.getAxis();
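    // NOTE: stub implementation; the axis is computed but unused and no threshold test is performed,
    // so this overload currently always reports "no jump".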

   // std::cout<< "axis: " << axis.x() << " " << axis.y() << " " << axis.z() << std::endl;
    return false;
}
Example #16
int main (int argc, char** argv) {
    ros::init(argc, argv, "tp_serial");
    ros::NodeHandle n;
    ROS_INFO("Serial server is starting");

    ros::Time current_time;

    tf::TransformBroadcaster transform_broadcaster;

    ros::Subscriber ucCommandMsg; // subscribe to "cmd_vel" to receive commands
    ros::Publisher odom_pub; // publish the odometry
    ros::Publisher path_pub;

    nav_msgs::Odometry move_base_odom;
    nav_msgs::Path pathMsg;

    static tf::Quaternion move_base_quat;

    //Initialize fixed values for odom and tf message
    move_base_odom.header.frame_id = "odom";
    move_base_odom.child_frame_id = "base_robot";

    //Reset all covariance values
    move_base_odom.pose.covariance = boost::assign::list_of(WHEEL_COVARIANCE)(0)(0)(0)(0)(0)
                                     (0)(WHEEL_COVARIANCE)(0)(0)(0)(0)
                                     (0)(0)(999999)(0)(0)(0)
                                     (0)(0)(0)(999999)(0)(0)
                                     (0)(0)(0)(0)(999999)(0)
                                     (0)(0)(0)(0)(0)(WHEEL_COVARIANCE);
    move_base_odom.twist.covariance = boost::assign::list_of(WHEEL_COVARIANCE)(0)(0)(0)(0)(0)
                                      (0)(WHEEL_COVARIANCE)(0)(0)(0)(0)
                                      (0)(0)(999999)(0)(0)(0)
                                      (0)(0)(0)(999999)(0)(0)
                                      (0)(0)(0)(0)(999999)(0)
                                      (0)(0)(0)(0)(0)(WHEEL_COVARIANCE);
    pathMsg.header.frame_id = "odom";
    //----------ROS Odometry publishing------------------

    //----------Initialize serial connection
    ROS_INFO("Serial Initialing:\n Port: %s \n BaudRate: %d", DEFAULT_SERIALPORT, DEFAULT_BAUDRATE);
    int fd = -1;
    struct termios newtio;
    FILE *fpSerial = NULL;
    // read/write, not controlling terminal for process
    fd = open(DEFAULT_SERIALPORT, O_RDWR | O_NOCTTY | O_NDELAY);
    if (fd < 0) {
        ROS_ERROR("Serial Init: Could not open serial device %s", DEFAULT_SERIALPORT);
        return 0;
    }
    //set up new setting
    memset(&newtio, 0, sizeof(newtio));
    newtio.c_cflag = CS8 | CLOCAL | CREAD; //8bit, no parity, 1 stop bit
    newtio.c_iflag |= IGNBRK; //ignore break condition
    newtio.c_oflag = 0;	//all options off
    //newtio.c_lflag = ICANON; //process input as lines
    newtio.c_cc[VTIME] = 0;
    newtio.c_cc[VMIN] = 20; // minimum number of bytes per read
    //activate new settings
    tcflush(fd, TCIFLUSH);

    if (cfsetispeed(&newtio, BAUDMACRO) < 0 || cfsetospeed(&newtio, BAUDMACRO) < 0) {
        ROS_ERROR("Serial Init: Failed to set serial BaudRate: %d", DEFAULT_BAUDRATE);
        close(fd);
        return 0;
    }
    ROS_INFO("Connection established with %s at %d.", DEFAULT_SERIALPORT, BAUDMACRO);
    tcsetattr(fd, TCSANOW, &newtio);
    tcflush(fd, TCIOFLUSH);

    // Open file as a standard I/O stream
    fpSerial = fdopen(fd, "r+");
    if (!fpSerial) {
        ROS_ERROR("Serial Init: Failed to open serial stream %s", DEFAULT_SERIALPORT);
        fpSerial = NULL;
    }

    //Creating message to talk with ROS
    //Subscribe to ROS message
    ucCommandMsg = n.subscribe<geometry_msgs::Twist>("cmd_vel", 1, ucCommandCallback);
    //Setup to publish ROS message
    odom_pub = n.advertise<nav_msgs::Odometry>("tp_robot/odom", 10);
    path_pub = n.advertise<nav_msgs::Path>("tp_robot/odomPath", 10);

    // An "adaptive" timer to maintain periodical communication with the MCU
    ros::Rate rate(FPS);
    uint8_t i = FPS;
    while (i--) {
        find_mean = true;
        rate.sleep();
        ros::spinOnce();
    }
    find_mean = false;

    //Loop for input
    while(ros::ok()) {
        //Process the callbacks
        ros::spinOnce();
        //Send the command and read back the response
        int res;
        cmd_frame[0] = 'f';
        //cmd_frame[1] = 'T';
        *(uint16_t *)(cmd_frame + 2) = pkg_id;
        //*(uint16_t *)(cmd_frame + 4) = left_speed; //set left speed
        //*(uint16_t *)(cmd_frame + 6) = right_speed; // set right speed
        res = write(fd, cmd_frame, 8);
        if (res < 8) ROS_ERROR("Error sending command");
        current_time = ros::Time::now();
        //sleep to wait the response is returned
        rate.sleep();

        //Try to read RCV_LENGTH chars, assuming they have arrived
        res = read(fd, rsp_frame, RCV_LENGTH);
        if (res < RCV_LENGTH) {
            //ROS_INFO("frame %d dropped", pkg_id);
            while(read(fd, rsp_frame, 1) > 0);
        }
        else {
            uint16_t rcv_pkg_id = *(uint16_t *)(rsp_frame +2);
            if(rcv_pkg_id == pkg_id) {
                //read the commands to display
                cmd_field[0] = rsp_frame[0];
                cmd_field[1] = rsp_frame[1];
                cmd_field[2] = 0;

                //Time sent from RTOS to check if the moving base has been reset
                uint8_t reset_robot = *(uint8_t *)(rsp_frame + 16);
                //Calculate the x,y,z,v,w odometry data
                double left_speed = *(int16_t *)(rsp_frame + 4)*M_PI/98500;
                double right_speed = *(int16_t *)(rsp_frame + 6)*M_PI/98500;

                //Get the newly traveled distance on each wheel
                if (rcv_pkg_id == 0 || reset_robot == 1 ) {
                    left_path_offset = *(int32_t *)(rsp_frame +8)*M_PI/98500;
                    right_path_offset = *(int32_t *)(rsp_frame + 12)*M_PI/98500;
                    left_path = 0;
                    right_path = 0;
                }
                else {
                    left_path = *(int32_t *)(rsp_frame +8)*M_PI/98500 - left_path_offset;
                    right_path = *(int32_t *)(rsp_frame + 12)*M_PI/98500 - right_path_offset;
                }

                //Calculate and publish the odometry data
                double left_delta = left_path - left_path1;
                double right_delta = right_path - right_path1;

                //only if the robot has traveled a significant distance will the odometry be calculated
                if(!((left_delta < 0.0075 && left_delta > -0.0075) && (right_delta < 0.0075 && right_delta > -0.0075))) {
                    //Calculate the pose in reference to world base
                    theta += (right_delta - left_delta)/0.3559;
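                    // differential-drive heading update: delta_theta = (delta_right - delta_left) / wheel_base
                    // (0.3559 is presumably the wheel separation in metres)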

                    //wrap theta to [0; 2*pi]
                    if(theta > (2*M_PI)) theta -= 2*M_PI;
                    if (theta < 0) theta += 2*M_PI;

                    //Calculate displacement
                    double disp = (right_delta + left_delta)/2;
                    x += disp*cos(theta);
                    y += disp*sin(theta);

                    //Calculate the twist in reference to the robot base
                    linear_x = (right_speed + left_speed)/2;
                    angular_z = (right_speed - left_speed)/0.362;

                    left_path1 = left_path;
                    right_path1 = right_path;

                }

                // Infer the rotation angle and publish transform
                move_base_quat = tf::Quaternion(tf::Vector3(0, 0, 1), theta);
                transform_broadcaster.sendTransform(tf::StampedTransform(
                                                        tf::Transform(move_base_quat, tf::Vector3(x,y,0)),
                                                        current_time, "odom", "base_robot"));

                //Publish the odometry message over ROS
                move_base_odom.header.stamp = current_time;
                //move_base_odom.header.stamp = ros::Time::now();
                //set the position
                move_base_odom.pose.pose.position.x = x;
                move_base_odom.pose.pose.position.y = y;
                move_base_odom.pose.pose.position.z = 0;
                move_base_odom.pose.pose.orientation.x = move_base_quat.x();
                move_base_odom.pose.pose.orientation.y = move_base_quat.y();
                move_base_odom.pose.pose.orientation.z = move_base_quat.z();
                move_base_odom.pose.pose.orientation.w = move_base_quat.w();

                //Set the velocity
                move_base_odom.child_frame_id = "base_robot";
                move_base_odom.twist.twist.linear.x = linear_x;
                move_base_odom.twist.twist.linear.y = 0;
                move_base_odom.twist.twist.angular.z = angular_z;

                //Push back the pose to path message
                pathMsg.header = move_base_odom.header;
                geometry_msgs::PoseStamped pose_stamped;
                pose_stamped.header = move_base_odom.header;
                pose_stamped.pose = move_base_odom.pose.pose;
                pathMsg.poses.push_back(pose_stamped);
                //publish the odom and path message
                odom_pub.publish(move_base_odom);
                path_pub.publish(pathMsg);

                //ROS_INFO("MCU responded: %s\t%d\t%f\t%f\t%f\t%f\t%d", cmd_field,\
                rcv_pkg_id,\
                left_speed,\
                right_speed,\
                left_path,\
                right_path,\
                reset_robot);

            }
            else {
                ROS_INFO("frame %d corrupted !", pkg_id);
                while(read(fd, rsp_frame, 1) > 0);
            }
        }
Example #17
bool
ArucoMapping::processImage(cv::Mat input_image,cv::Mat output_image)
{
  aruco::MarkerDetector Detector;
  std::vector<aruco::Marker> temp_markers;

  //Set visibility flag to false for all markers
  for(size_t i = 0; i < num_of_markers_; i++)
      markers_[i].visible = false;

  // Save previous marker count
  marker_counter_previous_ = marker_counter_;

  // Detect markers
  Detector.detect(input_image,temp_markers,aruco_calib_params_,marker_size_);
    
  // If no marker found, print statement
  if(temp_markers.size() == 0)
    ROS_DEBUG("No marker found!");

  //------------------------------------------------------
  // FIRST MARKER DETECTED
  //------------------------------------------------------
  if((temp_markers.size() > 0) && (first_marker_detected_ == false))
  {
    //Set flag
    first_marker_detected_=true;

    // Detect lowest marker ID
    lowest_marker_id_ = temp_markers[0].id;
    for(size_t i = 0; i < temp_markers.size();i++)
    {
      if(temp_markers[i].id < lowest_marker_id_)
        lowest_marker_id_ = temp_markers[i].id;
    }


    ROS_DEBUG_STREAM("The lowest Id marker " << lowest_marker_id_ );

    // Identify lowest marker ID with world's origin
    markers_[0].marker_id = lowest_marker_id_;

    markers_[0].geometry_msg_to_world.position.x = 0;
    markers_[0].geometry_msg_to_world.position.y = 0;
    markers_[0].geometry_msg_to_world.position.z = 0;

    markers_[0].geometry_msg_to_world.orientation.x = 0;
    markers_[0].geometry_msg_to_world.orientation.y = 0;
    markers_[0].geometry_msg_to_world.orientation.z = 0;
    markers_[0].geometry_msg_to_world.orientation.w = 1;

    // Relative position and Global position
    markers_[0].geometry_msg_to_previous.position.x = 0;
    markers_[0].geometry_msg_to_previous.position.y = 0;
    markers_[0].geometry_msg_to_previous.position.z = 0;

    markers_[0].geometry_msg_to_previous.orientation.x = 0;
    markers_[0].geometry_msg_to_previous.orientation.y = 0;
    markers_[0].geometry_msg_to_previous.orientation.z = 0;
    markers_[0].geometry_msg_to_previous.orientation.w = 1;

    // Transformation Pose to TF
    tf::Vector3 position;
    position.setX(0);
    position.setY(0);
    position.setZ(0);

    tf::Quaternion rotation;
    rotation.setX(0);
    rotation.setY(0);
    rotation.setZ(0);
    rotation.setW(1);

    markers_[0].tf_to_previous.setOrigin(position);
    markers_[0].tf_to_previous.setRotation(rotation);

    // Relative position of first marker equals Global position
    markers_[0].tf_to_world=markers_[0].tf_to_previous;

    // Increase count
    marker_counter_++;

    // Set sign of visibility of first marker
    markers_[0].visible=true;

    ROS_INFO_STREAM("First marker with ID: " << markers_[0].marker_id << " detected");

    //First marker does not have any previous marker
    markers_[0].previous_marker_id = THIS_IS_FIRST_MARKER;
  }

  //------------------------------------------------------
  // FOR EVERY MARKER DO
  //------------------------------------------------------
  for(size_t i = 0; i < temp_markers.size();i++)
  {
    int index;
    int current_marker_id = temp_markers[i].id;

    //Draw marker convex, ID, cube and axis
    temp_markers[i].draw(output_image, cv::Scalar(0,0,255),2);
    aruco::CvDrawingUtils::draw3dCube(output_image,temp_markers[i], aruco_calib_params_);
    aruco::CvDrawingUtils::draw3dAxis(output_image,temp_markers[i], aruco_calib_params_);

    // Existing marker ?
    bool existing = false;
    int temp_counter = 0;

    while((existing == false) && (temp_counter < marker_counter_))
    {
      if(markers_[temp_counter].marker_id == current_marker_id)
      {
        index = temp_counter;
        existing = true;
        ROS_DEBUG_STREAM("Existing marker with ID: " << current_marker_id << "found");
      }
        temp_counter++;
    }

    //New marker ?
    if(existing == false)
    {
      index = marker_counter_;
      markers_[index].marker_id = current_marker_id;
      existing = true;
      ROS_DEBUG_STREAM("New marker with ID: " << current_marker_id << " found");
    }

    // Change visibility flag of new marker
    for(size_t j = 0;j < marker_counter_; j++)
    {
      for(size_t k = 0;k < temp_markers.size(); k++)
      {
        if(markers_[j].marker_id == temp_markers[k].id)
          markers_[j].visible = true;
      }
    }

    //------------------------------------------------------
    // For existing marker do
    //------------------------------------------------------
    if((index < marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf = arucoMarker2Tf(temp_markers[i]);
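      // arucoMarker2Tf gives the marker pose in the camera frame; inverting it yields the camera pose relative to the marker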
      markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

      const tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      const tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();
    }

    //------------------------------------------------------
    // For new marker do
    //------------------------------------------------------
    if((index == marker_counter_) && (first_marker_detected_ == true))
    {
      markers_[index].current_camera_tf=arucoMarker2Tf(temp_markers[i]);

      tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin();
      markers_[index].current_camera_pose.position.x = marker_origin.getX();
      markers_[index].current_camera_pose.position.y = marker_origin.getY();
      markers_[index].current_camera_pose.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation();
      markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
      markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
      markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
      markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

      // Naming - TFs
      std::stringstream camera_tf_id;
      std::stringstream camera_tf_id_old;
      std::stringstream marker_tf_id_old;

      camera_tf_id << "camera_" << index;

      // Flag indicating whether any known marker is visible in the current image
      bool any_known_marker_visible = false;

      // ID of the known marker relative to which the position of the new marker is calculated
      int last_marker_id;

      // Testing whether the position of the new marker can be calculated relative to an old, known marker
      for(int k = 0; k < index; k++)
      {
        if((markers_[k].visible == true) && (any_known_marker_visible == false))
        {
          if(markers_[k].previous_marker_id != -1)
          {
            any_known_marker_visible = true;
            camera_tf_id_old << "camera_" << k;
            marker_tf_id_old << "marker_" << k;
            markers_[index].previous_marker_id = k;
            last_marker_id = k;
           }
         }
       }

     // New position can be calculated
     if(any_known_marker_visible == true)
     {
       // Generating TFs for listener
       for(char k = 0; k < 10; k++)
       {
         // TF from old marker and its camera
         broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(),
                                                         marker_tf_id_old.str(),camera_tf_id_old.str()));

         // TF from old camera to new camera
         broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(),
                                                         camera_tf_id_old.str(),camera_tf_id.str()));

         ros::Duration(BROADCAST_WAIT_INTERVAL).sleep();
       }

        // Calculate TF between two markers
        listener_->waitForTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0),
                                    ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
        try
        {
          broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(),
                                                          marker_tf_id_old.str(),camera_tf_id_old.str()));

          broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(),
                                                          camera_tf_id_old.str(),camera_tf_id.str()));

          listener_->lookupTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0),
                                     markers_[index].tf_to_previous);
        }
        catch(tf::TransformException &e)
        {
          ROS_ERROR("Not able to lookup transform");
        }

        // Save origin and quaternion of calculated TF
        marker_origin = markers_[index].tf_to_previous.getOrigin();
        marker_quaternion = markers_[index].tf_to_previous.getRotation();

        // If the plane space type is selected, roll, pitch and the Z coordinate are set to zero
        if(space_type_ == "plane")
        {
          double roll, pitch, yaw;
          tf::Matrix3x3(marker_quaternion).getRPY(roll,pitch,yaw);
          roll = 0;
          pitch = 0;
          marker_origin.setZ(0);
          marker_quaternion.setRPY(pitch,roll,yaw);
        }

        markers_[index].tf_to_previous.setRotation(marker_quaternion);
        markers_[index].tf_to_previous.setOrigin(marker_origin);

        marker_origin = markers_[index].tf_to_previous.getOrigin();
        markers_[index].geometry_msg_to_previous.position.x = marker_origin.getX();
        markers_[index].geometry_msg_to_previous.position.y = marker_origin.getY();
        markers_[index].geometry_msg_to_previous.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].tf_to_previous.getRotation();
        markers_[index].geometry_msg_to_previous.orientation.x = marker_quaternion.getX();
        markers_[index].geometry_msg_to_previous.orientation.y = marker_quaternion.getY();
        markers_[index].geometry_msg_to_previous.orientation.z = marker_quaternion.getZ();
        markers_[index].geometry_msg_to_previous.orientation.w = marker_quaternion.getW();

        // increase marker count
        marker_counter_++;

        // Invert the TF of the new marker to obtain the camera pose relative to it
        markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse();

        marker_origin = markers_[index].current_camera_tf.getOrigin();
        markers_[index].current_camera_pose.position.x = marker_origin.getX();
        markers_[index].current_camera_pose.position.y = marker_origin.getY();
        markers_[index].current_camera_pose.position.z = marker_origin.getZ();

        marker_quaternion = markers_[index].current_camera_tf.getRotation();
        markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX();
        markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY();
        markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ();
        markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW();

        // Publish all TFs and markers
        publishTfs(false);
      }
    }

    //------------------------------------------------------
    // Compute global position of new marker
    //------------------------------------------------------
    if((marker_counter_previous_ < marker_counter_) && (first_marker_detected_ == true))
    {
      // Publish all TFs five times so the listener can buffer them
      for(int k = 0; k < 5; k++)
        publishTfs(false);

      std::stringstream marker_tf_name;
      marker_tf_name << "marker_" << index;

      listener_->waitForTransform("world",marker_tf_name.str(),ros::Time(0),
                                  ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
      try
      {
        listener_->lookupTransform("world",marker_tf_name.str(),ros::Time(0),
                                   markers_[index].tf_to_world);
      }
      catch(tf::TransformException &e)
      {
        ROS_ERROR("Not able to lookup transform: %s", e.what());
      }

      // Saving TF to Pose
      const tf::Vector3 marker_origin = markers_[index].tf_to_world.getOrigin();
      markers_[index].geometry_msg_to_world.position.x = marker_origin.getX();
      markers_[index].geometry_msg_to_world.position.y = marker_origin.getY();
      markers_[index].geometry_msg_to_world.position.z = marker_origin.getZ();

      tf::Quaternion marker_quaternion=markers_[index].tf_to_world.getRotation();
      markers_[index].geometry_msg_to_world.orientation.x = marker_quaternion.getX();
      markers_[index].geometry_msg_to_world.orientation.y = marker_quaternion.getY();
      markers_[index].geometry_msg_to_world.orientation.z = marker_quaternion.getZ();
      markers_[index].geometry_msg_to_world.orientation.w = marker_quaternion.getW();
    }
  }

  //------------------------------------------------------
  // Determine which of the visible markers is closest to the camera
  //------------------------------------------------------
  bool any_markers_visible=false;
  int num_of_visible_markers=0;

  if(first_marker_detected_ == true)
  {
    double minimal_distance = INIT_MIN_SIZE_VALUE;
    for(int k = 0; k < num_of_markers_; k++)
    {
      double a,b,c,distance;

      // If the marker is visible, compute its distance from the camera
      if(markers_[k].visible==true)
      {
        a = markers_[k].current_camera_pose.position.x;
        b = markers_[k].current_camera_pose.position.y;
        c = markers_[k].current_camera_pose.position.z;
        distance = std::sqrt((a * a) + (b * b) + (c * c));
        if(distance < minimal_distance)
        {
          minimal_distance = distance;
          closest_camera_index_ = k;
        }

        any_markers_visible = true;
        num_of_visible_markers++;
      }
    }
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Compute global camera pose
  //------------------------------------------------------
  if((first_marker_detected_ == true) && (any_markers_visible == true))
  {
    std::stringstream closest_camera_tf_name;
    closest_camera_tf_name << "camera_" << closest_camera_index_;

    listener_->waitForTransform("world",closest_camera_tf_name.str(),ros::Time(0),
                                ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL));
    try
    {
      listener_->lookupTransform("world",closest_camera_tf_name.str(),ros::Time(0),
                                 world_position_transform_);
    }
    catch(tf::TransformException &ex)
    {
      ROS_ERROR("Not able to lookup transform: %s", ex.what());
    }

    // Saving TF to Pose
    const tf::Vector3 marker_origin = world_position_transform_.getOrigin();
    world_position_geometry_msg_.position.x = marker_origin.getX();
    world_position_geometry_msg_.position.y = marker_origin.getY();
    world_position_geometry_msg_.position.z = marker_origin.getZ();

    tf::Quaternion marker_quaternion = world_position_transform_.getRotation();
    world_position_geometry_msg_.orientation.x = marker_quaternion.getX();
    world_position_geometry_msg_.orientation.y = marker_quaternion.getY();
    world_position_geometry_msg_.orientation.z = marker_quaternion.getZ();
    world_position_geometry_msg_.orientation.w = marker_quaternion.getW();
  }

  //------------------------------------------------------
  // Publish all known markers
  //------------------------------------------------------
  if(first_marker_detected_ == true)
    publishTfs(true);

  //------------------------------------------------------
  // Publish custom marker message
  //------------------------------------------------------
  aruco_mapping::ArucoMarker marker_msg;

  if((any_markers_visible == true))
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.marker_visibile = true;
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.global_camera_pose = world_position_geometry_msg_;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();
    for(size_t j = 0; j < marker_counter_; j++)
    {
      if(markers_[j].visible == true)
      {
        marker_msg.marker_ids.push_back(markers_[j].marker_id);
        marker_msg.global_marker_poses.push_back(markers_[j].geometry_msg_to_world);       
      }
    }
  }
  else
  {
    marker_msg.header.stamp = ros::Time::now();
    marker_msg.header.frame_id = "world";
    marker_msg.num_of_visible_markers = num_of_visible_markers;
    marker_msg.marker_visibile = false;
    marker_msg.marker_ids.clear();
    marker_msg.global_marker_poses.clear();
  }

  // Publish custom marker msg
  marker_msg_pub_.publish(marker_msg);

  return true;
}
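
The TF-to-Pose copying above repeats the same handful of assignments for every marker and camera pose. As a minimal sketch (not part of the original node), the pattern can be collapsed into one small helper built on tf::poseTFToMsg from tf/transform_datatypes.h; the helper name and usage line are illustrative only:

#include <tf/transform_datatypes.h>
#include <geometry_msgs/Pose.h>

// Hypothetical helper: convert a tf::Transform into a geometry_msgs::Pose.
// tf::Pose is a typedef of tf::Transform, so tf::poseTFToMsg applies directly.
geometry_msgs::Pose transformToPose(const tf::Transform& transform)
{
  geometry_msgs::Pose pose;
  tf::poseTFToMsg(transform, pose);
  return pose;
}

// Usage (sketch):
//   markers_[index].current_camera_pose = transformToPose(markers_[index].current_camera_tf);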
	void MocapKalman::spinOnce( )
	{
		const static double dt = r.expectedCycleTime( ).toSec( );

		static double K[36] = { 0 };
		static double F[36] = { 0 };
		static geometry_msgs::PoseWithCovariance residual;
		static geometry_msgs::Vector3 rpy;
		static tf::Quaternion curr_quat;
		static tf::StampedTransform tr;

		// Get the transform
		try
		{
			li.lookupTransform( frame_base, frame_id, ros::Time(0), tr );
		}
		catch( const tf::TransformException &ex )
		{
			ROS_INFO( "Missed a transform...chances are that we are still OK" );
			return;
		}
		// NaN check: NaN is the only value that compares unequal to itself
		if( tr.getOrigin( ).x( ) != tr.getOrigin( ).x( ) )
		{
			ROS_WARN( "NaN DETECTED" );
			return;
		}

		nav_msgs::OdometryPtr odom_msg( new nav_msgs::Odometry );

		odom_msg->header.frame_id = frame_base;
		odom_msg->header.stamp = ros::Time::now( );
		odom_msg->child_frame_id = frame_id;

		// Get the incremental RPY rotation since the last iteration
		curr_quat = tr.getRotation( );
		tf::Matrix3x3( ( curr_quat * last_quat.inverse( ) ).normalize( ) ).getRPY(rpy.x, rpy.y, rpy.z);

		// Step 1:
		// x = F*x
		// We construct F based on the acceleration previously observed
		// We will assume that dt hasn't changed much since the last calculation
		// Guard against division by a near-zero velocity (magnitude check)
		F[0] = ( std::fabs( xdot.twist.linear.x ) < 0.001 ) ? 1 : ( xdot.twist.linear.x + last_delta_twist.twist.linear.x ) / xdot.twist.linear.x;
		F[7] = ( std::fabs( xdot.twist.linear.y ) < 0.001 ) ? 1 : ( xdot.twist.linear.y + last_delta_twist.twist.linear.y ) / xdot.twist.linear.y;
		F[14] = ( std::fabs( xdot.twist.linear.z ) < 0.001 ) ? 1 : ( xdot.twist.linear.z + last_delta_twist.twist.linear.z ) / xdot.twist.linear.z;
		F[21] = ( std::fabs( xdot.twist.angular.x ) < 0.001 ) ? 1 : ( xdot.twist.angular.x + last_delta_twist.twist.angular.x ) / xdot.twist.angular.x;
		F[28] = ( std::fabs( xdot.twist.angular.y ) < 0.001 ) ? 1 : ( xdot.twist.angular.y + last_delta_twist.twist.angular.y ) / xdot.twist.angular.y;
		F[35] = ( std::fabs( xdot.twist.angular.z ) < 0.001 ) ? 1 : ( xdot.twist.angular.z + last_delta_twist.twist.angular.z ) / xdot.twist.angular.z;
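		// Illustration (not from the original source): with a previous estimate
		// xdot.twist.linear.x = 0.50 m/s and last_delta_twist.twist.linear.x = 0.02 m/s,
		// F[0] = (0.50 + 0.02) / 0.50 = 1.04, i.e. the prediction extrapolates the most
		// recently observed acceleration; near-zero velocities fall back to F = 1.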

		// Step 2:
		// P = F*P*F' + Q
		// F stays close to identity, so the F*P*F' term is approximated by P
		// and only the process noise Q is added along the diagonal
		xdot.covariance[0] += linear_process_variance;
		xdot.covariance[7] += linear_process_variance;
		xdot.covariance[14] += linear_process_variance;
		xdot.covariance[21] += angular_process_variance;
		xdot.covariance[28] += angular_process_variance;
		xdot.covariance[35] += angular_process_variance;

		// Step 3:
		// y = z - H*x
		// Because x estimates z directly, H = I
		residual.pose.position.x = ( tr.getOrigin( ).x( ) - last_transform.getOrigin( ).x( ) ) / dt - xdot.twist.linear.x;
		residual.pose.position.y = ( tr.getOrigin( ).y( ) - last_transform.getOrigin( ).y( ) ) / dt - xdot.twist.linear.y;
		residual.pose.position.z = ( tr.getOrigin( ).z( ) - last_transform.getOrigin( ).z( ) ) / dt - xdot.twist.linear.z;
		// HACK for some odd discontinuity in the rotations...
		//if( rpy.x > .4 || rpy.y > .4 || rpy.z > .4 || rpy.x < -.4 || rpy.y < -.4 || rpy.z < -.4 )
		//	rpy.x = rpy.y = rpy.z = 0;
		residual.pose.orientation.x = rpy.x / dt - xdot.twist.angular.x;
		residual.pose.orientation.y = rpy.y / dt - xdot.twist.angular.y;
		residual.pose.orientation.z = rpy.z / dt - xdot.twist.angular.z;

		// Step 4:
		// S = H*P*H' + R
		// Again, since H = I, S is simply P + R
		residual.covariance[0] = xdot.covariance[0] + linear_observation_variance;
		residual.covariance[7] = xdot.covariance[7] + linear_observation_variance;
		residual.covariance[14] = xdot.covariance[14] + linear_observation_variance;
		residual.covariance[21] = xdot.covariance[21] + angular_observation_variance;
		residual.covariance[28] = xdot.covariance[28] + angular_observation_variance;
		residual.covariance[35] = xdot.covariance[35] + angular_observation_variance;

		// Step 5:
		// K = P*H'*S^(-1)
		// Again, since H = I, and since S is only populated along the diagonal,
		// we can invert each element along the diagonal
		K[0] = xdot.covariance[0] / residual.covariance[0];
		K[7] = xdot.covariance[7] / residual.covariance[7];
		K[14] = xdot.covariance[14] / residual.covariance[14];
		K[21] = xdot.covariance[21] / residual.covariance[21];
		K[28] = xdot.covariance[28] / residual.covariance[28];
		K[35] = xdot.covariance[35] / residual.covariance[35];
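		// Illustrative note (not from the original source): with P = 0.01 and
		// R = 0.04, K = 0.01 / 0.05 = 0.2, so only 20% of the residual is folded
		// into the state in Step 6 -- a noisier observation lowers the gain.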

		// Step 6:
		// x = x + K*y
		xdot.twist.linear.x += K[0] * residual.pose.position.x;
		xdot.twist.linear.y += K[7] * residual.pose.position.y;
		xdot.twist.linear.z += K[14] * residual.pose.position.z;
		xdot.twist.angular.x += K[21] * residual.pose.orientation.x;
		xdot.twist.angular.y += K[28] * residual.pose.orientation.y;
		xdot.twist.angular.z += K[35] * residual.pose.orientation.z;

		// Step 7:
		// P = (I - K*H)*P
		// H is still I, so P = (I - K) * P
		xdot.covariance[0] *= ( 1 - K[0] );
		xdot.covariance[7] *= ( 1 - K[7] );
		xdot.covariance[14] *= ( 1 - K[14] );
		xdot.covariance[21] *= ( 1 - K[21] );
		xdot.covariance[28] *= ( 1 - K[28] );
		xdot.covariance[35] *= ( 1 - K[35] );

		// Populate Message
		odom_msg->pose.pose.position.x = tr.getOrigin( ).x( );
		odom_msg->pose.pose.position.y = tr.getOrigin( ).y( );
		odom_msg->pose.pose.position.z = tr.getOrigin( ).z( );
		tf::quaternionTFToMsg( tr.getRotation( ), odom_msg->pose.pose.orientation );

		odom_msg->twist = xdot;

		if( local_frame )
		{
			// Rotate velocity vector to the local frame
			tf::Vector3 tmp;
			tf::Vector3 tmp2;
			tf::vector3MsgToTF( odom_msg->twist.twist.linear, tmp );
			tf::vector3MsgToTF( odom_msg->twist.twist.angular, tmp2 );
			tf::vector3TFToMsg( tf::quatRotate( curr_quat.inverse( ), tmp ), odom_msg->twist.twist.linear );
			tf::vector3TFToMsg( tf::quatRotate( curr_quat.inverse( ), tmp2 ), odom_msg->twist.twist.angular );
		}

		// Done, Publish
		odom_pub.publish( odom_msg );

		// Record this cycle's state for use in the next iteration
		last_delta_twist.twist.linear.x = odom_msg->twist.twist.linear.x - last_twist.twist.linear.x;
		last_delta_twist.twist.linear.y = odom_msg->twist.twist.linear.y - last_twist.twist.linear.y;
		last_delta_twist.twist.linear.z = odom_msg->twist.twist.linear.z - last_twist.twist.linear.z;
		last_delta_twist.twist.angular.x = odom_msg->twist.twist.angular.x - last_twist.twist.angular.x;
		last_delta_twist.twist.angular.y = odom_msg->twist.twist.angular.y - last_twist.twist.angular.y;
		last_delta_twist.twist.angular.z = odom_msg->twist.twist.angular.z - last_twist.twist.angular.z;
		last_transform = tr;
		last_twist = odom_msg->twist;
		last_quat = curr_quat;
	};
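
Because H = I and every matrix above is kept diagonal, each of the six velocity components is effectively run through an independent scalar Kalman filter. Below is a minimal self-contained sketch of that per-element predict/update cycle; the struct, member names, and usage values are illustrative only and are not part of the original class:

// Scalar Kalman filter for one velocity component, mirroring Steps 1-7
// above with H = 1 and a per-element (diagonal) covariance.
struct ScalarKalman
{
  double x;  // state estimate (e.g. one velocity component)
  double p;  // estimate variance
  double q;  // process variance
  double r;  // observation variance

  ScalarKalman(double process_var, double obs_var)
    : x(0.0), p(1.0), q(process_var), r(obs_var) {}

  double update(double z, double f = 1.0)
  {
    // Predict: x = F*x,  P = F*P*F' + Q
    x = f * x;
    p = f * p * f + q;

    // Update: y = z - x,  S = P + R,  K = P/S
    const double y = z - x;
    const double s = p + r;
    const double k = p / s;

    x += k * y;       // x = x + K*y
    p *= (1.0 - k);   // P = (I - K)*P
    return x;
  }
};

// Usage (sketch): feed the finite-difference velocity each cycle.
//   ScalarKalman vx_filter(0.01, 0.04);
//   double vx = vx_filter.update((x_now - x_last) / dt);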