Example #1
void Scan::scanCallBack(const sensor_msgs::LaserScan::ConstPtr& scan)
{
	/* use the beam facing downwards to calculate the average height */
	float sum = 0;												/* sum of available distances detected */
	int count = 0;												/* number of available distances detected */
	int scan_angle = 30;										/* angle used for calculation in deg */
	float intensities = 0;

	float pitch = asin(2 * (q.q0 * q.q2 - q.q3 * q.q1));

	/* only take the beams within the set window (this assumes one beam per degree,
	   with index 90 pointing straight down); shift the window to compensate for pitch */
	for (int i = 90 - scan_angle/2 - RAD2DEG(pitch); i < 90 + scan_angle/2 - RAD2DEG(pitch); i++)
	{
		/* only take the values between the minimum and maximum value of the laser rangefinder */
		if (i >= 0 && i < (int)scan->ranges.size()
			&& scan->ranges[i] > scan->range_min && scan->ranges[i] < scan->range_max)
		{
			float angle = DEG2RAD(i);

			/* convert the distances from the laser rangefinder into coordinates in the body frame */
			Quaternion p_body;
			p_body.q0 = 0;
			p_body.q1 = offset[0] + scan->ranges[i] * cos(angle);
			p_body.q2 = offset[1];
			p_body.q3 = offset[2] - scan->ranges[i] * sin(angle);

			/* convert the coordinates in body frame into ground frame */
			Quaternion p_ground;
			p_ground = q_mult(q_mult(q, p_body), q_inv(q));

			/* the distance to the crop is z in the ground frame */
			sum += p_ground.q3;
			count++;

			//intensities += scan->intensities[i];
		}		
	}

	/* publish the distance to the crop in Distance */
	std_msgs::Float32 Distance;
	/* an empty window would make sum / count NaN; publish -6.0 as a sentinel instead */
	if (count > 0)
	{
		Distance.data = sum / count;
	}
	else
	{
		Distance.data = -6.0f;
	}
	CropDistance.publish(Distance);
	ROS_INFO("\npitch:\t%f \ndist:\t%f \n",RAD2DEG(pitch), Distance.data);
}
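The callback rotates each body-frame point into the ground frame by quaternion conjugation, p_ground = q * p_body * q^-1. The Quaternion struct and the by-value q_mult/q_inv helpers it calls are defined elsewhere in the project; a minimal sketch consistent with the fields used above (q0 as the scalar part, q1..q3 as the vector part) might look like this:

Quaternion q_mult(const Quaternion &a, const Quaternion &b)
{
	/* Hamilton product of two quaternions */
	Quaternion r;
	r.q0 = a.q0 * b.q0 - a.q1 * b.q1 - a.q2 * b.q2 - a.q3 * b.q3;
	r.q1 = a.q0 * b.q1 + a.q1 * b.q0 + a.q2 * b.q3 - a.q3 * b.q2;
	r.q2 = a.q0 * b.q2 - a.q1 * b.q3 + a.q2 * b.q0 + a.q3 * b.q1;
	r.q3 = a.q0 * b.q3 + a.q1 * b.q2 - a.q2 * b.q1 + a.q3 * b.q0;
	return r;
}

Quaternion q_inv(const Quaternion &a)
{
	/* for a unit quaternion the inverse is the conjugate */
	Quaternion r;
	r.q0 = a.q0;
	r.q1 = -a.q1;
	r.q2 = -a.q2;
	r.q3 = -a.q3;
	return r;
}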
Example #2
void vrpn_Poser::set_pose_relative( const timeval t, const vrpn_float64 position_delta[3], 
								   const vrpn_float64 quaternion[4] )
{
    // Update the time
    p_timestamp.tv_sec = t.tv_sec;
    p_timestamp.tv_usec = t.tv_usec;

    // Update the position and quaternion
	p_pos[0] += position_delta[0];
	p_pos[1] += position_delta[1];
	p_pos[2] += position_delta[2];
	q_mult( p_quat, quaternion, p_quat );
}
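set_pose_relative composes the incoming quaternion onto the stored one with quatlib's q_mult(dest, left, right), so passing the identity quaternion leaves the orientation untouched. A hedged usage sketch, assuming a context where the method is accessible (stock VRPN invokes it from within the vrpn_Poser_Server message handlers):

// Nudge the pose 1 cm along X without rotating: the identity
// quaternion (x, y, z, w) = (0, 0, 0, 1) is a no-op under q_mult.
struct timeval now;
vrpn_gettimeofday(&now, NULL);
vrpn_float64 dpos[3] = { 0.01, 0.0, 0.0 };
vrpn_float64 dquat[4] = { 0.0, 0.0, 0.0, 1.0 };
set_pose_relative(now, dpos, dquat);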
Example #3
int vrpn_Poser_Server::handle_relative_change_message(void* userdata,
	    vrpn_HANDLERPARAM p)
{
    vrpn_Poser_Server* me = (vrpn_Poser_Server *)userdata;
    const char* params = (p.buffer);
    int	i;

    // Fill in the parameters to the poser from the message
    if (p.payload_len != (7 * sizeof(vrpn_float64)) ) {
	    fprintf(stderr,"vrpn_Poser_Server: change message payload error\n");
	    fprintf(stderr,"             (got %d, expected %d)\n",
		    p.payload_len, 7 * sizeof(vrpn_float64) );
	    return -1;
    }
    me->p_timestamp = p.msg_time;

	vrpn_float64 dp[3], dq[4];
    for (i = 0; i < 3; i++) {
	    vrpn_unbuffer(&params, &(dp[i]));
    }
    for (i = 0; i < 4; i++) {
	    vrpn_unbuffer(&params, &(dq[i]));
    }

	// apply the requested changes
	for (i = 0; i < 3; i++)
		me->p_pos[i] += dp[i];
	q_mult( me->p_quat, dq, me->p_quat );

    // Check the pose against the max and min values of the workspace
    for (i = 0; i < 3; i++) 
	{
        if (me->p_pos[i] < me->p_pos_min[i]) {
            me->p_pos[i] = me->p_pos_min[i];
        }
        else if (me->p_pos[i] > me->p_pos_max[i]) {
            me->p_pos[i] = me->p_pos_max[i];
        }
    }

    // Now pack the information in a way that the user routine will understand
    vrpn_POSERCB cp;
    cp.msg_time = me->p_timestamp;
    memcpy( cp.pos, dp, sizeof(cp.pos) );
    memcpy( cp.quat, dq, sizeof(cp.quat) );
	// Go down the list of callbacks that have been registered.
	me->d_relative_callback_list.call_handlers(cp);

    return 0;
}
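The handler expects a payload of exactly seven vrpn_float64 values: three position deltas followed by a quaternion delta in (x, y, z, w) order. A sketch of the matching sender-side encoding (stock VRPN does this inside vrpn_Poser_Remote; the values here are illustrative):

vrpn_float64 dp[3] = { 0.0, 0.0, 0.01 };      // position delta
vrpn_float64 dq[4] = { 0.0, 0.0, 0.0, 1.0 };  // identity rotation delta
char buf[7 * sizeof(vrpn_float64)];
char *bufptr = buf;
vrpn_int32 buflen = sizeof(buf);
int i;
for (i = 0; i < 3; i++) {
    vrpn_buffer(&bufptr, &buflen, dp[i]);
}
for (i = 0; i < 4; i++) {
    vrpn_buffer(&bufptr, &buflen, dq[i]);
}
// buf now holds the 7 * sizeof(vrpn_float64) bytes the handler unbuffers,
// ready to ship via vrpn_Connection::pack_message().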
Example #4
void vrpn_Tracker_DeadReckoning_Rotation::handle_tracker_report(void *userdata,
    const vrpn_TRACKERCB info)
{
    // Find the pointer to the class that registered the callback and get a
    // reference to the RotationState we're supposed to be using.
    vrpn_Tracker_DeadReckoning_Rotation *me =
        static_cast<vrpn_Tracker_DeadReckoning_Rotation *>(userdata);
    if (info.sensor >= me->d_numSensors) {
        me->send_text_message(vrpn_TEXT_WARNING)
            << "Received tracker message from sensor " << info.sensor
            << " but I only have " << me->d_numSensors
            << "sensors.  Discarding.";
        return;
    }
    vrpn_Tracker_DeadReckoning_Rotation::RotationState &state =
        me->d_rotationStates[info.sensor];

    if (!state.d_receivedAngularVelocityReport && me->d_estimateVelocity) {
        // If we have not received any velocity reports, then we estimate
        // the angular velocity using the last report (if any).  The new
        // combined rotation T3 = T2 * T1, where T2 is the difference in
        // rotation between the last time (T1) and now (T3).  We want to
        // solve for T2 (so we can keep applying it going forward).  We
        // find it by right-multiplying both sides of the equation by
        // T1i (inverse of T1): T3 * T1i = T2.
        if (state.d_lastReportTime.tv_sec != 0) {
            q_type inverted;
            q_invert(inverted, state.d_lastOrientation);
            q_mult(state.d_rotationAmount, info.quat, inverted);
            state.d_rotationInterval = vrpn_TimevalDurationSeconds(
                info.msg_time, state.d_lastReportTime);
            // If we get a zero or negative rotation interval, we're
            // not going to be able to handle it, so we set things back
            // to no rotation over a unit-time interval.
            if (state.d_rotationInterval <= 0) {
                state.d_rotationInterval = 1;
                q_make(state.d_rotationAmount, 0, 0, 0, 1);
            }
        }
    }

    // Keep track of the position, orientation and time for the next report
    q_vec_copy(state.d_lastPosition, info.pos);
    q_copy(state.d_lastOrientation, info.quat);
    state.d_lastReportTime = info.msg_time;

    // We have new data, so we send a new prediction.
    me->sendNewPrediction(info.sensor);
}
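The velocity-estimation branch solves T2 = T3 * T1i using only q_invert and q_mult. The same pattern works standalone whenever you need the rotation that occurred between two sampled orientations; a small quatlib sketch:

// Given the orientation at the previous report (prev) and at the
// current one (curr), recover the delta rotation for the interval.
q_type prev, curr, prev_inv, delta;
q_from_axis_angle(prev, 0, 0, 1, 0.0);       // T1: identity
q_from_axis_angle(curr, 0, 0, 1, M_PI / 2);  // T3: 90 degrees about Z
q_invert(prev_inv, prev);
q_mult(delta, curr, prev_inv);               // T2 = T3 * T1^-1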
Example #5
void vrpn_Poser::set_pose_velocity_relative( const timeval t, const vrpn_float64 velocity_delta[3], 
											 const vrpn_float64 quaternion[4], const vrpn_float64 interval_delta )
{
    // Update the time
    p_timestamp.tv_sec = t.tv_sec;
    p_timestamp.tv_usec = t.tv_usec;

    // Update the velocity and velocity quaternion
	p_vel[0] += velocity_delta[0];
	p_vel[1] += velocity_delta[1];
	p_vel[2] += velocity_delta[2];
	q_mult( p_vel_quat, quaternion, p_vel_quat );

    // Update the interval
    p_vel_quat_dt += interval_delta;
}
Example #6
int vrpn_Poser_Server::handle_relative_vel_change_message(void* userdata,
	    vrpn_HANDLERPARAM p)
{
	vrpn_Poser_Server* me = (vrpn_Poser_Server*)userdata;
	const char* params = (p.buffer);
	int	i;

	// Fill in the parameters to the poser from the message
	if (p.payload_len != (8 * sizeof(vrpn_float64)) ) {
		fprintf(stderr,"vrpn_Poser_Server: velocity message payload error\n");
		fprintf(stderr,"             (got %d, expected %d)\n",
			p.payload_len, 8 * sizeof(vrpn_float64) );
		return -1;
	}
	me->p_timestamp = p.msg_time;

	vrpn_float64 dv[3], dq[4], di;
	for (i = 0; i < 3; i++) {
	 	vrpn_unbuffer( &params, &(dv[i]) );
	}
	for (i = 0; i < 4; i++) {
		vrpn_unbuffer( &params, &(dq[i]) );
	}
    vrpn_unbuffer(&params, &di);
	
	// apply the requested changes
	for( i = 0; i < 3; i++ )
		me->p_vel[i] += dv[i];
	// compose onto the velocity quaternion, matching set_pose_velocity_relative
	q_mult( me->p_vel_quat, dq, me->p_vel_quat );
	me->p_vel_quat_dt += di;

    // Check the velocity against the max and min values of the workspace
    for (i = 0; i < 3; i++) {
        if (me->p_vel[i] < me->p_vel_min[i]) {
            me->p_vel[i] = me->p_vel_min[i];
        }
        else if (me->p_vel[i] > me->p_vel_max[i]) {
            me->p_vel[i] = me->p_vel_max[i];
        }
    }
    return 0;
}
Example #7
void
q_xyz_quat_compose(q_xyz_quat_type *C_from_A_ptr, 
                   q_xyz_quat_type *C_from_B_ptr, 
                   q_xyz_quat_type *B_from_A_ptr)
{
    q_vec_type rotated_BA_vec;

    /* rotate local xlate into global   */
    q_xform(rotated_BA_vec, C_from_B_ptr->quat, B_from_A_ptr->xyz);

    /* now add the xformed local vec to the unchanged global vec    */
    q_vec_add(C_from_A_ptr->xyz, C_from_B_ptr->xyz, rotated_BA_vec);

    /* compose the rotations    */
    /* CA_rotate = CB_rotate . BA_rotate */
    q_mult(C_from_A_ptr->quat, C_from_B_ptr->quat, B_from_A_ptr->quat);

    q_normalize(C_from_A_ptr->quat, C_from_A_ptr->quat);

} 	/* q_xyz_quat_compose */
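A usage sketch for chaining two frame transforms (the struct fields xyz and quat are the ones the function reads above; the frame names are illustrative):

q_xyz_quat_type world_from_tracker, tracker_from_head, world_from_head;
q_vec_set(world_from_tracker.xyz, 0.0, 1.5, 0.0);
q_from_axis_angle(world_from_tracker.quat, 0, 1, 0, M_PI / 2);
q_vec_set(tracker_from_head.xyz, 0.1, 0.0, 0.0);
q_from_axis_angle(tracker_from_head.quat, 1, 0, 0, 0.0);
/* world_from_head = world_from_tracker composed with tracker_from_head */
q_xyz_quat_compose(&world_from_head, &world_from_tracker, &tracker_from_head);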
Example #8
void	vrpn_IMU_SimpleCombiner::update_matrix_based_on_values(double time_interval)
{
  //==================================================================
  // Adjust the orientation based on the rotational velocity, which is
  // measured in the rotated coordinate system.  We need to rotate the
  // difference vector back to the canonical orientation, apply the
  // orientation change there, and then rotate back.
  // Be sure to scale by the time value.
  q_type forward, inverse;
  q_copy(forward, d_quat);
  q_invert(inverse, forward);
  // Remember that Euler angles in Quatlib have rotation around Z in
  // the first term.  Compute the time-scaled delta transform in
  // canonical space.
  q_type delta;
  q_from_euler(delta,
    time_interval * d_rotational_vel.values[Q_Z],
    time_interval * d_rotational_vel.values[Q_Y],
    time_interval * d_rotational_vel.values[Q_X]);
  // Bring the delta back into canonical space
  q_type canonical;
  q_mult(canonical, delta, inverse);
  q_mult(canonical, forward, canonical);
  q_mult(d_quat, canonical, d_quat);

  //==================================================================
  // To the extent that the acceleration vector's magnitude is equal
  // to the expected gravity, rotate the orientation so that the vector
  // points downward.  This is measured in the rotated coordinate system,
  // so we need to rotate back to canonical orientation and apply
  // the difference there and then rotate back.  The rate of rotation
  // should be as specified in the gravity-rotation-rate parameter so
  // we don't swing the head around too quickly but only slowly re-adjust.

  double accel = sqrt(
      d_acceleration.values[0] * d_acceleration.values[0] +
      d_acceleration.values[1] * d_acceleration.values[1] +
      d_acceleration.values[2] * d_acceleration.values[2] );
  double diff = fabs(accel - 9.80665);

  // Only adjust if we're close enough to the expected gravity
  // @todo In a more complicated IMU tracker, base this on state and
  // error estimates from a Kalman or other filter.
  double scale = 1.0 - diff;
  if (scale > 0) {
    // Get a new forward and inverse matrix from the current, just-
    // rotated matrix.
    q_copy(forward, d_quat);

    // Change how fast we adjust based on how close we are to the
    // expected value of gravity.  Then further scale this by the
    // amount of time since the last estimate.
    double gravity_scale = scale * d_gravity_restore_rate * time_interval;

    // Rotate the gravity vector by the estimated transform.
    // We expect this direction to match the global down (-Y) vector.
    q_vec_type gravity_global;
    q_vec_set(gravity_global, d_acceleration.values[0],
        d_acceleration.values[1], d_acceleration.values[2]);
    q_vec_normalize(gravity_global, gravity_global);
    q_xform(gravity_global, forward, gravity_global);
    //printf("  XXX Gravity: %lg, %lg, %lg\n", gravity_global[0], gravity_global[1], gravity_global[2]);

    // Determine the rotation needed to take gravity and rotate
    // it into the direction of -Y.
    q_vec_type neg_y;
    q_vec_set(neg_y, 0, -1, 0);
    q_type rot;
    q_from_two_vecs(rot, gravity_global, neg_y);

    // Scale the rotation by the fraction of the orientation we
    // should remove based on the time that has passed, how well our
    // gravity vector matches expected, and the specified rate of
    // correction.
    static q_type identity = { 0, 0, 0, 1 };
    q_type scaled_rot;
    q_slerp(scaled_rot, identity, rot, gravity_scale);
    //printf("XXX Scaling gravity vector by %lg\n", gravity_scale);

    // Rotate by this angle.
    q_mult(d_quat, scaled_rot, d_quat);

    //==================================================================
    // If we are getting compass data, and to the extent that the
    // acceleration vector's magnitude is equal to the expected gravity,
    // compute the cross product of the cross product to find the
    // direction of north perpendicular to down.  This is measured in
    // the rotated coordinate system, so we need to rotate back to the
    // canonical orientation and do the comparison there.
    // The fraction of rotation should be as specified in the
    // magnetometer-rotation-rate parameter so we don't swing the head
    // around too quickly but only slowly re-adjust.

    if (d_magnetometer.ana != NULL) {
      // Get a new forward and inverse matrix from the current, just-
      // rotated matrix.
      q_copy(forward, d_quat);

      // Find the North vector that is perpendicular to gravity by
      // clearing its Y axis to zero and renormalizing.
      q_vec_type magnetometer;
      q_vec_set(magnetometer, d_magnetometer.values[0],
        d_magnetometer.values[1], d_magnetometer.values[2]);
      q_vec_type magnetometer_global;
      q_xform(magnetometer_global, forward, magnetometer);
      magnetometer_global[Q_Y] = 0;
      q_vec_type north_global;
      q_vec_normalize(north_global, magnetometer_global);
      //printf("  XXX north_global: %lg, %lg, %lg\n", north_global[0], north_global[1], north_global[2]);

      // Determine the rotation needed to take north and rotate it
      // into the direction of negative Z.
      q_vec_type neg_z;
      q_vec_set(neg_z, 0, 0, -1);
      q_from_two_vecs(rot, north_global, neg_z);

      // Change how fast we adjust based on how close we are to the
      // expected value of gravity.  Then further scale this by the
      // amount of time since the last estimate.
      double north_rate = scale * d_north_restore_rate * time_interval;

      // Scale the rotation by the fraction of the orientation we
      // should remove based on the time that has passed, how well our
      // gravity vector matches expected, and the specified rate of
      // correction.
      static q_type identity = { 0, 0, 0, 1 };
      q_slerp(scaled_rot, identity, rot, north_rate);

      // Rotate by this angle.
      q_mult(d_quat, scaled_rot, d_quat);
    }
  }

  //==================================================================
  // Compute and store the velocity information
  // This will be in the rotated coordinate system, so we need to
  // rotate back to the identity orientation before storing.
  // Convert from radians/second into a quaternion rotation as
  // rotated in a hundredth of a second and set the rotational
  // velocity dt to a hundredth of a second so that we don't
  // risk wrapping.
  // Remember that Euler angles in Quatlib have rotation around Z in
  // the first term.  Compute the time-scaled delta transform in
  // canonical space.
  q_from_euler(delta,
    1e-2 * d_rotational_vel.values[Q_Z],
    1e-2 * d_rotational_vel.values[Q_Y],
    1e-2 * d_rotational_vel.values[Q_X]);
  // Bring the delta back into canonical space
  q_mult(canonical, delta, inverse);
  q_mult(vel_quat, forward, canonical);
  vel_quat_dt = 1e-2;
}
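The conjugation pattern this function applies twice (once for the live orientation update, once for the stored velocity) can be read in isolation: a delta expressed in the sensor's rotated frame is moved into canonical space as q * delta * q^-1 before being composed onto the orientation. A minimal sketch:

q_type orientation, delta_local, fwd, inv, canonical;
q_from_axis_angle(orientation, 0, 1, 0, M_PI / 4);  // current estimate
q_from_axis_angle(delta_local, 0, 0, 1, 0.01);      // small step in the local frame
q_copy(fwd, orientation);
q_invert(inv, fwd);
q_mult(canonical, delta_local, inv);          // delta * q^-1
q_mult(canonical, fwd, canonical);            // q * delta * q^-1
q_mult(orientation, canonical, orientation);  // apply in canonical space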
Example #9
void vrpn_Tracker_OSVRHackerDevKit::on_data_received(std::size_t bytes,
                                                     vrpn_uint8 *buffer)
{
    if (bytes != 32 && bytes != 16) {
        send_text_message(vrpn_TEXT_WARNING)
            << "Received a report " << bytes
            << " in length, but expected it to be 32 or 16 bytes. Discarding. "
               "(May indicate issues with HID!)";
        return;
    }

    vrpn_uint8 firstByte = vrpn_unbuffer_from_little_endian<vrpn_uint8>(buffer);

    vrpn_uint8 version = vrpn_uint8(0x0f) & firstByte;
    _reportVersion = version;

    switch (version) {
    case 1:
        if (bytes != 32 && bytes != 16) {
            send_text_message(vrpn_TEXT_WARNING)
                << "Received a v1 report " << bytes
                << " in length, but expected it to be 32 or 16 bytes. "
                   "Discarding. "
                   "(May indicate issues with HID!)";
            return;
        }
        break;
    case 2:
        if (bytes != 16) {
            send_text_message(vrpn_TEXT_WARNING)
                << "Received a v2 report " << bytes
                << " in length, but expected it to be 16 bytes. Discarding. "
                   "(May indicate issues with HID!)";
            return;
        }
        break;

    case 3:
        /// @todo once this report format is finalized, tighten up the
        /// requirements.
        if (bytes < 16) {
            send_text_message(vrpn_TEXT_WARNING)
                << "Received a v3 report " << bytes
                << " in length, but expected it to be at least 16 bytes. "
                   "Discarding. "
                   "(May indicate issues with HID!)";
            return;
        }
        break;
    default:
        /// Highlight that we don't know this report version well...
        _knownVersion = false;
        /// Do a minimal check of it.
        if (bytes < 16) {
            send_text_message(vrpn_TEXT_WARNING)
                << "Received a report claiming to be version " << int(version)
                << " that was " << bytes << " in length, but expected it to be "
                                            "at least 16 bytes. Discarding. "
                                            "(May indicate issues with HID!)";
            return;
        }
        break;
    }

    // Report version as analog channel 0.
    channel[0] = version;

    vrpn_uint8 msg_seq = vrpn_unbuffer_from_little_endian<vrpn_uint8>(buffer);

    // Signed, 16-bit, fixed-point numbers in Q1.14 format.
    typedef vrpn::FixedPoint<1, 14> FixedPointValue;
    d_quat[Q_X] =
        FixedPointValue(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
            .get<vrpn_float64>();
    d_quat[Q_Y] =
        FixedPointValue(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
            .get<vrpn_float64>();
    d_quat[Q_Z] =
        FixedPointValue(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
            .get<vrpn_float64>();
    d_quat[Q_W] =
        FixedPointValue(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
            .get<vrpn_float64>();

    vrpn_Tracker::timestamp = _timestamp;
    {
        char msgbuf[512];
        int len = vrpn_Tracker::encode_to(msgbuf);
        if (d_connection->pack_message(len, _timestamp, position_m_id,
                                       d_sender_id, msgbuf,
                                       vrpn_CONNECTION_LOW_LATENCY)) {
            fprintf(stderr, "vrpn_Tracker_OSVRHackerDevKit: cannot write "
                            "message: tossing\n");
        }
    }
    if (version >= 2) {
        // We've got angular velocity in this message too
        // Given XYZ radians per second velocity.
        // Signed Q6.9
        typedef vrpn::FixedPoint<6, 9> VelFixedPoint;
        q_vec_type angVel;
        angVel[0] =
            VelFixedPoint(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
                .get<vrpn_float64>();
        angVel[1] =
            VelFixedPoint(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
                .get<vrpn_float64>();
        angVel[2] =
            VelFixedPoint(vrpn_unbuffer_from_little_endian<vrpn_int16>(buffer))
                .get<vrpn_float64>();

        //==================================================================
        // Determine the rotational velocity, which is
        // measured in the rotated coordinate system.  We need to rotate the
        // difference Euler angles back to the canonical orientation, apply
        // the change, and then rotate back to change our coordinates.
        // Be sure to scale by the time value vrpn_HDK_DT.
        q_type forward, inverse;
        q_copy(forward, d_quat);
        q_invert(inverse, forward);
        // Remember that Euler angles in Quatlib have rotation around Z in
        // the first term.  Compute the time-scaled delta transform in
        // canonical space.
        q_type delta;
        {
            delta[Q_W] = 0;
            delta[Q_X] = angVel[Q_X] * vrpn_HDK_DT * 0.5;
            delta[Q_Y] = angVel[Q_Y] * vrpn_HDK_DT * 0.5;
            delta[Q_Z] = angVel[Q_Z] * vrpn_HDK_DT * 0.5;
            q_exp(delta, delta);
            q_normalize(delta, delta);
        }
        // Bring the delta back into canonical space
        q_type canonical;
        q_mult(canonical, delta, inverse);
        q_mult(vel_quat, forward, canonical);

        // Send the rotational velocity information.
        // The dt value was set once, in the constructor.
        char msgbuf[512];
        int len = vrpn_Tracker::encode_vel_to(msgbuf);
        if (d_connection->pack_message(len, _timestamp, velocity_m_id,
                                       d_sender_id, msgbuf,
                                       vrpn_CONNECTION_LOW_LATENCY)) {
            fprintf(stderr, "vrpn_Tracker_OSVRHackerDevKit: cannot write "
                            "message: tossing\n");
        }
    }
    if (version < 3) {
        // No status info hidden in the first byte.
        channel[1] = STATUS_UNKNOWN;
    }
    else {
        // v3+: We've got status info in the upper nibble of the first byte.
        bool gotVideo = (firstByte & (0x01 << 4)) != 0;    // got video?
        bool gotPortrait = (firstByte & (0x01 << 5)) != 0; // portrait mode?
        if (!gotVideo) {
            channel[1] = STATUS_NO_VIDEO_INPUT;
        }
        else {
            if (gotPortrait) {
                channel[1] = STATUS_PORTRAIT_VIDEO_INPUT;
            }
            else {
                channel[1] = STATUS_LANDSCAPE_VIDEO_INPUT;
            }
        }
    }

    if (_messageCount == 0) {
        // When _messageCount overflows, send a report whether or not there was
        // a change.
        vrpn_Analog::report();
    }
    else {
        // otherwise just report if we have a change.
        vrpn_Analog::report_changes();
    }
    _messageCount = (_messageCount + 1) % vrpn_HDK_STATUS_STRIDE;
}
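The orientation words are signed 16-bit samples in Q1.14 fixed point: one integer bit and fourteen fractional bits, so the conversion divides the raw value by 2^14. A hedged standalone equivalent of what vrpn::FixedPoint<1, 14> computes (the real template may differ in rounding details):

static double q1_14_to_double(vrpn_int16 raw)
{
    return static_cast<double>(raw) / 16384.0; // 2^14
}
// Likewise the Q6.9 angular-velocity words decode as raw / 512.0 (2^9),
// in radians per second.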
Example #10
int vrpn_Tracker_DeadReckoning_Rotation::test(void)
{
    // Construct a loopback connection to be used by all of our objects.
    vrpn_Connection *c = vrpn_create_server_connection("loopback:");

    // Create a tracker server to be the initiator and a dead-reckoning
    // rotation tracker to use it as a base; have it predict 1 second
    // into the future.
    vrpn_Tracker_Server *t0 = new vrpn_Tracker_Server("Tracker0", c, 2);
    vrpn_Tracker_DeadReckoning_Rotation *t1 =
        new vrpn_Tracker_DeadReckoning_Rotation("Tracker1", c, "*Tracker0", 2, 1);

    // Create a remote tracker to listen to t1 and set up its callbacks for
    // position and velocity reports.  They will fill in the static structures
    // listed above with whatever values they receive.
    vrpn_Tracker_Remote *tr = new vrpn_Tracker_Remote("Tracker1", c);
    tr->register_change_handler(&poseResponse, handle_test_tracker_report);
    tr->register_change_handler(&velResponse, handle_test_tracker_velocity_report);

    // Set up the values in the pose and velocity responses with incorrect values
    // so that things will fail if the class does not perform as expected.
    q_vec_set(poseResponse.pos, 1, 1, 1);
    q_make(poseResponse.quat, 0, 0, 1, 0);
    poseResponse.time.tv_sec = 0;
    poseResponse.time.tv_usec = 0;
    poseResponse.sensor = -1;

    // Send a pose report from sensors 0 and 1 on the original tracker that places
    // them at (1,1,1) and with the identity rotation.  We should get a response for
    // each of them so we should end up with the sensor-1 response matching what we
    // sent, with no change in position or orientation.
    q_vec_type pos = { 1, 1, 1 };
    q_type quat = { 0, 0, 0, 1 };
    struct timeval firstSent;
    vrpn_gettimeofday(&firstSent, NULL);
    t0->report_pose(0, firstSent, pos, quat);
    t0->report_pose(1, firstSent, pos, quat);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    if ( (poseResponse.time.tv_sec != firstSent.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || (poseResponse.quat[Q_W] != 1)
       )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " initial response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << " at time " << poseResponse.time.tv_sec << ":" << poseResponse.time.tv_usec
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 1;
    }
    
    // Send a second tracker report for sensor 0 coming 0.4 seconds later that has
    // translated to position (2,1,1) and rotated by 0.4 * 90 degrees around Z.
    // This should cause a prediction for one second later than this new pose
    // message that has rotated by very close to 1.4 * 90 degrees.
    q_vec_type pos2 = { 2, 1, 1 };
    q_type quat2;
    double angle2 = 0.4 * 90 * M_PI / 180.0;
    q_from_axis_angle(quat2, 0, 0, 1, angle2);
    struct timeval p4Second = { 0, 400000 };
    struct timeval firstPlusP4 = vrpn_TimevalSum(firstSent, p4Second);
    t0->report_pose(0, firstPlusP4, pos2, quat2);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    double x, y, z, angle;
    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusP4.tv_sec + 1)
        || (poseResponse.sensor != 0)
        || (q_vec_distance(poseResponse.pos, pos2) > 1e-10)
        || !isClose(x, 0) || !isClose(y, 0) || !isClose(z, 1)
        || !isClose(angle, 1.4 * 90 * M_PI / 180.0)
       )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 2;
    }

    // Send a velocity report for sensor 1 with zero translational velocity,
    // rotating by 0.4 * 90 degrees per 0.4 of a second around Z.
    // This should cause a prediction for one second later than the first
    // report that has rotated by very close to 90 degrees.  The translation
    // should be ignored, so the position should be the original position.
    q_vec_type vel = { 0, 0, 0 };
    t0->report_pose_velocity(1, firstPlusP4, vel, quat2, 0.4);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstSent.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(y, 0) || !isClose(z, 1)
        || !isClose(angle, 90 * M_PI / 180.0)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted velocity response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 3;
    }

    // To test the behavior of the prediction code when we're moving around more
    // than one axis, and when we're starting from a non-identity orientation,
    // set sensor 1 to be rotated 180 degrees around X.  Then send a velocity
    // report that will produce a rotation of 180 degrees around Z.  The result
    // should match a prediction of 180 degrees around Y (plus or minus 180, plus
    // or minus Y axis are all equivalent).
    struct timeval oneSecond = { 1, 0 };
    struct timeval firstPlusOne = vrpn_TimevalSum(firstSent, oneSecond);
    q_type quat3;
    q_from_axis_angle(quat3, 1, 0, 0, M_PI);
    t0->report_pose(1, firstPlusOne, pos, quat3);
    q_type quat4;
    q_from_axis_angle(quat4, 0, 0, 1, M_PI);
    t0->report_pose_velocity(1, firstPlusOne, vel, quat4, 1.0);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusOne.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(fabs(y), 1) || !isClose(z, 0)
        || !isClose(fabs(angle), M_PI)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose + velocity response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 4;
    }

    // To test the behavior of the prediction code when we're moving around more
    // than one axis, and when we're starting from a non-identity orientation,
    // set sensor 0 to start out at identity.  Then in one second it will be
    // rotated 180 degrees around X.  Then in another second it will be rotated
    // additionally by 90 degrees around Z; the prediction should be another
    // 90 degrees around Z, which should turn out to compose to +/-180 degrees
    // around +/- Y, as in the velocity case above.
    // To make this work, we send a sequence of three poses, one second apart,
    // starting at the original time.  We do this on sensor 0, which has never
    // had a velocity report, so that it will be using the pose-only prediction.
    struct timeval firstPlusTwo = vrpn_TimevalSum(firstPlusOne, oneSecond);
    q_from_axis_angle(quat3, 1, 0, 0, M_PI);
    t0->report_pose(0, firstSent, pos, quat);
    t0->report_pose(0, firstPlusOne, pos, quat3);
    q_from_axis_angle(quat4, 0, 0, 1, M_PI / 2);
    q_type quat5;
    q_mult(quat5, quat4, quat3);
    t0->report_pose(0, firstPlusTwo, pos, quat5);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusTwo.tv_sec + 1)
        || (poseResponse.sensor != 0)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(fabs(y), 1.0) || !isClose(z, 0)
        || !isClose(fabs(angle), M_PI)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose + pose response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << "; axis = (" << x << ", " << y << ", " << z << "), angle = "
            << angle
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 5;
    }

    // Done; delete our objects and return 0 to indicate that
    // everything worked.
    delete tr;
    delete t1;
    delete t0;
    c->removeReference();
    return 0;
}
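The comparisons above rely on an isClose() helper defined elsewhere in the same VRPN source file; a minimal version consistent with how it is called (an absolute tolerance on doubles; the exact epsilon here is an assumption) would be:

static bool isClose(double a, double b)
{
    return fabs(a - b) < 1e-6;
}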
Example #11
void vrpn_Tracker_DeadReckoning_Rotation::sendNewPrediction(vrpn_int32 sensor)
{
    //========================================================================
    // Figure out which rotation state we're supposed to use.
    if (sensor >= d_numSensors) {
        send_text_message(vrpn_TEXT_WARNING)
            << "sendNewPrediction: Asked for sensor " << sensor
            << " but I only have " << d_numSensors
            << "sensors.  Discarding.";
        return;
    }
    vrpn_Tracker_DeadReckoning_Rotation::RotationState &state =
        d_rotationStates[sensor];

    //========================================================================
    // If we haven't had a tracker report yet, nothing to send.
    if (state.d_lastReportTime.tv_sec == 0) {
        return;
    }

    //========================================================================
    // If we don't have permission to estimate velocity and haven't gotten it
    // either, then we just pass along the report.
    if (!state.d_receivedAngularVelocityReport && !d_estimateVelocity) {
        report_pose(sensor, state.d_lastReportTime, state.d_lastPosition,
                    state.d_lastOrientation);
        return;
    }

    //========================================================================
    // Estimate the future orientation based on the current angular velocity
    // estimate and the last reported orientation.  Predict it into the future
    // the amount we've been asked to.

    // Start with the previous orientation.
    q_type newOrientation;
    q_copy(newOrientation, state.d_lastOrientation);

    // Rotate it by the amount to rotate once for every integral multiple
    // of the rotation time we've been asked to go.
    double remaining = d_predictionTime;
    while (remaining > state.d_rotationInterval) {
        q_mult(newOrientation, state.d_rotationAmount, newOrientation);
        remaining -= state.d_rotationInterval;
    }

    // Then rotate it by the remaining fractional amount.
    double fractionTime = remaining / state.d_rotationInterval;
    q_type identity = { 0, 0, 0, 1 };
    q_type fractionRotation;
    q_slerp(fractionRotation, identity, state.d_rotationAmount, fractionTime);
    q_mult(newOrientation, fractionRotation, newOrientation);

    //========================================================================
    // Find out the future time for which we will be predicting by adding the
    // prediction interval to our last report time.
    struct timeval future_time;
    struct timeval delta;
    delta.tv_sec = static_cast<unsigned long>(d_predictionTime);
    double remainder = d_predictionTime - delta.tv_sec;
    delta.tv_usec = static_cast<unsigned long>(remainder * 1e6);
    future_time = vrpn_TimevalSum(delta, state.d_lastReportTime);

    //========================================================================
    // Pack our predicted tracker report for this future time.
    // Use the position portion of the report unaltered.
    if (0 != report_pose(sensor, future_time, state.d_lastPosition, newOrientation)) {
      fprintf(stderr, "vrpn_Tracker_DeadReckoning_Rotation::sendNewPrediction(): Can't report pose\n");
    }
}
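The fractional step at the end uses a standard trick: slerping from the identity quaternion toward a full rotation step scales the rotation angle by the interpolation parameter. A small sketch, where a quarter of a 90-degree step yields a 22.5-degree rotation:

q_type identity = { 0, 0, 0, 1 };
q_type full_step, partial_step;
q_from_axis_angle(full_step, 0, 0, 1, M_PI / 2);  // 90 degrees about Z
q_slerp(partial_step, identity, full_step, 0.25); // 22.5 degrees about Z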