// static
void VRPN_CALLBACK vrpn_Tracker_WiimoteHead::handle_analog_update(void* userdata, const vrpn_ANALOGCB info) {
	vrpn_Tracker_WiimoteHead* wh = static_cast<vrpn_Tracker_WiimoteHead*>(userdata);

	if (!wh) {
		return;
	}
	if (!wh->d_contact) {
#ifdef VERBOSE
		fprintf(stderr, "vrpn_Tracker_WiimoteHead: "
		        "got first report from Wiimote!\n");
#endif
	}

	int i, firstchan;
	wh->d_points = 0; // reset so a report with no visible blobs doesn't leave a stale count
	// Grab all the blobs
	for (i = 0; i < 4; i++) {
		firstchan = i * 3 + 4;
		// -1 should signal a missing blob, but experimentally
		// we sometimes get 0 instead
		if (info.channel[firstchan] > 0
		        && info.channel[firstchan + 1] > 0
		        && info.channel[firstchan + 2] > 0) {
			wh->d_vX[i] = info.channel[firstchan];
			wh->d_vY[i] = info.channel[firstchan + 1];
			wh->d_vSize[i] = info.channel[firstchan + 2];
			wh->d_points = i + 1;
		} else {
			break;
		}
	}

	wh->d_contact = true;
	wh->d_updated = true;

	bool newgrav = false;

	// Check whether the gravity vector changed since the last report
	for (i = 0; i < 3; i++) {
		if (info.channel[1 + i] != wh->d_vGrav[i]) {
			newgrav = true;
			break;
		}
	}

	if (newgrav) {
		if (!wh->d_gravDirty) {
			// only shift the gravity history back if the previous
			// sample was actually consumed (d_gravDirty was cleared)
			q_vec_copy(wh->d_vGravAntepenultimate, wh->d_vGravPenultimate);
			q_vec_copy(wh->d_vGravPenultimate, wh->d_vGrav);
		}
		for (i = 0; i < 3; i++) {
			wh->d_vGrav[i] = info.channel[1 + i];
		}
		wh->d_gravDirty = true;
	}

	// Store the time of the report into the tracker's timestamp field.
	wh->vrpn_Tracker::timestamp = info.msg_time;
}
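For reference, the channel layout this handler assumes (readable from the indexing above): channels 1-3 carry the gravity vector and channels 4-15 carry up to four IR blobs as (x, y, size) triples. A minimal sketch of how such a callback gets wired up from inside the class (the device name and running flag are placeholders, not the class's actual members):

vrpn_Analog_Remote* ana = new vrpn_Analog_Remote("WiimoteForHead@localhost");
ana->register_change_handler(this, &vrpn_Tracker_WiimoteHead::handle_analog_update);
while (running) {
	ana->mainloop();  // delivers new reports to handle_analog_update
}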
Example #2
void VRPN_CALLBACK vrpn_Tracker_FilterOneEuro::handle_tracker_update(void *userdata, const vrpn_TRACKERCB info)
{
  // Get pointer to the object we're dealing with.
  vrpn_Tracker_FilterOneEuro *me = static_cast<vrpn_Tracker_FilterOneEuro *>(userdata);

  // See if this sensor is within our range.  If not, we ignore it.
  if (info.sensor >= me->d_channels) {
    return;
  }

  // Filter the position and orientation and then report the filtered value
  // for this channel.  Keep track of the delta-time, and update our current
  // time so we get the right one next time.
  double dt = vrpn_TimevalDurationSeconds(info.msg_time, me->d_last_report_times[info.sensor]);
  if (dt <= 0) { dt = 1; }  // Avoid divide-by-zero in case of fluke.
  vrpn_float64 pos[3];
  vrpn_float64 quat[4];
  memcpy(pos, info.pos, sizeof(pos));
  memcpy(quat, info.quat, sizeof(quat));
  const vrpn_float64 *filtered = me->d_filters[info.sensor].filter(dt, pos);
  q_vec_copy(me->pos, filtered);
  const double *q_filtered = me->d_qfilters[info.sensor].filter(dt, quat);
  q_normalize(me->d_quat, q_filtered);
  me->timestamp = info.msg_time;
  me->d_sensor = info.sensor;
  me->d_last_report_times[info.sensor] = info.msg_time;

  // Send the filtered report.
  char msgbuf[512];
  int len = me->encode_to(msgbuf);
  if (me->d_connection->pack_message(len, me->timestamp, me->position_m_id,
          me->d_sender_id, msgbuf, vrpn_CONNECTION_LOW_LATENCY)) {
    fprintf(stderr, "vrpn_Tracker_FilterOneEuro: cannot write message: tossing\n");
  }
}
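The d_filters and d_qfilters members implement the One Euro filter, which adapts its low-pass cutoff to the estimated speed; that is why the handler carefully tracks per-sensor dt. Below is not VRPN's implementation, just a self-contained scalar sketch of the update rule, with the usual fcMin/beta/dCutoff tuning parameters:

#include <math.h>

struct OneEuroScalar {
	double fcMin = 1.0, beta = 0.01, dCutoff = 1.0;  // tuning parameters
	double xHat = 0, dxHat = 0;
	bool first = true;

	static double alphaFor(double cutoff, double dt) {
		double tau = 1.0 / (2.0 * M_PI * cutoff);
		return 1.0 / (1.0 + tau / dt);
	}

	double filter(double dt, double x) {
		if (first) { first = false; xHat = x; return x; }
		double dx = (x - xHat) / dt;                 // raw speed estimate
		double aD = alphaFor(dCutoff, dt);
		dxHat = aD * dx + (1 - aD) * dxHat;          // smoothed speed
		double cutoff = fcMin + beta * fabs(dxHat);  // speed-adaptive cutoff
		double a = alphaFor(cutoff, dt);
		xHat = a * x + (1 - a) * xHat;               // filtered sample
		return xHat;
	}
};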
Example #3
static void VRPN_CALLBACK tracker_handler(void *userdata, const vrpn_TRACKERCB t)
{
        // Store the updated position and orientation of the targeted joint.
        // position and orientation are file-scope arrays indexed by sensor.
        q_vec_copy(position[t.sensor], t.pos);
        q_copy(orientation[t.sensor], t.quat);

//	std::cout << "Tracker '" << t.sensor << "' : " << t.pos[0] << "," <<  t.pos[1] << "," << t.pos[2] << std::endl;
}
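A minimal sketch of the setup this handler implies; the device name and N_SENSORS bound are placeholders. Note the handler itself does no bounds check on t.sensor, so the arrays must be sized to cover every sensor the server reports:

#include <vrpn_Tracker.h>
#include <quat.h>

const int N_SENSORS = 16;        // placeholder upper bound
q_vec_type position[N_SENSORS];  // written by tracker_handler
q_type orientation[N_SENSORS];

int main() {
	vrpn_Tracker_Remote tracker("Tracker0@localhost");  // placeholder name
	tracker.register_change_handler(NULL, tracker_handler);
	while (true) {
		tracker.mainloop();  // invokes tracker_handler on each report
	}
}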
Example #4
// Fills in the POSE_INFO structure that is passed in with the data it
// receives.
static void VRPN_CALLBACK handle_test_tracker_report(void *userdata,
    const vrpn_TRACKERCB info)
{
    POSE_INFO *pi = static_cast<POSE_INFO *>(userdata);
    pi->time = info.msg_time;
    pi->sensor = info.sensor;
    q_vec_copy(pi->pos, info.pos);
    q_copy(pi->quat, info.quat);
}
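POSE_INFO itself is defined in the test harness rather than shown here; this sketch of its layout is inferred from the fields the handler writes (VEL_INFO in the next example is analogous, plus a quat_dt field):

typedef struct {
	struct timeval time;   // msg_time of the report
	vrpn_int32 sensor;     // which sensor reported
	vrpn_float64 pos[3];   // position
	vrpn_float64 quat[4];  // orientation quaternion
} POSE_INFO;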
Example #5
// Fills in the VEL_INFO structure that is passed in with the data it
// receives.
static void VRPN_CALLBACK handle_test_tracker_velocity_report(void *userdata,
    const vrpn_TRACKERVELCB info)
{
    VEL_INFO *vi = static_cast<VEL_INFO *>(userdata);
    vi->time = info.msg_time;
    vi->sensor = info.sensor;
    q_vec_copy(vi->pos, info.vel);   // linear velocity (m/s)
    q_copy(vi->quat, info.vel_quat); // rotation accrued over vel_quat_dt
    vi->quat_dt = info.vel_quat_dt;  // seconds that vel_quat covers
}
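In VRPN's convention vel_quat is not a per-second rate: it is the rotation accumulated over vel_quat_dt seconds. A hedged sketch of converting it to a per-second rotation by scaling its axis-angle form with quatlib:

double x, y, z, angle;
q_to_axis_angle(&x, &y, &z, &angle, vi->quat);
q_type perSecond;
q_from_axis_angle(perSecond, x, y, z, angle / vi->quat_dt);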
Example #6
void vrpn_Tracker_DeadReckoning_Rotation::handle_tracker_report(void *userdata,
    const vrpn_TRACKERCB info)
{
    // Find the pointer to the class that registered the callback and get a
    // reference to the RotationState we're supposed to be using.
    vrpn_Tracker_DeadReckoning_Rotation *me =
        static_cast<vrpn_Tracker_DeadReckoning_Rotation *>(userdata);
    if (info.sensor >= me->d_numSensors) {
        me->send_text_message(vrpn_TEXT_WARNING)
            << "Received tracker message from sensor " << info.sensor
            << " but I only have " << me->d_numSensors
            << "sensors.  Discarding.";
        return;
    }
    vrpn_Tracker_DeadReckoning_Rotation::RotationState &state =
        me->d_rotationStates[info.sensor];

    if (!state.d_receivedAngularVelocityReport && me->d_estimateVelocity) {
        // If we have not received any velocity reports, then we estimate
        // the angular velocity using the last report (if any).  The new
        // combined rotation T3 = T2 * T1, where T2 is the difference in
        // rotation between the last time (T1) and now (T3).  We want to
        // solve for T2 (so we can keep applying it going forward).  We
        // find it by right-multiplying both sides of the equation by
        // T1i (inverse of T1): T3 * T1i = T2.
        if (state.d_lastReportTime.tv_sec != 0) {
            q_type inverted;
            q_invert(inverted, state.d_lastOrientation);
            q_mult(state.d_rotationAmount, info.quat, inverted);
            state.d_rotationInterval = vrpn_TimevalDurationSeconds(
                info.msg_time, state.d_lastReportTime);
            // If we get a zero or negative rotation interval, we're
            // not going to be able to handle it, so we set things back
            // to no rotation over a unit-time interval.
            if (state.d_rotationInterval <= 0) {
                state.d_rotationInterval = 1;
                q_make(state.d_rotationAmount, 0, 0, 0, 1);
            }
        }
    }

    // Keep track of the position, orientation and time for the next report
    q_vec_copy(state.d_lastPosition, info.pos);
    q_copy(state.d_lastOrientation, info.quat);
    state.d_lastReportTime = info.msg_time;

    // We have new data, so we send a new prediction.
    me->sendNewPrediction(info.sensor);
}
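A quick concrete check of the T2 = T3 * T1i identity from the comment above, using quatlib (values here are illustrative): if T1 is 30 degrees about Z and T3 is 90 degrees about Z, the recovered per-interval rotation T2 comes out to 60 degrees about Z.

q_type T1, T3, T1i, T2;
q_from_axis_angle(T1, 0, 0, 1, Q_PI / 6.0);  // T1: 30 degrees about Z
q_from_axis_angle(T3, 0, 0, 1, Q_PI / 2.0);  // T3: 90 degrees about Z
q_invert(T1i, T1);
q_mult(T2, T3, T1i);                         // T2: 60 degrees about Z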
void vrpn_Tracker_WiimoteHead::_update_gravity_moving_avg() {
	// Moving average of last three gravity vectors
	/// @todo replace/supplement gravity moving average with Kalman filter
	q_vec_type movingAvg = Q_NULL_VECTOR;

	q_vec_copy(movingAvg, d_vGrav);
	q_vec_add(movingAvg, movingAvg, d_vGravPenultimate);
	q_vec_add(movingAvg, movingAvg, d_vGravAntepenultimate);
	q_vec_scale(movingAvg, 1.0 / 3.0, movingAvg); // exact mean of the three samples

	// reset gravity transform
	MAKE_IDENTITY_QUAT(d_gravityXform.quat);
	MAKE_NULL_VEC(d_gravityXform.xyz);

	q_vec_type regulargravity = Q_NULL_VECTOR;
	regulargravity[2] = 1; // reference "up" vector: +Z

	q_from_two_vecs(d_gravityXform.quat, movingAvg, regulargravity);
	d_gravDirty = false;
}
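q_from_two_vecs yields the rotation that takes the averaged measured gravity onto +Z, so d_gravityXform levels the camera's reports. A hedged sketch of how such a correction can be composed onto a pose with quatlib (where exactly the class applies it is not shown here):

q_xyz_quat_type corrected;
q_xyz_quat_compose(&corrected, &d_gravityXform, &d_currentPose);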
void vrpn_Tracker_WiimoteHead::_convert_pose_to_tracker() {
	q_vec_copy(pos, d_currentPose.xyz); // set position
	q_copy(d_quat, d_currentPose.quat); // set orientation
}
void vrpn_Tracker_WiimoteHead::_update_2_LED_pose(q_xyz_quat_type & newPose) {
	if (d_points != 2) {
		// we simply stop updating our pos+orientation if we lose LEDs
		// TODO: right now, if we don't have exactly 2 points, we lose the lock
		d_lock = false;
		d_flipState = FLIP_UNKNOWN;
		return;
	}

	// TODO: right now we only handle the 2-LED glasses

	d_lock = true;
	double rx, ry, rz;
	rx = ry = rz = 0;

	double X0, X1, Y0, Y1;

	X0 = d_vX[0];
	X1 = d_vX[1];
	Y0 = d_vY[0];
	Y1 = d_vY[1];

	if (d_flipState == FLIP_180) {
		/// If the first report of a new tracking lock indicated that
		/// our "up" vector had no positive y component, we have the
		/// points in the wrong order - flip them around.
		/// This uses the assumption that the first time we see the glasses,
		/// they ought to be right-side up (a reasonable assumption for
		/// head tracking)
		std::swap(X0, X1);
		std::swap(Y0, Y1);
	}

	const double dx = X0 - X1;
	const double dy = Y0 - Y1;
	const double dist = sqrt(dx * dx + dy * dy);
	const double angle = dist * cvtDistToAngle;
	// Note that this is an approximation, since we don't know the
	// distance/horizontal position.  (I think...)

	const double headDist = (d_blobDistance / 2.0) / tan(angle);

	// Translate the distance along z axis, and tilt the head
	newPose.xyz[2] = headDist;      // translate along Z
	rz = atan2(dy, dx);     // rotate around Z

	// Find the sensor pixel of the line of sight - directly between
	// the LEDs
	const double avgX = (X0 + X1) / 2.0;
	const double avgY = (Y0 + Y1) / 2.0;

	/// @todo For some unnerving reason, in release builds, avgX tends to become NaN/undefined
	/// However, any kind of inspection (such as the following, or even a simple cout)
	/// appears to prevent the issue.  This makes me uneasy, but I won't argue with
	/// what is working.
	if (wm_isnan(avgX)) {
		std::cerr << "NaN detected in avgX: X0 = " << X0 << ", X1 = " << X1 << std::endl;
		return;
	}

	if (wm_isnan(avgY)) {
		std::cerr << "NaN detected in avgY: Y0 = " << Y0 << ", Y1 = " << Y1 << std::endl;
		return;
	}

	// b is the virtual depth in the sensor from a point to the full sensor
	// used for finding similar triangles to calculate x/y translation
	const double bHoriz = xResSensor / 2 / tan(fovX / 2);
	const double bVert = yResSensor / 2 / tan(fovY / 2);

	// World head displacement (X and Y) from a centered origin at
	// the calculated distance from the sensor
	newPose.xyz[0] = headDist * (avgX - xResSensor / 2) / bHoriz;
	newPose.xyz[1] = headDist * (avgY - yResSensor / 2) / bVert;

	// set the quat. part of our pose with rotation angles
	q_from_euler(newPose.quat, rz, ry, rx);

	// Apply the new pose
	q_vec_copy(d_currentPose.xyz, newPose.xyz);
	q_copy(d_currentPose.quat, newPose.quat);
}
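A rough numeric illustration of the distance recovery above. All constants are assumptions for the sketch, not the class's actual values (the Wiimote IR camera is roughly 1024x768; FOV and LED spacing vary by source and glasses):

const double fovX = 45.0 * Q_PI / 180.0;          // assumed horizontal FOV
const double xResSensor = 1024.0;                 // assumed sensor width (px)
const double cvtDistToAngle = fovX / xResSensor;  // radians subtended per pixel
const double blobDistance = 0.145;                // assumed LED spacing (m)

const double dist = 100.0;                        // measured blob separation (px)
const double angle = dist * cvtDistToAngle;       // ~0.077 rad
const double headDist = (blobDistance / 2.0) / tan(angle);  // ~0.94 m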