Example No. 1
// If the time is before our current time (or there is no current
// time) then reset back to the beginning.  Whether or not we did
// that, search forwards until we get to or past the time we are
// searching for.
// newtime is an elapsed time from the start of the file
int vrpn_File_Connection::jump_to_time(timeval newtime)
{
	if( d_earliest_user_time_valid )
	{  d_time = vrpn_TimevalSum( d_earliest_user_time, newtime );  }
	else
	{  d_time = vrpn_TimevalSum( d_start_time, newtime );  } // XXX get rid of this option - dtm
    
    // If the time is earlier than where we are, or if we have
    // run past the end (no current entry), jump back to
    // the beginning of the file before searching.
    if ( !d_currentLogEntry || vrpn_TimevalGreater(d_currentLogEntry->data.msg_time, d_time) ) {
        reset();
    }
    
    // Search forwards, as needed.  Do not play the messages as they are
    // passed, just skip over them until we get to a message that has a
    // time greater than or equal to the one we are looking for.  That is,
    // one whose time is not less than ours.
    while (!vrpn_TimevalGreater(d_currentLogEntry->data.msg_time, d_time)) {
      int ret = advance_currentLogEntry();
      if ( ret != 0 ) {
	return 0; // Didn't get where we were going!
      }
    }

    return 1; // Got where we were going!
}
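A minimal usage sketch (not from the original source): "file" stands for an already-opened vrpn_File_Connection, and the five-second target is arbitrary.

// Hedged usage sketch: seek an already-opened file connection to an elapsed
// time of five seconds.  "file" is hypothetical; error handling is minimal.
void seek_five_seconds(vrpn_File_Connection *file)
{
    timeval five_seconds;
    five_seconds.tv_sec = 5;
    five_seconds.tv_usec = 0;
    if (file->jump_to_time(five_seconds) != 1) {
        fprintf(stderr, "Could not seek five seconds into the file\n");
    }
}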
Example No. 2
// plays to an elapsed end_time
int vrpn_File_Connection::play_to_time(timeval end_time)
{
	if( d_earliest_user_time_valid )
	{  return play_to_filetime( vrpn_TimevalSum( d_earliest_user_time, end_time ) );  }
	else
	{  return play_to_filetime( vrpn_TimevalSum( d_start_time, end_time ) );  }
}
Example No. 3
// Computes the difference tv1 - tv2 and returns it in a timeval struct.
// Handles negative results properly, with the appropriate sign on both
// tv_sec and tv_usec (the two signs will match unless one of them is 0).
timeval vrpn_TimevalDiff( const timeval& tv1, const timeval& tv2 )
{
    timeval tv;

    tv.tv_sec = -tv2.tv_sec;
    tv.tv_usec = -tv2.tv_usec;

    return vrpn_TimevalSum( tv1, tv );
}
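For reference, here is a self-contained sketch of the addition-with-normalization that a helper like vrpn_TimevalSum has to perform (illustrative only, not the library's actual implementation); the goal is that tv_sec and tv_usec come out with matching signs, as the comment above promises for the difference.

// Illustrative sketch of timeval addition with normalization; the real
// vrpn_TimevalSum may differ in details.
timeval example_timeval_sum(const timeval &a, const timeval &b)
{
    timeval r;
    r.tv_sec = a.tv_sec + b.tv_sec;
    r.tv_usec = a.tv_usec + b.tv_usec;

    // Carry whole seconds out of the microsecond field.
    while (r.tv_usec >= 1000000) { r.tv_usec -= 1000000; r.tv_sec++; }
    while (r.tv_usec <= -1000000) { r.tv_usec += 1000000; r.tv_sec--; }

    // Make the signs of tv_sec and tv_usec agree.
    if ((r.tv_sec > 0) && (r.tv_usec < 0)) { r.tv_sec--; r.tv_usec += 1000000; }
    if ((r.tv_sec < 0) && (r.tv_usec > 0)) { r.tv_sec++; r.tv_usec -= 1000000; }
    return r;
}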
Example No. 4
// virtual
void vrpn_RedundantTransmission::mainloop (void) {

  queuedMessage * qm;
  queuedMessage ** snitch;
  timeval now;

  if (!d_connection) {
    return;
  }

  //fprintf(stderr, "mainloop:  %d messages queued.\n", d_numMessagesQueued);

  vrpn_gettimeofday(&now, NULL);
  for (qm = d_messageList; qm; qm = qm->next) {
    if ((qm->remainingTransmissions > 0) &&
        vrpn_TimevalGreater(now, qm->nextValidTime)) {
      d_connection->pack_message(qm->p.payload_len, qm->p.msg_time,
                                 qm->p.type, qm->p.sender, qm->p.buffer,
                                 vrpn_CONNECTION_LOW_LATENCY);
      qm->nextValidTime = vrpn_TimevalSum(now, qm->transmissionInterval);
      qm->remainingTransmissions--;
      //fprintf(stderr, "Sending message;  "
      //"%d transmissions remaining at %d.%d int.\n",
      //qm->remainingTransmissions, qm->transmissionInterval.tv_sec,
      //qm->transmissionInterval.tv_usec);
    }
  }

  snitch = &d_messageList;
  qm = *snitch;

  while (qm) {
    if (!qm->remainingTransmissions) {
      *snitch = qm->next;
      delete [] (char *) qm->p.buffer;
      delete qm;
      qm = *snitch;
      d_numMessagesQueued--;
    } else {
      snitch = &qm->next;
      qm = *snitch;
    }
  }




  if ((d_numMessagesQueued && !d_messageList) ||
      (!d_numMessagesQueued && d_messageList)) {
    fprintf(stderr, "vrpn_RedundantTransmission::mainloop():  "
                    "serious internal error.\n");
    d_numMessagesQueued = 0;
    d_messageList = NULL;
  }
}
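The removal pass above walks the queue with a pointer-to-pointer ("snitch") so that unlinking the head and unlinking an interior node are the same operation. A stripped-down sketch of that idiom on a hypothetical node type:

// Minimal sketch of the pointer-to-pointer removal idiom used above.
// "Node" and "expired" are illustrative, not VRPN types.
struct Node { bool expired; Node *next; };

void remove_expired(Node **head)
{
    Node **link = head;            // points at the pointer that reaches *link
    while (*link) {
        if ((*link)->expired) {
            Node *dead = *link;
            *link = dead->next;    // unlink without special-casing the head
            delete dead;
        } else {
            link = &(*link)->next; // advance to the next link field
        }
    }
}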
Example No. 5
void vrpn_Nidaq::report_changes() {
	// always service the nidaq, but only pack messages if there is 
	// a new report and we have a connection.
	
	// getSample will fill in the report with most recent valid
	// data and the time of that data.
	// return value is the number of reports processed by
	// the a/d card since the last getSample call.
	// (if > 1, then we missed a report; if 0, then no new data)
	// if gfAllInertial is filled in, then we will grab the intervening
	// reports as well (note: gfAllInertial does not work properly as 
	// of 1/29/99 weberh).
#ifdef VERBOSE
	int fHadNew=0;
#endif
	if (d_connection) {
		// there is a reading and a connection ... so package it
		
		EnterCriticalSection(&csAnalogBuffer);
		
#ifdef VERBOSE
		fHadNew=fNewData;
#endif
		if (fNewData) {
			fNewData=0;
			// It will actually be sent out when the server calls 
			// mainloop() on the connection object this device uses.
			char rgch[1000];
			int	cChars = vrpn_Analog::encode_to(rgch);
			double dTime = dSampleTime;
			LeaveCriticalSection(&csAnalogBuffer);
			
			struct timeval tv;
			tv = vrpn_TimevalSum(vrpn_MsecsTimeval(dTime*1000.0), 
				tvOffset);
			
			if (d_connection->pack_message(cChars, tv, channel_m_id, d_sender_id, rgch,
				vrpn_CONNECTION_LOW_LATENCY)) {
				cerr << "vrpn_Nidaq::report_changes: cannot write message: tossing.\n";
			}
		} else {
			LeaveCriticalSection(&csAnalogBuffer);
		}
#ifdef VERBOSE
		if (fHadNew) {
			print();
		}
#endif
		
	} else {
		cerr << "vrpn_Nidaq::report_changes: no valid connection.\n";
	}
}
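Read in isolation, the timestamp arithmetic above converts a sample time in seconds on the DAQ clock into a timeval and shifts it onto the local clock; a sketch (the helper name is made up, the VRPN calls are the ones used above):

// Sketch of the timestamp computation above.  dSampleTime is seconds on the
// device clock since it was zeroed; tvOffset maps that clock onto the local
// vrpn_gettimeofday() clock.  The function name is illustrative.
struct timeval nidaq_style_timestamp(double dSampleTime, struct timeval tvOffset)
{
    return vrpn_TimevalSum(vrpn_MsecsTimeval(dSampleTime * 1000.0), tvOffset);
}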
Example No. 6
void vrpn_File_Connection::FileTime_Accumulator::accumulate_to(
    const timeval & now_time )
{
    timeval & accum       = d_filetime_accum_since_last_playback;
    timeval & last_accum  = d_time_of_last_accum;
    
    accum  // updated elapsed filetime
        = vrpn_TimevalSum (         // summed with previous elapsed time
            accum,
            vrpn_TimevalScale(      // scaled by replay rate
                vrpn_TimevalDiff(   // elapsed wallclock time
                    now_time,
                    d_time_of_last_accum ),
                d_replay_rate));
    last_accum = now_time;          // wallclock time of this whole mess
}
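A worked example of one accumulation step at a replay rate of 0.5 (all values invented): 40 ms of elapsed wallclock time scales to 20 ms of file time, which is what gets added to the accumulator.

// Worked sketch of a single accumulate_to() step at replay rate 0.5.
void accumulate_example(void)
{
    timeval last_accum = { 100, 0 };       // wallclock time of the last update
    timeval now_time   = { 100, 40000 };   // 40 ms of wallclock time later
    double  replay_rate = 0.5;

    timeval wall_elapsed = vrpn_TimevalDiff(now_time, last_accum);       // {0, 40000}
    timeval file_elapsed = vrpn_TimevalScale(wall_elapsed, replay_rate); // {0, 20000}
    // file_elapsed (20 ms) is what accumulate_to() would add to
    // d_filetime_accum_since_last_playback for this call.
    (void)file_elapsed;
}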
Example No. 7
int vrpn_Tracker_Liberty::get_report(void)
{
   char errmsg[512];	// Error message to send to VRPN
   int ret;		// Return value from function call to be checked
   unsigned char *bufptr;	// Points into buffer at the current value to read

   //--------------------------------------------------------------------
   // Each report starts with the ASCII 'LY' characters. If we're synching,
   // read a byte at a time until we find the 'LY' pair.
   //--------------------------------------------------------------------
   // For the Patriot this is 'PA'.
   // For the (high speed) Liberty Latus this is 'LU'.

   if (status == vrpn_TRACKER_SYNCING) {

     // Try to get the first sync character if we don't already have it.
     // If none, just return.
     if (got_single_sync_char != 1) {
       ret = vrpn_read_available_characters(serial_fd, buffer, 1);
       if (ret != 1) {
	 //if (VRPN_LIBERTY_DEBUG) fprintf(stderr,"[DEBUG]: Missed First Sync Char, ret= %i\n",ret);
	 return 0;
       }
     }

     // Try to get the second sync character. If none, just return
     ret = vrpn_read_available_characters(serial_fd, &buffer[1], 1);
     if (ret == 1) {
       //Got second sync Char
       got_single_sync_char = 0;
     }
     else if (ret != -1) {
       if (VRPN_LIBERTY_DEBUG) fprintf(stderr,"[DEBUG]: Missed Second Sync Char\n");
       got_single_sync_char = 1;
       return 0;
     }

      // If it is not 'LY', 'PA', or 'LU', we don't want it but we
      // need to look at the next one, so just return and stay
      // in Syncing mode so that we will try again next time through.
      // Also, flush the buffer so that it won't take as long to catch up.
      if (
      ((( buffer[0] == 'L') && (buffer[1] == 'Y')) != 1) 
      && 
      ((( buffer[0] == 'P') && (buffer[1] == 'A')) != 1)
      && 
      ((( buffer[0] == 'L') && (buffer[1] == 'U')) != 1)
      ) 
      {
      	sprintf(errmsg,"While syncing (looking for 'LY' or 'PA' or 'LU', "
		"got '%c%c')", buffer[0], buffer[1]);
	VRPN_MSG_INFO(errmsg);
	vrpn_flush_input_buffer(serial_fd);
	if (VRPN_LIBERTY_DEBUG) fprintf(stderr,"[DEBUGA]: Getting Report - Not LY or PA or LU, Got Character %c %c \n",buffer[0],buffer[1]);
      	return 0;
      }

        if (VRPN_LIBERTY_DEBUG) fprintf(stderr,"[DEBUG]: Getting Report - Got LY or PA or LU\n");

      // Got the first two characters of a report -- go into AWAITING_STATION
      // mode and record that we have two characters so far.  The next
      // bit of code will attempt to read the station byte.
      // The time stored here is as close as possible to when the
      // report was generated.  For the InterSense 900 in timestamp
      // mode, this value will be overwritten later.
      bufcount = 2;
      //      vrpn_gettimeofday(&timestamp, NULL);
      status = vrpn_TRACKER_AWAITING_STATION;
   }

   //--------------------------------------------------------------------
   // The third character of each report is the station number.  Once
   // we know this, we can compute how long the report should be for the
   // given station, based on what values are in its report.
   // The station number is converted into a VRPN sensor number, where
   // the first Liberty station is '1' and the first VRPN sensor is 0.
   //--------------------------------------------------------------------

   if (status == vrpn_TRACKER_AWAITING_STATION) {

      // Try to get a character.  If none, just return.
      if (vrpn_read_available_characters(serial_fd, &buffer[bufcount], 1) != 1) {
      	return 0;
      }
            if (VRPN_LIBERTY_DEBUG) fprintf(stderr,"[DEBUG]: Awaiting Station - Got Station (%i) \n",buffer[2]);

      d_sensor = buffer[2] - 1;	// Convert ASCII 1 to sensor 0 and so on.
      if ( (d_sensor < 0) || (d_sensor >= num_stations) ) {
	   status = vrpn_TRACKER_SYNCING;
      	   sprintf(errmsg,"Bad sensor # (%d) in record, re-syncing", d_sensor);
	   VRPN_MSG_INFO(errmsg);
	   vrpn_flush_input_buffer(serial_fd);
	   return 0;
      }

      // Figure out how long the current report should be based on the
      // settings for this sensor.
      REPORT_LEN = report_length(d_sensor);

      // Got the station byte -- go into PARTIAL mode and record
      // that we got one more character.  The next bit of code
      // will attempt to read the rest of the report.
      bufcount++;
      status = vrpn_TRACKER_PARTIAL;
   }
   
   //--------------------------------------------------------------------
   // Read as many bytes of this report as we can, storing them
   // in the buffer.  We keep track of how many have been read so far
   // and only try to read the rest.  The routine that calls this one
   // makes sure we get a full reading often enough (ie, it is responsible
   // for doing the watchdog timing to make sure the tracker hasn't simply
   // stopped sending characters).
   //--------------------------------------------------------------------

   ret = vrpn_read_available_characters(serial_fd, &buffer[bufcount],
		REPORT_LEN-bufcount);
   if (ret == -1) {
	if (VRPN_LIBERTY_DEBUGA) fprintf(stderr,"[DEBUG]: Error Reading Report\n");
	VRPN_MSG_ERROR("Error reading report");
	status = vrpn_TRACKER_FAIL;
	return 0;
   }
   bufcount += ret;
   if (bufcount < REPORT_LEN) {	// Not done -- go back for more
     if (VRPN_LIBERTY_DEBUG)	fprintf(stderr,"[DEBUG]: Don't have full report (%i of %i)\n",bufcount,REPORT_LEN);
	return 0;
 }

   //--------------------------------------------------------------------
   // We now have enough characters to make a full report. Check to make
   // sure that its format matches what we expect. If it does, the next
   // section will parse it. If it does not, we need to go back into
   // synch mode and ignore this report. A well-formed report starts with
   // 'LY', 'PA', or 'LU' and ends with a space character.
   //--------------------------------------------------------------------
   //	fprintf(stderr,"[DEBUG]: Got full report\n");

   if (
   ((buffer[0] != 'L') || (buffer[1] != 'Y'))
   && 
   ((buffer[0] != 'P') || (buffer[1] != 'A'))
   && 
   ((buffer[0] != 'L') || (buffer[1] != 'U'))
   ) {
     if (VRPN_LIBERTY_DEBUGA)	fprintf(stderr,"[DEBUG]: Don't have LY or PA or 'LU' at beginning");
	   status = vrpn_TRACKER_SYNCING;
	   VRPN_MSG_INFO("Not 'LY' or 'PA' or 'LU' in record, re-syncing");
	   vrpn_flush_input_buffer(serial_fd);
	   return 0;
   }

   if (buffer[bufcount-1] != ' ') {
	   status = vrpn_TRACKER_SYNCING;
	   VRPN_MSG_INFO("No space character at end of report, re-syncing\n");
	   vrpn_flush_input_buffer(serial_fd);
	   if (VRPN_LIBERTY_DEBUGA) fprintf(stderr,"[DEBUG]: Don't have space at end of report, got (%c) sensor %i\n",buffer[bufcount-1], d_sensor);

	   return 0;
   }

   //Decode the error status and output a debug message
   if (buffer[4] != ' ') {
     // An error has been flagged
     if (VRPN_LIBERTY_DEBUGA) fprintf(stderr,"[DEBUG]:Error Flag %i\n",buffer[4]);
   }

   //--------------------------------------------------------------------
   // Decode the X,Y,Z of the position and the W,X,Y,Z of the quaternion
   // (keeping in mind that we store quaternions as X,Y,Z, W).
   //--------------------------------------------------------------------
   // The reports coming from the Liberty are in little-endian order,
   // which is the opposite of the network-standard byte order that is
   // used by VRPN. Here we swap the order to big-endian so that the
   // routines below can pull out the values in the correct order.
   // This is slightly inefficient on machines that have little-endian
   // order to start with, since it means swapping the values twice, but
   // that is more than outweighed by the cleanliness gained by keeping
   // all architecture-dependent code in the vrpn_Shared.C file.
   //--------------------------------------------------------------------

   // Point at the first value in the buffer (position of the X value)
   bufptr = &buffer[8];

   // When copying the positions, convert from inches to meters, since the
   // Liberty reports in inches and VRPN reports in meters.
   pos[0] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr) * INCHES_TO_METERS;
   pos[1] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr) * INCHES_TO_METERS;
   pos[2] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr) * INCHES_TO_METERS;

   // Change the order of the quaternion fields to match quatlib order
   d_quat[Q_W] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr);
   d_quat[Q_X] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr);
   d_quat[Q_Y] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr);
   d_quat[Q_Z] = vrpn_unbuffer_from_little_endian<vrpn_float32>(bufptr);

   //--------------------------------------------------------------------
   // Decode the time from the Liberty system (unsigned 32bit int), add it to the
   // time we zeroed the tracker, and update the report time.  Remember
   // to convert the MILLIseconds from the report into MICROseconds and
   // seconds.
   //--------------------------------------------------------------------

       struct timeval delta_time;   // Time since the clock was reset

       // Read the integer value of the time from the record.
       vrpn_uint32 read_time = vrpn_unbuffer_from_little_endian<vrpn_uint32>(bufptr);

       // Convert from the integer count of MILLIseconds to the struct timeval
       delta_time.tv_sec = (long)(read_time / 1000);	// Integer truncation to seconds
       vrpn_uint32 read_time_milliseconds = read_time - delta_time.tv_sec * 1000;	// Subtract out what we just counted
       delta_time.tv_usec = (long)(read_time_milliseconds * 1000);	// Convert remainder to MICROseconds

       // The time that the report was generated
       timestamp = vrpn_TimevalSum(liberty_zerotime, delta_time);
       vrpn_gettimeofday(&watchdog_timestamp, NULL);	// Set watchdog now       
 
   //--------------------------------------------------------------------
   // If this sensor has a button on it, decode the button values
   // into the button device and mainloop the button device so that
   // it will report any changes.
   //--------------------------------------------------------------------

   if (stylus_buttons[d_sensor]) {
	   // Read the integer value of the button status from the record.
	   vrpn_uint32 button_status = vrpn_unbuffer_from_little_endian<vrpn_uint32>(bufptr);
	   
	   stylus_buttons[d_sensor]->set_button(0, button_status);
	   stylus_buttons[d_sensor]->mainloop();
   }

   //--------------------------------------------------------------------
   // Done with the decoding, set the report to ready
   //--------------------------------------------------------------------

   status = vrpn_TRACKER_SYNCING;
   bufcount = 0;

#ifdef VERBOSE2
      print_latest_report();
#endif

   return 1;
}
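The millisecond-to-timeval conversion in the middle of this routine is self-contained enough to lift out; a sketch of it as a helper (the name is illustrative, the arithmetic is the same as above):

// Sketch of the conversion used above: split an unsigned millisecond counter
// into whole seconds plus the remainder expressed in microseconds.
struct timeval msecs_to_timeval(vrpn_uint32 msecs)
{
    struct timeval t;
    t.tv_sec  = (long)(msecs / 1000);           // whole seconds
    t.tv_usec = (long)((msecs % 1000) * 1000);  // remainder in microseconds
    return t;
}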
Example No. 8
// virtual
int vrpn_File_Connection::mainloop( const timeval * /*timeout*/ )
{
    // XXX timeout ignored for now, needs to be added

    timeval now_time;
    vrpn_gettimeofday(&now_time, NULL);

    if ((d_last_time.tv_sec == 0) && (d_last_time.tv_usec == 0)) {
        // If first iteration, consider 0 time elapsed
        d_last_time = now_time;
        d_filetime_accum.reset_at_time( now_time );
        return 0;
    }
    
    // now_time:    current wallclock time (on method entry)
    // d_last_time: wallclock time of last call to mainloop
    //              (juliano-8/26/99) NO!  It is the time the
    //              wallclock read (at the top of mainloop) when the
    //              last event was played back from the file.
    //              If you call mainloop frequently enough,
    //              these are not necessarily the same!
    //              (may call mainloop too soon and then no event
    //              is played back from the file)
    // d_time:      current time in file
    // end_time:    computed time in file
    // d_rate:      wallclock -> fileclock rate scale factor
    // goal:        compute end_time, then advance to it
    //
    // scale elapsed time by d_rate (rate of replay);
    // this gives us the time to advance (skip_time)
    // our clock to (next_time).
    // -- see note above!
    //
    //const timeval real_elapsed_time  // amount of elapsed wallclock time
    //  = vrpn_TimevalDiff( now_time, d_last_time );
    //const timeval skip_time          // scale it by d_rate
    //    = vrpn_TimevalScale( real_elapsed_time, d_rate );
    //const timeval end_time           // add it to the last file-time
    //    = vrpn_TimevalSum( d_time, skip_time );
    //
    // ------ new way of calculating end_time ------------

    d_filetime_accum.accumulate_to( now_time );
    const timeval end_time = vrpn_TimevalSum(
        d_time, d_filetime_accum.accumulated() );
    
    // (winston) Had to add need_to_play() because at fractional rates
    // (even just 1/10th) the d_time didn't accumulate properly
    // because tiny intervals after scaling were too small
    // for a timeval to represent (1us minimum).
    // 
    // (juliano-8/26/99) if ((end_time - timestamp of next event) < 1us)
    // then you have run out of precision in the struct timeval when
    // need_to_play differences those two timevals.  I.e., they 
    // appear to be the same time.
    // need_to_play will return n:n>1 only if this difference
    // is non-zero.
    // 
    // (juliano-8/25/99) need_to_play is not a boolean function!
    // it returns n:n>0 if you need to play
    //            n=0   if the timevals compare equal
    //            n=-1  if there was an error reading the next event
    //                  from the log file
    const int need_to_play_retval = need_to_play(end_time);

    if (need_to_play_retval > 0) {
        d_last_time = now_time;
        d_filetime_accum.reset_at_time( now_time );
        const int rv = play_to_filetime(end_time);
        return rv;
    } else if (need_to_play_retval == 0) {
        // (winston) we don't set d_last_time so that we can more
        // accurately measure the (larger) interval next time around
        //
        // (juliano-8/26/99) sounds right.  Only set d_last_time
        // if you actually played some event from the file.
        // You may get here if you have your data in more than one
        // file, and are trying to play back from the files in lockstep.
        // The tracker group does this to run the hybrid tracking
        // algorithm on both an inertial data file and a hiball
        // tracker file that were recorded with synchronized clocks.
        return 0;
    } else {
	// return something to indicate there was an error
        // reading the file
	return -1;

        // an error occurred while reading the next event from the file
        // let's close the connection.
        // XXX(jj) is this the right thing to do?
        // XXX(jj) for now, let's leave it how it was
        // XXX(jj) come back to this and do it right
/*
        fprintf( stderr, "vrpn_File_Connection::mainloop(): error reading "
                 "next event from file.  Skipping to end of file. "
                 "XXX Please edit this function and fix it.  It should probably"
                 " close the connection right here and now.\n");
        d_last_time = now_time;
        d_filetime_accum.reset_at_time( now_time );
        return play_to_filetime(end_time);
*/
    }
}
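The precision problem that winston and juliano describe is just the 1 µs granularity of struct timeval; a sketch of the arithmetic (done by hand here so as not to assert anything about vrpn_TimevalScale's rounding):

// Sketch of the precision limit discussed above: at a replay rate of 0.1, a
// 5 us wallclock interval corresponds to 0.5 us of file time, which a struct
// timeval cannot represent, so the per-call increment collapses to zero.
void precision_example(void)
{
    struct timeval wall_elapsed = { 0, 5 };           // 5 us between mainloop calls
    double rate = 0.1;

    double scaled_usec = wall_elapsed.tv_usec * rate; // 0.5 us of file time
    struct timeval file_elapsed;
    file_elapsed.tv_sec  = 0;
    file_elapsed.tv_usec = (long)scaled_usec;         // truncates to 0
    // This is the effect that need_to_play() and the accumulator above are
    // guarding against.
    (void)file_elapsed;
}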
Example No. 9
// This function will get the reports from the intersense dll, then
// put that report into the time, sensor, pos and quat fields, and
// finally call send_report to send it.
void vrpn_Tracker_InterSense::get_report(void)
{
#ifdef  VRPN_INCLUDE_INTERSENSE
  q_vec_type angles;
  ISD_TRACKING_DATA_TYPE data;
  int i;

  if(ISD_GetTrackingData(m_Handle,&data)) {
    for(int station=0;station<ISD_MAX_STATIONS;station++) {
      if(data.Station[station].NewData == TRUE) {

       d_sensor = station;

        //--------------------------------------------------------------------
		// If we are doing IS900 timestamps, decode the time, add it to the
        // time we zeroed the tracker, and update the report time.  Remember
        // to convert the MILLIseconds from the report into MICROseconds and
        // seconds.
        //--------------------------------------------------------------------

        if (do_is900_timestamps) {
          vrpn_float32 read_time = data.Station[station].TimeStamp;

          struct timeval delta_time;   // Time since the clock was reset

          // Convert from the float in MILLIseconds to the struct timeval
          delta_time.tv_sec = (long)(read_time / 1000);	// Integer truncation to seconds
          read_time -= delta_time.tv_sec * 1000;	// Subtract out what we just counted
          delta_time.tv_usec = (long)(read_time * 1000);	// Convert remainder to MICROseconds

          // Store the current time
          timestamp = vrpn_TimevalSum(is900_zerotime, delta_time);
        } else {
	  vrpn_gettimeofday(&timestamp, NULL);	// No device timestamp; use the current time
	}

        //--------------------------------------------------------------------
        // If this sensor has an IS900 button on it, decode
        // the button values into the button device and mainloop the button
        // device so that it will report any changes.  Each button is stored
        // in one bit of the byte, with the lowest-numbered button in the
        // lowest bit.
        //--------------------------------------------------------------------

        if (is900_buttons[station]) {
	    for (i = 0; i < is900_buttons[station]->number_of_buttons(); i++) {
	      is900_buttons[station]->set_button(i, data.Station[station].ButtonState[i]);
	    }
            is900_buttons[station]->mainloop();
        }

        //--------------------------------------------------------------------
        // If this sensor has an IS900 analog on it, decode the analog values
        // into the analog device and mainloop the analog device so that it
        // will report any changes.  The first byte holds the unsigned char
        // representation of left/right.  The second holds up/down.  For each,
        // 0 means min (left or rear), 127 means center and 255 means max.
        //--------------------------------------------------------------------

        if (is900_analogs[station]) {
	  // Normalize the values to the range -1 to 1
	  is900_analogs[station]->setChannelValue(0, (data.Station[station].AnalogData[0] - 127) / 128.0);
	  is900_analogs[station]->setChannelValue(1, (data.Station[station].AnalogData[1] - 127) / 128.0);

	  // Report the new values
	  is900_analogs[station]->report_changes();
	  is900_analogs[station]->mainloop();
	}

	// Copy the tracker data into our internal storage before sending
	// (no unit problem as the Position vector is already in meters, see ISD_STATION_STATE_TYPE)
	// Watch: For some reason, to get consistent rotation and translation axis permutations,
	//        we need a non-direct mapping.
        // RMT: Based on a report from Christian Odom, it seems like the Quaternions in the
        //      Isense are QXYZ, whereas in Quatlib (and VRPN) they are XYZQ.  Once these
        //      are switched correctly, the positions can be read without strange swapping.
	pos[0] = data.Station[station].Position[0];
	pos[1] = data.Station[station].Position[1];
	pos[2] = data.Station[station].Position[2];

	if(m_StationInfo[station].AngleFormat == ISD_QUATERNION) {	
		d_quat[0] = data.Station[station].Quaternion[1];
		d_quat[1] = data.Station[station].Quaternion[2];
		d_quat[2] = data.Station[station].Quaternion[3];
		d_quat[3] = data.Station[station].Quaternion[0];
        } else {
	        // Just return Euler for now...
	        // nahon@virtools needs to convert to radians
		angles[0] = VRPN_DEGREES_TO_RADIANS*data.Station[station].Euler[0];
		angles[1] = VRPN_DEGREES_TO_RADIANS*data.Station[station].Euler[1];
		angles[2] = VRPN_DEGREES_TO_RADIANS*data.Station[station].Euler[2];

		q_from_euler(d_quat, angles[0], angles[1], angles[2]);	
	}

	// have to just send it now
        status = vrpn_TRACKER_REPORT_READY;
//	fprintf(stderr, "sending message len %d\n", len);
	send_report();

	//printf("Isense %f, %f, %f\n",pos[0],pos[1],pos[2]);
	//printf("Isense a:%f, %f, %f : ",angles[0],angles[1],angles[2]); //if the tracker reports a quat, these will be garbage
	//printf("q: %f, %f, %f, %f\n",d_quat[0],d_quat[1],d_quat[2],d_quat[3]);	
      }
    }

  }
#endif
}
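The quaternion reordering that the RMT comment describes (InterSense delivers W,X,Y,Z; quatlib and VRPN store X,Y,Z,W) can be written as a small helper; a sketch (names are illustrative):

// Sketch: reorder a W,X,Y,Z quaternion, as delivered by the InterSense DLL,
// into quatlib/VRPN X,Y,Z,W order.
void reorder_isense_quaternion(const float wxyz[4], q_type xyzw_out)
{
    xyzw_out[Q_X] = wxyz[1];
    xyzw_out[Q_Y] = wxyz[2];
    xyzw_out[Q_Z] = wxyz[3];
    xyzw_out[Q_W] = wxyz[0];
}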
Example No. 10
bool  edt_server::read_image_to_memory(unsigned minX, unsigned maxX,
							unsigned minY, unsigned maxY,
							double exposure_time_millisecs)
{
  u_char *image_p;
  struct timeval now;

  if (!_status) { return false; }

  //---------------------------------------------------------------------
  // XXX Set the exposure time.

  //---------------------------------------------------------------------
  // Set the size of the window to cover all of the pixels that the
  // requested region would include if there were no binning.  This means
  // adding (binning - 1) at the end to cover the pixels that fall within
  // the last bin.
  _minX = minX * _binning;
  _maxX = maxX * _binning + (_binning-1);
  _minY = minY * _binning;
  _maxY = maxY * _binning + (_binning-1);

  //---------------------------------------------------------------------
  // If the maxes are less than the mins (an empty or unset region), set
  // them to the full size of the image.
  if (_maxX < _minX) {
    _minX = 0; _maxX = _num_columns - 1;
  }
  if (_maxY < _minY) {
    _minY = 0; _maxY = _num_rows - 1;
  }

  //---------------------------------------------------------------------
  // Clip collection range to the size of the sensor on the camera.
  if (_minX < 0) { _minX = 0; };
  if (_minY < 0) { _minY = 0; };
  if (_maxX >= _num_columns) { _maxX = _num_columns - 1; };
  if (_maxY >= _num_rows) { _maxY = _num_rows - 1; };

  // If the in-memory buffers have all been filled up, assume
  // that we have missed some unknown number of images.  Save this
  // so that it can be reported if we're sending VRPN messages.
  // Actually, we report this when we drop down to 1 buffer, because that
  // is where the version running on one of our computers hit the floor
  // and started losing frames.
  unsigned outstanding = edt_get_todo((PdvDev*)d_pdv_p) -
                         edt_done_count((PdvDev*)d_pdv_p);
  if ( outstanding <= 1 ) {
    d_missed_some_images = true;
  }

  // Once a second, tell how many buffers have been filled and are waiting
  // for us to process.
  static struct timeval last = { 0, 0 };
  vrpn_gettimeofday(&now, NULL);
  if (now.tv_sec > last.tv_sec) {
    last = now;
    //printf("XXX EDT: %d outstanding buffers\n", outstanding );
  }

  /*
   * get the image and immediately start the next one. Processing
   * can then occur in parallel with the next acquisition
   */
  unsigned int sec_usec[2];
  image_p = pdv_wait_image_timed((PdvDev*)d_pdv_p, sec_usec);
  if (image_p == NULL) {
    fprintf(stderr,"edt_server::read_image_to_memory(): Failed to read image\n");
    pdv_close((PdvDev*)d_pdv_p);
    d_pdv_p = NULL;
    _status = false;
    return false;
  }
  pdv_start_image((PdvDev*)d_pdv_p);
  /*XXX
  if (edt_reg_read((PdvDev*)d_pdv_p, PDV_STAT) & PDV_OVERRUN) {
    XXX_overrun_check; // http://www.edt.com/api/simple__take_8c-source.html
  }
  */

  // XXX We might think that we wanted to use the wait_images functions
  // rather than wait_image, but we do seem to fill up all of the buffers
  // when we can't send data out fast enough.  But it may be worth trying
  // to use it and ask for one at a time, then only start more reads when
  // half of them have been read or something?  And do this using a command
  // to start multiple reads?  Does it re-start every time we call start?
  // If so, what's the point of multiple buffers in the simple_take.c file?
  // Google search for pdv_wait_image_timed turned up pointers to their docs.

  //---------------------------------------------------------------------
  // Time handling: We let the EDT board tell us what time each image
  // was put into the DMA buffer.  We don't know how this compares to
  // the absolute PC clock time, so we record the offset from the first
  // time we got an image (clock reading and EDT reading), so that the
  // time will be reported relative to the computer's clock.
  // If we don't have a nonzero PC time, this is the first time through,
  // so get both initial times.
  struct timeval edt_now = { sec_usec[0], sec_usec[1] };
  if ( d_pc_time_first_image.tv_sec == 0 ) {
    vrpn_gettimeofday(&d_pc_time_first_image, NULL);
    d_edt_time_first_image = edt_now;
  }
  struct timeval time_offset = vrpn_TimevalDiff(d_pc_time_first_image, d_edt_time_first_image);
  d_timestamp = vrpn_TimevalSum( edt_now, time_offset );

  // XXX The EDT-board time handling seems to be broken, so for now we just
  // put on the current time for the frame.
  vrpn_gettimeofday(&now, NULL);
  d_timestamp = now;

  // Check for timeouts in image transfer from the camera into a memory
  // buffer.  This does NOT tell us when we ask for more images than will
  // fit into buffers, but rather when there was a communication error or
  // the system bus was too slow to pull the image off the camera.  We'll
  // need to look at timestamps to determine whether we missed any images
  // due to not having room in the buffers we allocated.

  unsigned timeouts = pdv_timeouts((PdvDev*)d_pdv_p);
  if (timeouts > d_last_timeouts)
  {
      /*
       * pdv_timeout_cleanup helps recover gracefully after a timeout,
       * particularly if multiple buffers were prestarted
       */
      if (d_num_buffers > 1)
      {
	  int     pending = pdv_timeout_cleanup((PdvDev*)d_pdv_p);
	  pdv_start_images((PdvDev*)d_pdv_p, pending);
      }
      d_unreported_timeouts += (timeouts - d_last_timeouts);
      d_last_timeouts = timeouts;
  }

  // If we're supposed to swap every other line, do that here.
  // The image lines are swapped in place.
  if (d_swap_lines) {
    unsigned j;    // Indexes into the lines, skipping every other one.

    for (j = 0; j < _num_rows; j += 2) {
      // Swap line j and line j+1 using the temporary row buffer.
      memcpy(d_swap_buffer, image_p + j*_num_columns, _num_columns);
      memcpy(image_p + j*_num_columns, image_p + (j+1)*_num_columns, _num_columns);
      memcpy(image_p + (j+1)*_num_columns, d_swap_buffer, _num_columns);
    }
  }

  // Point to the image in memory.
  d_buffer = image_p;

  return true;
}
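The timestamp handling above (before the XXX override) amounts to latching a one-time offset between the capture clock and the PC clock and applying it to every later frame; a sketch of that scheme in isolation:

// Sketch of the clock-offset scheme described above.  pc_first and
// capture_first start out zeroed; the first frame latches both clocks, and
// every frame's capture time is then shifted onto the PC clock.
struct timeval pc_time_of_frame(const struct timeval &capture_now,
                                struct timeval &pc_first,
                                struct timeval &capture_first)
{
    if (pc_first.tv_sec == 0) {
        vrpn_gettimeofday(&pc_first, NULL);
        capture_first = capture_now;
    }
    struct timeval offset = vrpn_TimevalDiff(pc_first, capture_first);
    return vrpn_TimevalSum(capture_now, offset);
}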
Example No. 11
int vrpn_Tracker_DeadReckoning_Rotation::test(void)
{
    // Construct a loopback connection to be used by all of our objects.
    vrpn_Connection *c = vrpn_create_server_connection("loopback:");

    // Create a tracker server to be the initiator and a dead-reckoning
    // rotation tracker to use it as a base; have it predict 1 second
    // into the future.
    vrpn_Tracker_Server *t0 = new vrpn_Tracker_Server("Tracker0", c, 2);
    vrpn_Tracker_DeadReckoning_Rotation *t1 =
        new vrpn_Tracker_DeadReckoning_Rotation("Tracker1", c, "*Tracker0", 2, 1);

    // Create a remote tracker to listen to t1 and set up its callbacks for
    // position and velocity reports.  They will fill in the static structures
    // listed above with whatever values they receive.
    vrpn_Tracker_Remote *tr = new vrpn_Tracker_Remote("Tracker1", c);
    tr->register_change_handler(&poseResponse, handle_test_tracker_report);
    tr->register_change_handler(&velResponse, handle_test_tracker_velocity_report);

    // Set up the values in the pose and velocity responses with incorrect values
    // so that things will fail if the class does not perform as expected.
    q_vec_set(poseResponse.pos, 1, 1, 1);
    q_make(poseResponse.quat, 0, 0, 1, 0);
    poseResponse.time.tv_sec = 0;
    poseResponse.time.tv_usec = 0;
    poseResponse.sensor = -1;

    // Send a pose report from sensors 0 and 1 on the original tracker that places
    // them at (1,1,1) and with the identity rotation.  We should get a response for
    // each of them so we should end up with the sensor-1 response matching what we
    // sent, with no change in position or orientation.
    q_vec_type pos = { 1, 1, 1 };
    q_type quat = { 0, 0, 0, 1 };
    struct timeval firstSent;
    vrpn_gettimeofday(&firstSent, NULL);
    t0->report_pose(0, firstSent, pos, quat);
    t0->report_pose(1, firstSent, pos, quat);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    if ( (poseResponse.time.tv_sec != firstSent.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || (poseResponse.quat[Q_W] != 1)
       )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " initial response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << " at time " << poseResponse.time.tv_sec << ":" << poseResponse.time.tv_usec
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 1;
    }
    
    // Send a second tracker report for sensor 0 coming 0.4 seconds later that has
    // translated to position (2,1,1) and rotated by 0.4 * 90 degrees around Z.
    // This should cause a prediction for one second later than this new pose
    // message that has rotated by very close to 1.4 * 90 degrees.
    q_vec_type pos2 = { 2, 1, 1 };
    q_type quat2;
    double angle2 = 0.4 * 90 * M_PI / 180.0;
    q_from_axis_angle(quat2, 0, 0, 1, angle2);
    struct timeval p4Second = { 0, 400000 };
    struct timeval firstPlusP4 = vrpn_TimevalSum(firstSent, p4Second);
    t0->report_pose(0, firstPlusP4, pos2, quat2);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    double x, y, z, angle;
    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusP4.tv_sec + 1)
        || (poseResponse.sensor != 0)
        || (q_vec_distance(poseResponse.pos, pos2) > 1e-10)
        || !isClose(x, 0) || !isClose(y, 0) || !isClose(z, 1)
        || !isClose(angle, 1.4 * 90 * M_PI / 180.0)
       )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 2;
    }

    // Send a velocity report for sensor 1 that has a translation of (1,0,0)
    // and a rotation of 0.4 * 90 degrees per 0.4 second around Z.
    // This should cause a prediction for one second later than the first
    // report that has rotated by very close to 90 degrees.  The translation
    // should be ignored, so the position should be the original position.
    q_vec_type vel = { 0, 0, 0 };
    t0->report_pose_velocity(1, firstPlusP4, vel, quat2, 0.4);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstSent.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(y, 0) || !isClose(z, 1)
        || !isClose(angle, 90 * M_PI / 180.0)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted velocity response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 3;
    }

    // To test the behavior of the prediction code when we're moving around more
    // than one axis, and when we're starting from a non-identity orientation,
    // set sensor 1 to be rotated 180 degrees around X.  Then send a velocity
    // report that will produce a rotation of 180 degrees around Z.  The result
    // should match a prediction of 180 degrees around Y (plus or minus 180, plus
    // or minus Y axis are all equivalent).
    struct timeval oneSecond = { 1, 0 };
    struct timeval firstPlusOne = vrpn_TimevalSum(firstSent, oneSecond);
    q_type quat3;
    q_from_axis_angle(quat3, 1, 0, 0, M_PI);
    t0->report_pose(1, firstPlusOne, pos, quat3);
    q_type quat4;
    q_from_axis_angle(quat4, 0, 0, 1, M_PI);
    t0->report_pose_velocity(1, firstPlusOne, vel, quat4, 1.0);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusOne.tv_sec + 1)
        || (poseResponse.sensor != 1)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(fabs(y), 1) || !isClose(z, 0)
        || !isClose(fabs(angle), M_PI)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose + velocity response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 4;
    }

    // To test the behavior of the prediction code when we're moving around more
    // than one axis, and when we're starting from a non-identity orientation,
    // set sensor 0 to start out at identity.  Then in one second it will be
    // rotated 180 degrees around X.  Then in another second it will be rotated
    // additionally by 90 degrees around Z; the prediction should be another
    // 90 degrees around Z, which should turn out to compose to +/-180 degrees
    // around +/- Y, as in the velocity case above.
    // To make this work, we send a sequence of three poses, one second apart,
    // starting at the original time.  We do this on sensor 0, which has never
    // had a velocity report, so that it will be using the pose-only prediction.
    struct timeval firstPlusTwo = vrpn_TimevalSum(firstPlusOne, oneSecond);
    q_from_axis_angle(quat3, 1, 0, 0, M_PI);
    t0->report_pose(0, firstSent, pos, quat);
    t0->report_pose(0, firstPlusOne, pos, quat3);
    q_from_axis_angle(quat4, 0, 0, 1, M_PI / 2);
    q_type quat5;
    q_mult(quat5, quat4, quat3);
    t0->report_pose(0, firstPlusTwo, pos, quat5);

    t0->mainloop();
    t1->mainloop();
    c->mainloop();
    tr->mainloop();

    q_to_axis_angle(&x, &y, &z, &angle, poseResponse.quat);
    if ((poseResponse.time.tv_sec != firstPlusTwo.tv_sec + 1)
        || (poseResponse.sensor != 0)
        || (q_vec_distance(poseResponse.pos, pos) > 1e-10)
        || !isClose(x, 0) || !isClose(fabs(y), 1.0) || !isClose(z, 0)
        || !isClose(fabs(angle), M_PI)
        )
    {
        std::cerr << "vrpn_Tracker_DeadReckoning_Rotation::test(): Got unexpected"
            << " predicted pose + pose response: pos (" << poseResponse.pos[Q_X] << ", "
            << poseResponse.pos[Q_Y] << ", " << poseResponse.pos[Q_Z] << "), quat ("
            << poseResponse.quat[Q_X] << ", " << poseResponse.quat[Q_Y] << ", "
            << poseResponse.quat[Q_Z] << ", " << poseResponse.quat[Q_W] << ")"
            << " from sensor " << poseResponse.sensor
            << "; axis = (" << x << ", " << y << ", " << z << "), angle = "
            << angle
            << std::endl;
        delete tr;
        delete t1;
        delete t0;
        c->removeReference();
        return 5;
    }

    // Done; delete our objects and return 0 to indicate that
    // everything worked.
    delete tr;
    delete t1;
    delete t0;
    c->removeReference();
    return 0;
}
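The test relies on an isClose() helper that is not part of this excerpt; a plausible definition (the tolerance is an assumption, chosen loosely enough to absorb prediction round-off) is:

// Assumed helper used by the test above; the tolerance value is a guess.
static bool isClose(double a, double b)
{
    return fabs(a - b) < 1e-4;
}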
Example No. 12
void vrpn_Tracker_DeadReckoning_Rotation::sendNewPrediction(vrpn_int32 sensor)
{
    //========================================================================
    // Figure out which rotation state we're supposed to use.
    if (sensor >= d_numSensors) {
        send_text_message(vrpn_TEXT_WARNING)
            << "sendNewPrediction: Asked for sensor " << sensor
            << " but I only have " << d_numSensors
            << "sensors.  Discarding.";
        return;
    }
    vrpn_Tracker_DeadReckoning_Rotation::RotationState &state =
        d_rotationStates[sensor];

    //========================================================================
    // If we haven't had a tracker report yet, nothing to send.
    if (state.d_lastReportTime.tv_sec == 0) {
        return;
    }

    //========================================================================
    // If we don't have permission to estimate velocity and haven't gotten it
    // either, then we just pass along the report.
    if (!state.d_receivedAngularVelocityReport && !d_estimateVelocity) {
        report_pose(sensor, state.d_lastReportTime, state.d_lastPosition,
                    state.d_lastOrientation);
        return;
    }

    //========================================================================
    // Estimate the future orientation based on the current angular velocity
    // estimate and the last reported orientation.  Predict it into the future
    // the amount we've been asked to.

    // Start with the previous orientation.
    q_type newOrientation;
    q_copy(newOrientation, state.d_lastOrientation);

    // Apply the stored per-interval rotation once for every whole rotation
    // interval contained in the prediction time.
    double remaining = d_predictionTime;
    while (remaining > state.d_rotationInterval) {
        q_mult(newOrientation, state.d_rotationAmount, newOrientation);
        remaining -= state.d_rotationInterval;
    }

    // Then rotate it by the remaining fractional amount.
    double fractionTime = remaining / state.d_rotationInterval;
    q_type identity = { 0, 0, 0, 1 };
    q_type fractionRotation;
    q_slerp(fractionRotation, identity, state.d_rotationAmount, fractionTime);
    q_mult(newOrientation, fractionRotation, newOrientation);

    //========================================================================
    // Find out the future time for which we will be predicting by adding the
    // prediction interval to our last report time.
    struct timeval future_time;
    struct timeval delta;
    delta.tv_sec = static_cast<unsigned long>(d_predictionTime);
    double remainder = d_predictionTime - delta.tv_sec;
    delta.tv_usec = static_cast<unsigned long>(remainder * 1e6);
    future_time = vrpn_TimevalSum(delta, state.d_lastReportTime);

    //========================================================================
    // Pack our predicted tracker report for this future time.
    // Use the position portion of the report unaltered.
    if (0 != report_pose(sensor, future_time, state.d_lastPosition, newOrientation)) {
      fprintf(stderr, "vrpn_Tracker_DeadReckoning_Rotation::sendNewPrediction(): Can't report pose\n");
    }
}
// virtual
int vrpn_RedundantTransmission::pack_message(
    vrpn_uint32 len, timeval time, vrpn_uint32 type, vrpn_uint32 sender,
    const char *buffer, vrpn_uint32 class_of_service,
    vrpn_int32 numTransmissions, timeval *transmissionInterval)
{
    queuedMessage *qm;
    int ret;
    int i;

    if (!d_connection) {
        fprintf(stderr, "vrpn_RedundantTransmission::pack_message:  "
                        "Connection not defined!\n");
        return -1;
    }

    if (!d_isEnabled) {
        return d_connection->pack_message(len, time, type, sender, buffer,
                                          class_of_service);
    }

    ret = d_connection->pack_message(len, time, type, sender, buffer,
                                     vrpn_CONNECTION_LOW_LATENCY);

    // TODO:  check ret

    // use defaults?
    if (numTransmissions < 0) {
        numTransmissions = d_numTransmissions;
    }
    if (!transmissionInterval) {
        transmissionInterval = &d_transmissionInterval;
    }

    // fprintf(stderr, "In pack message with %d xmit at %d.%d\n",
    // numTransmissions, transmissionInterval->tv_sec,
    // transmissionInterval->tv_usec);

    if (!numTransmissions) {
        return ret;
    }

    // Special case - if transmissionInterval is 0, we send them all right
    // away, but force VRPN to use separate network packets.
    if (!transmissionInterval->tv_sec && !transmissionInterval->tv_usec) {
        for (i = 0; i < numTransmissions; i++) {
            d_connection->send_pending_reports();
            ret = d_connection->pack_message(len, time, type, sender, buffer,
                                             vrpn_CONNECTION_LOW_LATENCY);
            // TODO:  check ret
        }
        d_connection->send_pending_reports();
        return 0;
    }

    qm = new queuedMessage;
    if (!qm) {
        fprintf(stderr,
                "vrpn_RedundantTransmission::pack_message:  "
                "Out of memory;  can't queue message for retransmission.\n");
        return ret;
    }

    qm->p.payload_len = len;
    qm->p.msg_time = time;
    qm->p.type = type;
    qm->p.sender = sender;
    qm->p.buffer = new char[len];
    if (!qm->p.buffer) {
        fprintf(stderr,
                "vrpn_RedundantTransmission::pack_message:  "
                "Out of memory;  can't queue message for retransmission.\n");
        return ret;
    }
    memcpy((char *)qm->p.buffer, buffer, len);

    qm->remainingTransmissions = numTransmissions;
    qm->transmissionInterval = *transmissionInterval;
    qm->nextValidTime = vrpn_TimevalSum(time, *transmissionInterval);
    qm->next = d_messageList;

    d_numMessagesQueued++;

    // timeval now;
    // vrpn_gettimeofday(&now, NULL);
    // fprintf(stderr, "  Queued message to go at %d.%d (now is %d.%d)\n",
    // qm->nextValidTime.tv_sec, qm->nextValidTime.tv_usec,
    // now.tv_sec, now.tv_usec);

    d_messageList = qm;

    return ret;
}
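A hedged usage sketch of the retransmission API above: queue one message to be sent immediately and then re-sent three more times at 10 ms intervals ("red", the type and sender ids, and the payload are all placeholders).

// Illustrative caller of vrpn_RedundantTransmission::pack_message(); all of
// the names here are hypothetical.
void send_redundantly(vrpn_RedundantTransmission *red,
                      vrpn_uint32 my_type, vrpn_uint32 my_sender)
{
    struct timeval now, interval;
    vrpn_gettimeofday(&now, NULL);
    interval.tv_sec = 0;
    interval.tv_usec = 10000;   // 10 ms between redundant transmissions

    const char payload[] = "example";
    red->pack_message(sizeof(payload), now, my_type, my_sender, payload,
                      vrpn_CONNECTION_LOW_LATENCY, 3, &interval);
    // mainloop() (Example No. 4) then re-packs the message until its
    // remainingTransmissions count reaches zero.
}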