Example #1
/*
  main routine

  Program entry point
*/
int main(void)
{
  Uint8   TxBuf[BCMsgSize];
  Uint8   RxBuf[BCMsgSize];
  Uint8   Address;            // Address of device in bay
  Uint8   Destination;        // Address we will reply to
  Uint8   Param,Param2;
  Uint8   Volume = 0;
  Uint8   TempInt = 0;
#ifdef CTRL_LED
  Uint8   RceiveAddress;
  Uint8   i = 0;
#endif
  MainInit();

  UART_TxStr("\r\nPower up\r\n");

  UART_Rx(RxData, 9);             // Input register setting command

  Address = BCAOutput + ReadPosition();   // Bus address = output-device base + this board's bay position
  BCMessageInit(Address);         // Set up the UART
  BCMessageReceive(RxBuf);        // Kick off receive

  if (ReadProductID())              // Product ID: 1:pill 2:beats box 3:rave
    ChannelNumbers = 1;
  else
    ChannelNumbers = 5;             // 0: 5-position headphone
  // Enter the main loop
  LED1_ON();
  LED2_ON();
  LED_GRAPHICAL_ON();
  SetLamps(3);
  for( ; ; ) {                              // Run forever
    Timer_Clear();
    DelayMS(LoopRate);
    TempInt++;                              // Free-running loop counter (also reused as a scratch index below)
    SettingsControl();
    if (BCRXAvail)
    {                        // We have a new message
      // Dump the received frame over the debug UART (when SecondUART + DumpComms are enabled)
#ifdef  SecondUART
#ifdef DumpComms
      UART_TxStr("Receive: ");
      for (TempInt = BCPSOH; TempInt <= BCPChecksum; TempInt++)
      {
        UART_TxUint8(RxBuf[TempInt]);
        UART_TxChar(' ');
      }
      UART_TxStr("\r\n");
#endif
#endif
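      /* Address byte layout: high nibble = source address, low nibble =
         destination address.  A reply swaps the nibbles, e.g. an incoming
         0x2A (master 0x2 -> us 0xA) would be answered with 0xA2. */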

      if ((RxBuf[BCPAddr] & 0b1111) == Address) // Check it is for us
      {
        Destination = RxBuf[BCPAddr] >> 4;  // Pre-setup assuming we will reply
        Destination &= 0b1111;
        Destination |= Address << 4;
        TxBuf[BCPAddr] = Destination;
        DelayMS(2);                         // Allow line turn around delay
        switch (RxBuf[BCPType])
        {
          case BCTInquire:                  // Master request of slave ID
            TxBuf[BCPType] = BCTInquireAnswer;
            TxBuf[BCPParam1] = ReadProductID();
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            break;
          case BCTLamps: // Set lamps
            TxBuf[BCPType]   = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf, true);           // Send the reply
            Param =  RxBuf[BCPParam1];
            SetLamps(Param);
            break;

          case BCTVolume:                   // Volume set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Volume = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            Volume_Set(Volume,Param2);
            break;

          case BCTHeadphoneChGain:          // Headphone channel gain set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            SetChannelAdjust(Param,Param2);
            break;

          case BCTHeadphoneChMax:           // Headphone channel max volume set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            SetChMaxVolume(Param, Param2);
            break;

          case BCTAudioFormat:              // Audio format set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            WM8960_SetAudioFormat(Param);
            break;


          case BCTBrightness: // Set lamp brightness
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf, true);           // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            if (Param == 1)
              LED1 = Param2;
            else if(Param == 2)
              LED2 = Param2;
            break;

          case BCTReset:                    // Reset command
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
            asm("jmp 0x0000");//reset
            break;

          default:  // Unknown command
            TxBuf[BCPType] = BCTNAck;
            TxBuf[BCPParam1] = BCNUnkownType;
            TxBuf[BCPParam2] = RxBuf[BCPType];
            BCMessageSend(TxBuf,true);      // Send the reply
           // BCMessageReceive(RxBuf);        // Kick off receive of next frame
            break;
        }

        // Send a play-track command to the 031-517-209 (LCD) board to switch the TV track
#if VideoTrackCtrl
        static Uint8   VideoTrack;          // static so the values persist across loop iterations
        static Uint8   SendVideoFlag;       // zero-initialised; armed once the volume drops to 47 or below
        if(Volume > 47)
        {
          if(SendVideoFlag)
          {
            SendVideoFlag = false;

            if(Address == BCARightBay)
            {
              VideoTrack = 1;
            }
            else if(Address == BCALeftBay)
            {
              VideoTrack = 2;
            }

            TxBuf[BCPAddr] = Address << 4;
            TxBuf[BCPAddr] |= BDCLCD;
            TxBuf[BCPType]   = BCTPlayTrack;
            TxBuf[BCPParam1] = VideoTrack;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf, true);           // Send the reply
          }
        }
        else
        {
          SendVideoFlag = 1;
        }
#endif
      }


      // CTRL_LED build: drive this board's own LED from the volume traffic seen on the bus
#ifdef CTRL_LED
      if(RxBuf[BCPType] == BCTVolume)
      {
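        /* Track the volume last commanded to every bay: light LED1 when this
           bay is the one playing (volume above 47), turn it off when another
           bay is playing, and turn it back on when all four bays are muted. */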
        RceiveAddress = RxBuf[BCPAddr] & 0b1111;
        a_Volume[RceiveAddress-BCAOutput] = RxBuf[BCPParam1];

        if((a_Volume[RceiveAddress-BCAOutput] > 47) && (LastVolume[RceiveAddress-BCAOutput] > 47))//have volume
        {
          if((RceiveAddress) == Address)
          {
            LED1_ON();
          }
          else
          {
            LED1_OFF();
          }
        }

        i = 0;
        for(TempInt = 0; TempInt < 4; TempInt++)  // Count bays where the volume is off
        {
          if((a_Volume[TempInt] <= 47) && (LastVolume[TempInt] <= 47))
            i++;
        }

        if(i >= TempInt)                          // All bays are muted
        {
          LED1_ON();
        }

        LastVolume[RceiveAddress-BCAOutput] = a_Volume[RceiveAddress-BCAOutput];
      }
#endif
      BCMessageReceive(RxBuf);        // Kick off receive of next frame
    }

  }
}
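A minimal sketch (not part of the original source): the reply-address construction that Example #1 does inline, written out as a helper. It assumes the same Uint8 type and 4-bit address scheme shown above; the name ReplyAddress is hypothetical.

static Uint8 ReplyAddress(Uint8 rx_addr, Uint8 my_addr)
{
  Uint8 dest = (rx_addr >> 4) & 0b1111;    // the original sender becomes the destination
  return (Uint8)((my_addr << 4) | dest);   // this board's address goes in the source (high) nibble
}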
Example #2
/*
  main routine

  Program entry point
*/
int main(void)
{
  Uint8   TxBuf[BCMsgSize];
  Uint8   RxBuf[BCMsgSize];
  Uint8   Address;            // Address of device in bay
  Uint8   Destination;        // Address we will reply to
  Uint8   Param,Param2;


  MainInit();

  UART_TxStr("\r\nPower up\r\n");

  UART_Rx(RxData, 9);             // Input register setting command

  Address = BCAOutput + ReadPosition();
  BCMessageInit(Address);         // Set up the UART
  BCMessageReceive(RxBuf);        // Kick off receive

  // Enter the main loop
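  // Each pass: wait LoopRate ms, poll the settings inputs, and if a complete
  // frame addressed to us has arrived, ACK it and re-arm the receiver before
  // doing the (potentially slow) codec work.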

  for( ; ; ) {                              // Run forever
    DelayMS(LoopRate);
    SettingsControl();

    if (BCRXAvail) {                        // We have a new message
      if ((RxBuf[BCPAddr] & 0b1111) == Address)  { // Check it is for us
        Destination = RxBuf[BCPAddr] >> 4;  // Pre-setup assuming we will reply
        Destination &= 0b1111;
        Destination |= Address << 4;
        TxBuf[BCPAddr] = Destination;
        DelayMS(2);                         // Allow line turn around delay
        switch (RxBuf[BCPType]) {
          case BCTInquire:                  // Master request of slave ID
            TxBuf[BCPType] = BCTInquireAnswer;
            TxBuf[BCPParam1] = ReadProductID();
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
            break;

          case BCTVolume:                   // Volume set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
//            Timer_Clear();
            Volume_Set(Param);
//            UART_TxStr("Volume_Set took ");
//            UART_TxNum(Timer_Read());
//            UART_TxStr("mS\r\n");
            break;

          case BCTHeadphoneChGain:          // Headphone channel gain set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
//            Timer_Clear();
            SetChannelAdjust(Param,Param2);
//            UART_TxStr("Volume_Set took ");
//            UART_TxNum(Timer_Read());
//            UART_TxStr("mS\r\n");
            break;

          case BCTHeadphoneChMax:           // Headphone channel max volume set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            Param2 = RxBuf[BCPParam2];       // Save parameter so we can receive next frame while processing this request
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
            SetChMaxVolume(Param, Param2);
            break;

          case BCTAudioFormat:              // Audio format set
            TxBuf[BCPType] = BCTAck;
            TxBuf[BCPParam1] = 0;
            TxBuf[BCPParam2] = 0;
            BCMessageSend(TxBuf,true);      // Send the reply
            Param = RxBuf[BCPParam1];       // Save parameter so we can receive next frame while processing this request
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
//            Timer_Clear();
            WM8960_SetAudioFormat(Param);
//            UART_TxStr("WM8960_SetAudioFormat took ");
//            UART_TxNum(Timer_Read());
//            UART_TxStr("mS\r\n");
            break;

          default:  // Unknown command
            TxBuf[BCPType] = BCTNAck;
            TxBuf[BCPParam1] = BCNUnkownType;
            TxBuf[BCPParam2] = RxBuf[BCPType];
            BCMessageSend(TxBuf,true);      // Send the reply
            BCMessageReceive(RxBuf);        // Kick off receive of next frame
            break;
        }
      }
    }
  }
}
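A hedged sketch (not in the original): every command case in Example #2 repeats the same sequence of ACK the frame, save the parameters, then re-arm the receiver before doing the slow codec work. It could be factored out roughly as below, using only the buffers and routines already shown; the name AckFrame is hypothetical.

static void AckFrame(Uint8 *TxBuf, Uint8 *RxBuf, Uint8 *Param, Uint8 *Param2)
{
  TxBuf[BCPType]   = BCTAck;
  TxBuf[BCPParam1] = 0;
  TxBuf[BCPParam2] = 0;
  BCMessageSend(TxBuf, true);        // Send the reply
  *Param  = RxBuf[BCPParam1];        // Save the parameters first: the next frame
  *Param2 = RxBuf[BCPParam2];        //  will overwrite RxBuf once receive is re-armed
  BCMessageReceive(RxBuf);           // Kick off receive of the next frame
}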
Example #3
/* YAPOCClient main function */
int main(){

  Initialize();

  
  //wait until at least one client is connected to output port 
  while (outCommandPort.IsReceiving() <= 0){
    YARPTime::DelayInSeconds(1);
  }
  

  //change velocity
  //note that this can also be done by changing default velocities in the 
  //configuration file
  short axes[] = {NECK_TILT,LEFTEYE_PAN,RIGHTEYE_PAN,EYE_TILT,NECK_PAN, HEAD_ROLL, HEAD_TILT, NECK_CRANE};
  float newvel[8] = {0.08, 0.5, 0.5, 0.3, 0.3, 0.2, 0.2, 0.5};
  ChangeVelocity(8,axes,newvel);

  //time variables for regulating movement frequency
  time_t curtime;
  time(&curtime);
  time_t lastmove_neckpan = curtime;
  time_t lastmove_headtilt = curtime;
  time_t lastmove_headroll = curtime;
  time_t lastmove_expression = curtime;
  time_t lastmove_drive = curtime;
  time_t lastmove_neckcrane = curtime;
  time_t lastmove_basepan = curtime;
  time_t lastmove_lowerlid = curtime;
  

  //convert pixel distance to pan/tilt radians
  float pan_radians_per_pixel = -0.5f * radians_per_degree;
  float tilt_radians_per_pixel = -0.5f * radians_per_degree;
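  // i.e. half a degree of correction per pixel of image error; the negative
  // sign sets the direction convention for the pan/tilt axes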
 
  float posbuffer[N_AXIS];

  PositionCommand eye_com, neck_pan_com, base_pan_com, head_tilt_com, head_roll_com, neck_crane_com;
  
  int first = 1;
  int current_emotion = CALM; 
  int modify_emotion = 0;
  int current_drive;

  //initially randomly choose current drive, 50% SOCIAL 50% PLAY
  if (rand() > 16383)
    current_drive = SOCIAL;
  else
    current_drive = PLAY;
  
  printf("current drive is %d\n",current_drive);
  
  
  FiveBoxesInARow target_leftboxes, target_rightboxes;
  int total_pixels, total_sal, last_total_sal = 0, sum_delta_sal = 0, delta_iter = 0;
  
  //continuous loop
  while (1){
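    /* Each iteration: read the encoders and the vision target, drive the eye
       pan/tilt toward the target, update the facial expression from the
       saliency statistics, then cascade follow-up moves for neck pan, base
       pan, head/neck tilt and neck crane, and occasionally switch drive. */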

    //read position from encoder
    ReadPosition(posbuffer);
    if(DEBUG) printf("Read position:");
    for (int j = 0; j < N_MEIAXES; j++)
      if(DEBUG) printf(" %f ",posbuffer[j]);
    if(DEBUG) printf("\n");
    
    //read target pos, produced by vision modules
    in_pos.Read();
    StereoPosData& target_pos = in_pos.Content();
    if(DEBUG) printf("The position as of this time is %4g,%4g  %4g,%4g, %d :",
	     target_pos.xl, target_pos.yl,
	     target_pos.xr, target_pos.yr, target_pos.valid);
    
    
    modify_emotion = 0;

    //if current drive is SOCIAL, read boxes from skin detectors
    if (current_drive == SOCIAL){
      in_leftboxes.Read();
      in_rightboxes.Read();
    }
    else {
      //if current drive is PLAY, read boxes from color detectors
      in_attnboxes.Read();
    }
    
    if (current_drive == SOCIAL){
      target_leftboxes = in_leftboxes.Content();
      target_rightboxes = in_rightboxes.Content();
      
      //calculate total pixels from all detected boxes
      total_pixels = target_leftboxes.box1.total_pixels+target_leftboxes.box2.total_pixels+
	target_leftboxes.box3.total_pixels+target_leftboxes.box4.total_pixels+target_leftboxes.box5.total_pixels
	+ target_rightboxes.box1.total_pixels+target_rightboxes.box2.total_pixels+
	target_rightboxes.box3.total_pixels+target_rightboxes.box4.total_pixels+target_rightboxes.box5.total_pixels;
      total_sal = target_leftboxes.box1.total_sal+target_leftboxes.box2.total_sal+
	target_leftboxes.box3.total_sal+target_leftboxes.box4.total_sal+target_leftboxes.box5.total_sal
	+ target_rightboxes.box1.total_sal+target_rightboxes.box2.total_sal+
	target_rightboxes.box3.total_sal+target_rightboxes.box4.total_sal+target_rightboxes.box5.total_sal;
      
      printf("Total pixels = %d  saliency = %d \n",total_pixels, total_sal);
      
    }
    else {
      target_leftboxes = in_attnboxes.Content();
      
      //calculate total pixels from all detected boxes
       total_pixels = target_leftboxes.box1.total_pixels+target_leftboxes.box2.total_pixels+
	      target_leftboxes.box3.total_pixels+target_leftboxes.box4.total_pixels+target_leftboxes.box5.total_pixels;

       total_sal = target_leftboxes.box1.total_sal+target_leftboxes.box2.total_sal+
	        target_leftboxes.box3.total_sal+target_leftboxes.box4.total_sal+target_leftboxes.box5.total_sal;
      
       printf("Total pixels = %d  saliency = %d \n",total_pixels, total_sal);
      
    }
    
    double delta_pan_pixels;
    double delta_tilt_pixels;
    
    //translate target pos to pan and tilt delta pixels
    delta_pan_pixels = target_pos.xl - 63;
    delta_tilt_pixels = target_pos.yl - 63;

    if(DEBUG)
      printf("%4g,%4g\n", delta_pan_pixels, delta_tilt_pixels);
    
    //at startup, generate CALM expression
    if (first == 1){
      FaceExpression(CALM_pos,posbuffer);
      first = 0;
    }

    //move eye pan and tilt motors according to target pos
    eye_com.ClearAll();
    float new_eye_pos_pan;
    new_eye_pos_pan = posbuffer[LEFTEYE_PAN] + 
      (pan_radians_per_pixel * delta_pan_pixels); 
    float new_eye_pos_tilt;
    new_eye_pos_tilt = posbuffer[EYE_TILT] +
      (tilt_radians_per_pixel * delta_tilt_pixels);
    eye_com.Set(LEFTEYE_PAN, new_eye_pos_pan);
    eye_com.Set(RIGHTEYE_PAN, -new_eye_pos_pan);
    eye_com.Set(EYE_TILT, new_eye_pos_tilt);
    eye_com.Execute();
    
    //get current time
    time(&curtime);   
 
    /* If total saliency (summed over all boxes) hasn't changed much in
    the last 20 iterations and then jumps by more than 10000,
    generate a surprised expression */
    if (delta_iter > 20 && abs (last_total_sal - total_sal) > 10000){
      printf("--> Surprised\n");
      FaceExpression(SURPRISED_pos, posbuffer);
      modify_emotion = 1;
      current_emotion = SURPRISED;
      lastmove_expression = curtime;
      
      short axis[1];
      axis[0] = NECK_TILT;
      float newvelo[1];
      newvelo[0] = 0.16f;
      
      ChangeVelocity(1,axis,newvelo);
      
      head_tilt_com.ClearAll();
      head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] + 0.15f);
      
      head_tilt_com.Execute();
      
      
      newvelo[0]= 0.08f;
      ChangeVelocity(1,axis,newvelo);

    }
    else {
      //if total saliency > threshold and current emotion is not FEAR, generate
      //fear expression
      if (total_sal > 550000 && current_emotion != FEAR){
	printf("--> Fear\n");
	FaceExpression(FEAR_pos, posbuffer);

	modify_emotion = 1;
	current_emotion = FEAR;
	lastmove_expression = curtime;

	
	short axis[1];
	axis[0] = NECK_TILT;
	float newvelo[1];
	newvelo[0] = 0.25f;
	
	ChangeVelocity(1,axis,newvelo);
	
	head_tilt_com.ClearAll();
	head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] + 0.3f);
	
	head_tilt_com.Execute();
	
	newvelo[0]= 0.08f;
	ChangeVelocity(1,axis,newvelo);
      }
      
      
    }
    
    //if change in total saliency is less than threshold, increment delta_iter
    if (abs(total_sal - last_total_sal) < 5000){
      delta_iter++;
    }
    //otherwise, reset to 0
    else
      delta_iter = 0;
    

    //if the last expression change occurs more than 3 secs ago
    if (curtime - lastmove_expression > 3){
      //if current emotion is surprised, become happy
      if (current_emotion == SURPRISED){
	printf("--> Happy\n");
	FaceExpression(HAPPY_pos, posbuffer);
	modify_emotion = 1;
	current_emotion = HAPPY;
	
	//tilt neck forward
	head_tilt_com.ClearAll();
	head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] - 0.08f);	
	head_tilt_com.Execute();
      }
      //if current emotion is fear, become sad
      else if (current_emotion == FEAR){ 
	printf("--> Sad\n");
	FaceExpression(SAD_pos,posbuffer);
	modify_emotion = 1;
	current_emotion = SAD;
	
	//tilt neck forward
	head_tilt_com.ClearAll();
	head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] - 0.08f);
	head_tilt_com.Execute();
	
      }	
      else {
	//if total saliency is low and not currently sad, about 50% chance of
	//becoming sad
	if (total_sal < 25000 && current_emotion != SAD){
	  if (rand() > 16000){
	    printf("--> Sad\n");
	    FaceExpression(SAD_pos,posbuffer);
	    modify_emotion = 1;
	    current_emotion = SAD;
	    
	    head_tilt_com.ClearAll();
	    head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] - 0.08f);
	    
	    head_tilt_com.Execute();
	    
	  }
	  // else do nothing
	}
	
	//if total saliency is high, either become calm or happy
	if (total_sal > 60000){
	  if (rand() > 16000){
	    printf("--> Calm");
	    FaceExpression(CALM_pos,posbuffer);
	    modify_emotion = 1;
	    current_emotion = CALM;
	    
	  }
	  else {
	    FaceExpression(HAPPY_pos,posbuffer);
	    modify_emotion = 1; current_emotion = HAPPY;
	    
	    head_tilt_com.ClearAll();
	    head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] + 0.08f);
	    
	    head_tilt_com.Execute();
	  }
	}
      }
      lastmove_expression = curtime;

    }
    
    last_total_sal = total_sal;
    

    //Upper eyelids are adjusted during face expression, so if there's no
    //change in face expression, we need to adjust eyelids here.
    if (modify_emotion == 0){
      //move eyelids
      float uleftlid_diff, urightlid_diff;
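      /* The lid offset appears to map the current eye tilt (divided by 0.28,
         scaled by 400) into lid units and then add the expression-specific lid
         pose; the "- 8" seems to re-base the axis index into the pose array. */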
      
      if (current_emotion == HAPPY){
	uleftlid_diff = posbuffer[EYE_TILT]/0.28 * 400 + HAPPY_pos[LEFT_UPPER_LID - 8];
	urightlid_diff = - (posbuffer[EYE_TILT]/0.28 * 400) + HAPPY_pos[RIGHT_UPPER_LID - 8];
      }
      if (current_emotion == SAD){
	uleftlid_diff = posbuffer[EYE_TILT]/0.28 * 400 + SAD_pos[LEFT_UPPER_LID - 8];
	urightlid_diff = - (posbuffer[EYE_TILT]/0.28 * 400) + SAD_pos[RIGHT_UPPER_LID - 8];
      }
      if (current_emotion == SURPRISED){
	uleftlid_diff = posbuffer[EYE_TILT]/0.28 * 400 + SURPRISED_pos[LEFT_UPPER_LID - 8];
	urightlid_diff = - (posbuffer[EYE_TILT]/0.28 * 400) + SURPRISED_pos[RIGHT_UPPER_LID - 8];
      }
      if (current_emotion == FEAR){
	uleftlid_diff = posbuffer[EYE_TILT]/0.28 * 400 + FEAR_pos[LEFT_UPPER_LID - 8];
	urightlid_diff = - (posbuffer[EYE_TILT]/0.28 * 400) + FEAR_pos[RIGHT_UPPER_LID - 8];
      }
      if (current_emotion == CALM){
	uleftlid_diff = posbuffer[EYE_TILT]/0.28 * 400 + CALM_pos[LEFT_UPPER_LID - 8];
	urightlid_diff = - (posbuffer[EYE_TILT]/0.28 * 400) + CALM_pos[RIGHT_UPPER_LID - 8];
      }
      
      MoveOneAxis(LEFT_UPPER_LID, -uleftlid_diff);
      MoveOneAxis(RIGHT_UPPER_LID, -urightlid_diff);
      
    }
     
     //if left eye pan is beyond threshold & the last neckpan move is more than
     //1 sec ago, move neckpan accordingly
     float neck_eye_pan_diff;
     neck_eye_pan_diff = posbuffer[LEFTEYE_PAN];
     if (curtime - lastmove_neckpan > 1 && (neck_eye_pan_diff > 0.16f || neck_eye_pan_diff < -0.16f))
      {
	neck_pan_com.ClearAll();
	neck_pan_com.Set(NECK_PAN, 
			 posbuffer[NECK_PAN] + (-1.25f *neck_eye_pan_diff));
	neck_pan_com.Execute();
	lastmove_neckpan = curtime;
    
      }
     
  
     //if neck pan is beyond threshold & the last base pan move is more than 
     //2 secs ago, move base_pan accordingly
     float neck_base_pan_diff = posbuffer[NECK_PAN];
     if (curtime - lastmove_basepan > 2 && (neck_base_pan_diff > 0.6f || neck_base_pan_diff < -0.6f)){ 
       //printf("neck_base_pan_diff = %g,  ",neck_base_pan_diff);
       base_pan_com.ClearAll();
       base_pan_com.Set(BASE_PAN, posbuffer[BASE_PAN] + (-0.4f * neck_base_pan_diff));
       //printf("currently at %g , send base pan to %g \n",posbuffer[BASE_PAN], posbuffer[BASE_PAN] + (-1.0f * neck_base_pan_diff));
       base_pan_com.Execute();
       lastmove_basepan = curtime;
     }

     
     //if eye tilt is beyond threshold and the last head tilt move was more
     //than 1 sec ago, move head and neck tilt accordingly
     float head_eye_tilt_diff, head_roll_tilt_diff;
     head_eye_tilt_diff = posbuffer[EYE_TILT];
     if (curtime - lastmove_headtilt > 1 && (head_eye_tilt_diff > 0.16f || head_eye_tilt_diff < -0.16f))
    {

      head_tilt_com.ClearAll();
      head_tilt_com.Set(HEAD_TILT,
			posbuffer[HEAD_TILT] +
			 head_eye_tilt_diff);
      
      //we know posbuffer[HEAD_TILT] is always >= 0
      if (head_eye_tilt_diff < 0 && posbuffer[HEAD_TILT] < -head_eye_tilt_diff){
	head_roll_tilt_diff = -posbuffer[HEAD_TILT];
      }
      else
	head_roll_tilt_diff = head_eye_tilt_diff;
      
      
      head_tilt_com.Set(HEAD_ROLL, posbuffer[HEAD_ROLL] + (-0.7f * head_roll_tilt_diff));

      head_tilt_com.Set(NECK_TILT, posbuffer[NECK_TILT] + (0.4f * head_eye_tilt_diff));      
      
      head_tilt_com.Execute();

      lastmove_headtilt = curtime;

    }

  //if head tilt is too high or too low and the last neck crane move was more
  //than 2 secs ago, move the neck crane accordingly
  float neck_tilt_crane_diff = posbuffer[NECK_TILT];
  float head_tilt_crane_diff = posbuffer[HEAD_TILT];
  if (curtime - lastmove_neckcrane > 2 && (head_tilt_crane_diff > 0.2f || head_tilt_crane_diff < 0.05f))//neck_tilt_crane_diff < -0.08f))
    {
      neck_crane_com.ClearAll();
      if (head_tilt_crane_diff > 0.2f)
	neck_crane_com.Set(NECK_CRANE, posbuffer[NECK_CRANE] + (-2.0f * head_tilt_crane_diff));
      if (neck_tilt_crane_diff < -0.08f)
	neck_crane_com.Set(NECK_CRANE, posbuffer[NECK_CRANE] + (-3.0f * neck_tilt_crane_diff));
      neck_crane_com.Execute();
      lastmove_neckcrane = curtime;
      
    }
 
  //randomly switch the drive, at most once every 10 seconds
  if (curtime - lastmove_drive > 10 && rand() > 5000){
    if (current_drive == SOCIAL){
      current_drive = PLAY;
      printf("current drive is PLAY\n");
    }
    else{
      current_drive = SOCIAL;
      printf("current drive is SOCIAL\n");
    }
    lastmove_drive = curtime;
  }
     
     
  }
}
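A hedged sketch (not part of the original) isolating the proportional gaze step from the loop above. It assumes the same PositionCommand interface and axis constants used in Example #3; the image centre of 63 pixels and the pixel-to-radian gain come directly from the code, and the name SaccadeToTarget is hypothetical.

static void SaccadeToTarget(PositionCommand &eye_com, const float *posbuffer,
                            double target_xl, double target_yl,
                            float radians_per_pixel)
{
  double delta_pan_pixels  = target_xl - 63;   // pixel error from the image centre
  double delta_tilt_pixels = target_yl - 63;

  float new_pan  = posbuffer[LEFTEYE_PAN] + (float)(radians_per_pixel * delta_pan_pixels);
  float new_tilt = posbuffer[EYE_TILT]    + (float)(radians_per_pixel * delta_tilt_pixels);

  eye_com.ClearAll();
  eye_com.Set(LEFTEYE_PAN, new_pan);
  eye_com.Set(RIGHTEYE_PAN, -new_pan);         // the right eye mirrors the left
  eye_com.Set(EYE_TILT, new_tilt);
  eye_com.Execute();
}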