Example #1
int main(int argc, const char* argv[])
{
  std::string opt_ip = "198.18.0.1";;
  std::string opt_data_folder = std::string(ROMEOTK_DATA_FOLDER);
  bool opt_Reye = false;
  bool opt_calib = true;

  // Learning folder in /tmp/$USERNAME
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save new files...
  std::string learning_folder;
#if defined(_WIN32)
  learning_folder ="C:/temp/" + username;
#else
  learning_folder ="/tmp/" + username;
#endif
  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(learning_folder) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(learning_folder);
    }
    catch (vpException &e) {
      std::cout << "Cannot create " << learning_folder << std::endl;
      std::cout << "Error: " << e.getMessage() <<std::endl;
      return 0;
    }
  }



  for (unsigned int i=0; i<argc; i++) {
    if (std::string(argv[i]) == "--ip")
      opt_ip = argv[i+1];
    else if ( std::string(argv[i]) == "--reye")
      opt_Reye = true;
    else if ( std::string(argv[i]) == "--calib")
      opt_calib = true;
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0] << "[--ip <robot address>]" << std::endl;
      return 0;
    }
  }

  std::vector<std::string> chain_name(2); // LArm or RArm
  chain_name[0] = "LArm";
  chain_name[1] = "RArm";


  /************************************************************************************************/
  /** Open the grabber for the acquisition of the images from the robot */
  vpNaoqiGrabber g;
  g.setFramerate(15);

  // Initialize constant transformations
  vpHomogeneousMatrix eMc;

  if (opt_Reye)
  {
    std::cout << "Using camera Eye Right" << std::endl;
    g.setCamera(3); // CameraRightEye
    eMc = g.get_eMc(vpCameraParameters::perspectiveProjWithDistortion,"CameraRightEye");
  }
  else
  {
    std::cout << "Using camera Eye Right" << std::endl;
    g.setCamera(2); // CameraLeftEye
    eMc = g.get_eMc(vpCameraParameters::perspectiveProjWithDistortion,"CameraLeftEye");
  }
  std::cout << "eMc: " << eMc << std::endl;
  if (! opt_ip.empty())
    g.setRobotIp(opt_ip);
  g.open();
  std::string camera_name = g.getCameraName();
  std::cout << "Camera name: " << camera_name << std::endl;

  vpCameraParameters cam = g.getCameraParameters(vpCameraParameters::perspectiveProjWithDistortion);
  std::cout << "Camera parameters: " << cam << std::endl;


  /** Initialize ViSP image, display and camera parameters */
  vpImage<unsigned char> I(g.getHeight(), g.getWidth());
  vpDisplayX d(I);
  vpDisplay::setTitle(I, "Camera view");

  //Initialize opencv color image
  cv::Mat cvI = cv::Mat(cv::Size(g.getWidth(), g.getHeight()), CV_8UC3);



  /************************************************************************************************/


  /** Initialize the pen target */

  std::string opt_name_file_color_pen_target = opt_data_folder + "/" +"target/box_blob/color.txt";


  vpBlobsTargetTracker pen_tracker;
  bool status_pen_tracker;
  vpHomogeneousMatrix cMpen;

  const double L = 0.025/2;
  std::vector <vpPoint> points(4);
  points[2].setWorldCoordinates(-L,-L, 0) ;
  points[1].setWorldCoordinates(-L,L, 0) ;
  points[0].setWorldCoordinates(L,L, 0) ;
  points[3].setWorldCoordinates(L,-L,0) ;


  const double L1 = 0.021/2;
  std::vector <vpPoint> points_pen(4);
  points_pen[2].setWorldCoordinates(-L1,-L1, 0) ;
  points_pen[1].setWorldCoordinates(-L1,L1, 0) ;
  points_pen[0].setWorldCoordinates(L1,L1, 0) ;
  points_pen[3].setWorldCoordinates(L1,-L1,0) ;
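
  // Each blob target is a square of four points (side 0.025 m for the hand
  // targets, 0.021 m for the pen target), expressed in the target frame with
  // the origin at the centre and z = 0; the non-sequential index order
  // presumably matches the blob ordering expected by vpBlobsTargetTracker.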


  pen_tracker.setName("pen");
  pen_tracker.setCameraParameters(cam);
  pen_tracker.setPoints(points_pen);
  //pen_tracker.setManualBlobInit(true);
  pen_tracker.setFullManual(true);

  pen_tracker.setLeftHandTarget(true);

  if(!pen_tracker.loadHSV(opt_name_file_color_pen_target))
  {
    std::cout << "Error opening the file "<< opt_name_file_color_pen_target << std::endl;
  }



  vpHomogeneousMatrix pen_M_point(0.0, 0.0, 0.163, 0.0, 0.0, 0.0);
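  // pen_M_point: constant transform from the pen target frame to the pen tip,
  // here a pure translation of 0.163 m along z (presumably the measured tip
  // offset of the physical pen).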


  /************************************************************************************************/


  /** Initialize the qrcode tracker*/
  bool status_qrcode_tracker;
  vpHomogeneousMatrix cM_tab;
  vpQRCodeTracker qrcode_tracker;
  qrcode_tracker.setCameraParameters(cam);
  qrcode_tracker.setQRCodeSize(0.045);
  qrcode_tracker.setMessage("romeo_left_arm");



  /************************************************************************************************/
  /** Initialization target hands*/

  std::string opt_name_file_color_target_path = opt_data_folder + "/" +"target/";
  std::string opt_name_file_color_target_l = opt_name_file_color_target_path + chain_name[0] +"/color.txt";
  std::string opt_name_file_color_target_r = opt_name_file_color_target_path + chain_name[1] +"/color.txt";

  std::string opt_name_file_color_target1_l = opt_name_file_color_target_path + chain_name[0] +"/color1.txt";
  std::string opt_name_file_color_target1_r = opt_name_file_color_target_path + chain_name[1] +"/color1.txt";

  std::vector<vpBlobsTargetTracker*> hand_tracker;
  std::vector<bool>  status_hand_tracker(2);
  std::vector<vpHomogeneousMatrix>  cMo_hand(2);


  vpBlobsTargetTracker hand_tracker_l;
  hand_tracker_l.setName(chain_name[0]);
  hand_tracker_l.setCameraParameters(cam);
  hand_tracker_l.setPoints(points);
  hand_tracker_l.setLeftHandTarget(true);

  if(!hand_tracker_l.loadHSV(opt_name_file_color_target_l))
    std::cout << "Error opening the file "<< opt_name_file_color_target_l << std::endl;


  vpBlobsTargetTracker hand_tracker_r;
  hand_tracker_r.setName(chain_name[1]);
  hand_tracker_r.setCameraParameters(cam);
  hand_tracker_r.setPoints(points);
  hand_tracker_r.setLeftHandTarget(false);

  if(!hand_tracker_r.loadHSV(opt_name_file_color_target1_r))
    std::cout << "Error opening the file "<< opt_name_file_color_target_r << std::endl;

  hand_tracker.push_back(&hand_tracker_l);
  hand_tracker.push_back(&hand_tracker_r);


  /************************************************************************************************/

  // Constant transformation Target Frame to Arm end-effector (WristPitch)
  std::vector<vpHomogeneousMatrix> hand_Me_Arm(2);
  std::string filename_transform = std::string(ROMEOTK_DATA_FOLDER) + "/transformation.xml";
  // Create twist matrix from pen to Arm end-effector (WristPitch)
  std::vector <vpVelocityTwistMatrix> pen_Ve_Arm(2);
  // Create twist matrix from target Frame to Arm end-effector (WristPitch)
  std::vector <vpVelocityTwistMatrix> hand_Ve_Arm(2);

  // Constant transformation Target Frame to pen to target Hand
  std::vector<vpHomogeneousMatrix> pen_Mhand(2);


  for (unsigned int i = 0; i < 2; i++)
  {

    std::string name_transform = "qrcode_M_e_" + chain_name[i];
    vpXmlParserHomogeneousMatrix pm; // Create a XML parser

    if (pm.parse(hand_Me_Arm[i], filename_transform, name_transform) != vpXmlParserHomogeneousMatrix::SEQUENCE_OK) {
      std::cout << "Cannot found the homogeneous matrix named " << name_transform << "." << std::endl;
      return 0;
    }
    else
      std::cout << "Homogeneous matrix " << name_transform <<": " << std::endl << hand_Me_Arm[i] << std::endl;

    hand_Ve_Arm[i].buildFrom(hand_Me_Arm[i]);
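    // A vpVelocityTwistMatrix built from hand_M_e is the 6x6 twist transform
    // [R, [t]x R; 0, R] that maps velocities expressed in the end-effector
    // (WristPitch) frame into the hand-target frame.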

  }

  /************************************************************************************************/

  /** Create a new vpNaoqiRobot instance */
  vpNaoqiRobot robot;
  if (! opt_ip.empty())
    robot.setRobotIp(opt_ip);
  robot.open();

  std::vector<std::string> jointNames = robot.getBodyNames("Head");
  //jointNames.pop_back(); // We don't consider  the last joint of the head = HeadRoll
  std::vector<std::string> jointNamesLEye = robot.getBodyNames("LEye");
  std::vector<std::string> jointNamesREye = robot.getBodyNames("REye");

  jointNames.insert(jointNames.end(), jointNamesLEye.begin(), jointNamesLEye.end());
  std::vector<std::string> jointHeadNames_tot = jointNames;
  jointHeadNames_tot.push_back(jointNamesREye.at(0));
  jointHeadNames_tot.push_back(jointNamesREye.at(1));
  /************************************************************************************************/

  // Initialize state

  State_t state;
  state = CalibratePen;

  /************************************************************************************************/

  std::vector<bool> grasp_servo_converged(2);
  grasp_servo_converged[0]= false;
  grasp_servo_converged[1]= false;


  std::vector<vpMatrix> eJe(2);


  // Initialize arms servoing
  vpServoArm servo_larm_l;
  vpServoArm servo_larm_r;

  std::vector<vpServoArm*> servo_larm;
  servo_larm.push_back(&servo_larm_l);
  servo_larm.push_back(&servo_larm_r);

  std::vector < std::vector<std::string > > jointNames_arm(2);

  jointNames_arm[0] = robot.getBodyNames(chain_name[0]);
  jointNames_arm[1] = robot.getBodyNames(chain_name[1]);
  // Remove the last joint (Hand), which is not used in the servo
  jointNames_arm[0].pop_back();
  jointNames_arm[1].pop_back();

  std::vector<std::string> jointArmsNames_tot = jointNames_arm[0];

  //jointArmsNames_tot.insert(jointArmsNames_tot.end(), jointNames_arm[1].begin(), jointNames_arm[1].end());


  const unsigned int numArmJoints =  jointNames_arm[0].size();
  std::vector<vpHomogeneousMatrix> pen_dMpen(2);

  // Vector containing the joint velocity vectors of the arms
  std::vector<vpColVector> q_dot_arm;
  // Vector containing the joint velocity vectors of the arms for the secondary task
  std::vector<vpColVector> q_dot_arm_sec;
  // Vector joint position of the arms
  std::vector<vpColVector> q;
  // Vector joint real velocities of the arms
  std::vector<vpColVector> q_dot_real;

  vpColVector   q_temp(numArmJoints);
  q_dot_arm.push_back(q_temp);
  q_dot_arm.push_back(q_temp);

  q_dot_arm_sec.push_back(q_temp);
  q_dot_arm_sec.push_back(q_temp);

  q.push_back(q_temp);
  q.push_back(q_temp);

  q_dot_real.push_back(q_temp);
  q_dot_real.push_back(q_temp);



  // Initialize the joint avoidance scheme from the joint limits
  std::vector<vpColVector> jointMin;
  std::vector<vpColVector> jointMax;

  jointMin.push_back(q_temp);
  jointMin.push_back(q_temp);

  jointMax.push_back(q_temp);
  jointMax.push_back(q_temp);

  for (unsigned int i = 0; i< 2; i++)
  {
    jointMin[i] = robot.getJointMin(chain_name[i]);
    jointMax[i] = robot.getJointMax(chain_name[i]);

    jointMin[i].resize(numArmJoints,false);
    jointMax[i].resize(numArmJoints,false);

    //        std::cout <<  jointMin[i].size() << std::endl;
    //        std::cout <<  jointMax[i].size() << std::endl;
    //         std::cout << "max " <<  jointMax[i] << std::endl;
  }



  std::vector<bool> first_time_arm_servo(2);
  first_time_arm_servo[0] = true;
  first_time_arm_servo[1] = true;

  std::vector< double> servo_arm_time_init(2);
  servo_arm_time_init[0] = 0.0;
  servo_arm_time_init[1] = 0.0;

  std::vector<int> cpt_iter_servo_grasp(2);
  cpt_iter_servo_grasp[0] = 0;
  cpt_iter_servo_grasp[1] = 0;

  unsigned int index_hand = 0; //Left Arm

  //  std::string handMpen_name = "handMpen_" + chain_name[index_hand];
  //  std::string filename_transform_pen = std::string(ROMEOTK_DATA_FOLDER) +"/pen/transformation_pen.xml";


  //  vpHomogeneousMatrix hand_M_pen ;
  //  if (state != CalibratePen)
  //  {

  //    vpXmlParserHomogeneousMatrix pm; // Create a XML parser

  //    if (pm.parse(hand_M_pen, filename_transform_pen, handMpen_name) != vpXmlParserHomogeneousMatrix::SEQUENCE_OK) {
  //      std::cout << "Cannot found the homogeneous matrix named " << handMpen_name << "." << std::endl;
  //      return 0;
  //    }
  //    else
  //      std::cout << "Homogeneous matrix " << handMpen_name <<": " << std::endl << hand_M_pen << std::endl;

  //  }

  //    vpHomogeneousMatrix M_offset;
  //    M_offset[1][3] = 0.00;
  //    M_offset[0][3] = 0.00;
  //    M_offset[2][3] = 0.00;

  vpHomogeneousMatrix M_offset;
  M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;

  double d_t = 0.01;
  double d_r = 0.0;
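
  // d_t (m) and d_r (rad) are the per-keypress translation and rotation steps
  // applied through M_offset; keys "t" and "r" in the loop below switch
  // between translation-only and rotation-only mode.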

  bool first_time_pen_pose = true;

  /************************************************************************************************/
  /** Initialization drawing*/


  //  vpHomogeneousMatrix p1(0.03, 0.0, 0.0, 0.0, 0.0, 0.0);
  //  vpHomogeneousMatrix p2(0.03, 0.03, 0.0, 0.0, 0.0, 0.0);
  //  vpHomogeneousMatrix p3(0.0, 0.03, 0.0, 0.0, 0.0, 0.0);
  //  vpHomogeneousMatrix p4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0);

  std::vector<vpHomogeneousMatrix> P;

  //  P.push_back(p1);
  //  P.push_back(p2);
  //  P.push_back(p3);
  //  P.push_back(p4);


  for (unsigned int i = 0;i<50;i++ )
  {
    vpHomogeneousMatrix p1(0.002*i, 0.0, 0.0, 0.0, 0.0, 0.0);
    P.push_back(p1);
  }
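
  // P now holds the drawing path: 50 poses spaced 0.002 m apart along x,
  // i.e. a straight line of roughly 10 cm to be traced by the pen tip.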

  /************************************************************************************************/

  vpMouseButton::vpMouseButtonType button;

  std::vector<vpHomogeneousMatrix> cMpen_d;
  unsigned int index_p = 0;


  //AL::ALValue names_head     = AL::ALValue::array("NeckYaw","NeckPitch","HeadPitch","HeadRoll","LEyeYaw", "LEyePitch","REyeYaw", "REyePitch" );

  //  AL::ALValue angles_head      = AL::ALValue::array(vpMath::rad(1.6), vpMath::rad(28.6), vpMath::rad(6.6), vpMath::rad(4.8), vpMath::rad(0.6) , vpMath::rad(13.6), 0.0, 0.0  );
  //  float fractionMaxSpeed  = 0.1f;
  //  robot.getProxy()->setAngles(jointHeadNames_tot, angles_head, fractionMaxSpeed);



  while(1) {
    double loop_time_start = vpTime::measureTimeMs();


    g.acquire(cvI);
    vpImageConvert::convert(cvI, I);
    vpDisplay::display(I);
    bool click_done = vpDisplay::getClick(I, button, false);

    //        if (state < VSpen)
    //        {

    char key[10] = "";
    bool ret = vpDisplay::getKeyboardEvent(I, key, false);
    std::string s = key;

    if (ret)
    {
      if (s == "r")
      {
        M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;

        d_t = 0.0;
        d_r = 0.2;
        std::cout << "Rotation mode. " << std::endl;
      }

      if (s == "t")
      {
        M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;

        d_t = 0.01;
        d_r = 0.0;
        std::cout << "Translation mode. " << std::endl;
      }

      if (s == "h")
      {
        hand_tracker[index_hand]->setManualBlobInit(true);
        hand_tracker[index_hand]->setForceDetection(true);
      }
      if (s == "q")
      {
        pen_tracker.setFullManual(true);
        pen_tracker.setForceDetection(true);
      }

      else if ( s == "+")
      {
        unsigned int value = hand_tracker[index_hand]->getGrayLevelMaxBlob() +10;
        hand_tracker[index_hand]->setGrayLevelMaxBlob(value);
        std::cout << "Set to "<< value << "the value of  " << std::endl;

      }
      else if (s == "-")
      {
        unsigned int value = hand_tracker[index_hand]->getGrayLevelMaxBlob() - 10;
        hand_tracker[index_hand]->setGrayLevelMaxBlob(value);
        std::cout << "Set GrayLevelMaxBlob to " << value << std::endl;
      }

      //          |x
      //      z\  |
      //        \ |
      //         \|_____ y
      //

      //      else if (s == "4") //-y
      //      {
      //        M_offset.buildFrom(0.0, -d_t, 0.0, 0.0, -d_r, 0.0) ;
      //      }

      //      else if (s == "6")  //+y
      //      {
      //        M_offset.buildFrom(0.0, d_t, 0.0, 0.0, d_r, 0.0) ;
      //      }

      //      else if (s == "8")  //+x
      //      {
      //        M_offset.buildFrom(d_t, 0.0, 0.0, d_r, 0.0, 0.0) ;
      //      }

      //      else if (s == "2") //-x
      //      {
      //        M_offset.buildFrom(-d_t, 0.0, 0.0, -d_r, 0.0, 0.0) ;
      //      }

      //      //            else if (s == "7")//-z
      //      //            {
      //      //                M_offset.buildFrom(0.0, 0.0, -d_t, 0.0, 0.0, -d_r) ;
      //      //            }
      //      //            else if (s == "9") //+z
      //      //            {
      //      //                M_offset.buildFrom(0.0, 0.0, d_t, 0.0, 0.0, d_r) ;
      //      //            }

      //      cMpen_d = cMpen_d[index_p] * M_offset;


    }

    if (state < WaitPreDraw)
    {
      status_hand_tracker[index_hand] = hand_tracker[index_hand]->track(cvI,I);

      if (status_hand_tracker[index_hand] ) { // display the tracking results
        cMo_hand[index_hand] = hand_tracker[index_hand]->get_cMo();
        //printPose("cMo qrcode: ", cMo_hand);
        // The qrcode frame is only displayed when PBVS is active or learning

        vpDisplay::displayFrame(I, cMo_hand[index_hand], cam, 0.04, vpColor::none, 3);
      }

    }

    status_pen_tracker = pen_tracker.track(cvI,I);

    if (status_pen_tracker ) { // display the tracking results
      cMpen = pen_tracker.get_cMo();
      //printPose("cMo qrcode: ", cMo_hand);
      // The qrcode frame is only displayed when PBVS is active or learning

      vpDisplay::displayFrame(I, cMpen, cam, 0.04, vpColor::none, 3);
    }


    if (state == CalibratePen &&  status_hand_tracker[index_hand] )
    {

      if (status_pen_tracker)
      {
        vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to calibrate pen", vpColor::red);

        cMpen = pen_tracker.get_cMo();
        //printPose("cMo qrcode: ", cMo_hand);
        vpDisplay::displayFrame(I, cMpen, cam, 0.04, vpColor::none, 1);

        vpHomogeneousMatrix pen_Me_Arm; // Homogeneous matrix from the pen to the left wrist
        pen_Mhand[index_hand] = cMpen.inverse() *  cMo_hand[index_hand];
        pen_Me_Arm = pen_Mhand[index_hand] * hand_Me_Arm[index_hand] ; // from pen to WristPitch


        if (click_done && button == vpMouseButton::button1) {


          pen_Ve_Arm[index_hand].buildFrom(pen_Me_Arm);
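          // One-shot calibration: pen_Ve_Arm now maps end-effector (WristPitch)
          // velocities into the pen frame using the relative pose captured at
          // click time; the pen is assumed rigidly held from now on.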
          //          vpXmlParserHomogeneousMatrix p_; // Create a XML parser

          //          if (p_.save(hand_M_pen, "transformation_pen.xml", handMpen_name) != vpXmlParserHomogeneousMatrix::SEQUENCE_OK)
          //          {
          //            std::cout << "Cannot save the Homogeneous matrix" << std::endl;
          //            return 0;
          //          }
          //          std::cout << "Homogeneous matrix from the hand to the pen saved " << std::endl;
          //          hand_M_pen.print();

          state = WaitPreDraw;
          click_done = false;

        }

      }

    }



    if (state == WaitPreDraw)
    {
      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to continue", vpColor::red);

      status_qrcode_tracker = qrcode_tracker.track(I);

      if (status_qrcode_tracker) { // display the tracking results
        cM_tab = qrcode_tracker.get_cMo();
        //printPose("cMo qrcode: ", cMo_qrcode);
        // The qrcode frame is only displayed when PBVS is active or learning

        vpDisplay::displayFrame(I, cM_tab, cam, 0.07, vpColor::none, 2);
      }

      // Pen tip point wrt the camera, with the same orientation as the pen target
      vpHomogeneousMatrix cM_point = cMpen * pen_M_point;


      // Pen tip point wrt the camera, with the same orientation as the table
      vpHomogeneousMatrix cMpen_t = cM_tab;
      cMpen_t[0][3] = cM_point[0][3];
      cMpen_t[1][3] = cM_point[1][3];
      cMpen_t[2][3] = cM_point[2][3];


      for (unsigned int i = 0; i < P.size(); i++)
      {
        // Pen tip point wrt the camera, with the table orientation, at P[i]
        vpHomogeneousMatrix cMpen_p1 = cMpen_t * P[i];

        vpDisplay::displayFrame(I, cMpen_t, cam, 0.03, vpColor::none, 2);

        //vpDisplay::displayFrame(I, cMpen_p1, cam, 0.03, vpColor::none, 2);

        // Pen tip point wrt the camera, with the pen-target orientation, at P[i]
        vpHomogeneousMatrix cMpen_p1_rot_hand = cMpen;
        cMpen_p1_rot_hand[0][3] = cMpen_p1[0][3];
        cMpen_p1_rot_hand[1][3] = cMpen_p1[1][3];
        cMpen_p1_rot_hand[2][3] = cMpen_p1[2][3];
        //vpDisplay::displayFrame(I, cMpen_p1_rot_hand, cam, 0.03, vpColor::none, 2);


        // Desired pen-target pose at P[i]
        cMpen_d.push_back( cMpen_p1_rot_hand * pen_M_point.inverse());
        vpDisplay::displayFrame(I, cMpen_d[i], cam, 0.01, vpColor::none, 1);


      }



      if (click_done && button == vpMouseButton::button1 ) {

        state = PreDraw;
        click_done = false;
      }

    }

    if (state == PreDraw && status_pen_tracker )
    {

      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to start the servo", vpColor::red);

      //      if (first_time_pen_pose)
      //      {
      //        // Compute desired pen position cMpen_d
      //        cMpen_d = cMpen * M_offset;
      //        first_time_pen_pose = false;
      //      }

      for (unsigned int i = 0; i < P.size(); i++)
        vpDisplay::displayFrame(I, cMpen_d[i], cam, 0.01, vpColor::none, 1);

      //      vpDisplay::displayFrame(I, cMpen *pen_Mhand[1] , cam, 0.05, vpColor::none, 3);
      //      vpDisplay::displayFrame(I, cMpen *pen_Mhand[0] , cam, 0.05, vpColor::none, 3);

      if (click_done && button == vpMouseButton::button1 ) {

        state = VSpen;
        click_done = false;
      }
    }


    if (state == VSpen)
    {
      // Get the current arm joint positions
      q[0] = robot.getPosition(jointNames_arm[0]);
      q[1] = robot.getPosition(jointNames_arm[1]);

      //  if(status_pen_tracker && status_hand_tracker[index_hand] )
      if(status_pen_tracker )
      {

        if (! grasp_servo_converged[0]) {
          //                    for (unsigned int i = 0; i<2; i++)
          //                    {

          unsigned int i = 0;
          //vpAdaptiveGain lambda(0.8, 0.05, 8);
          // vpAdaptiveGain lambda(0.2, 0.01, 2);

          // servo_larm[i]->setLambda(lambda);
          servo_larm[i]->setLambda(0.2);


          eJe[i] = robot.get_eJe(chain_name[i]);

          servo_larm[i]->set_eJe(eJe[i]);
          servo_larm[i]->m_task.set_cVe(pen_Ve_Arm[i]);

          pen_dMpen[i] = cMpen_d[index_p].inverse() * cMpen;
          //          printPose("pen_dMpen: ", pen_dMpen[i]);


          servo_larm[i]->setCurrentFeature(pen_dMpen[i]) ;

          vpDisplay::displayFrame(I, cMpen_d[index_p] , cam, 0.05, vpColor::none, 3);


          //                    vpDisplay::displayFrame(I, cMpen *pen_Mhand[1] , cam, 0.05, vpColor::none, 3);
          //                    vpDisplay::displayFrame(I, cMpen *pen_Mhand[0] , cam, 0.05, vpColor::none, 3);

          if (first_time_arm_servo[i]) {
            std::cout << "-- Start visual servoing of the arm" << chain_name[i] << "." << std::endl;
            servo_arm_time_init[i] = vpTime::measureTimeSecond();
            first_time_arm_servo[i] = false;
          }

          q_dot_arm[i] =  - servo_larm[i]->computeControlLaw(servo_arm_time_init[i]);

          // Compute joint limit avoidance
          q_dot_arm_sec[0]  = servo_larm[0]->m_task.secondaryTaskJointLimitAvoidance(q[0], q_dot_real[0], jointMin[0], jointMax[0]);
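
          // The avoidance term lives in the null space of the main task, so it
          // pushes the joints away from their limits without moving the pen.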

          cpt_iter_servo_grasp[i] ++;


          vpColVector q_dot_tot = q_dot_arm[0] + q_dot_arm_sec[0];

          robot.setVelocity(jointArmsNames_tot, q_dot_tot);

          vpTranslationVector t_error_grasp = pen_dMpen[0].getTranslationVector();
          vpRotationMatrix R_error_grasp;
          pen_dMpen[0].extract(R_error_grasp);
          vpThetaUVector tu_error_grasp;
          tu_error_grasp.buildFrom(R_error_grasp);
          double theta_error_grasp;
          vpColVector u_error_grasp;
          tu_error_grasp.extract(theta_error_grasp, u_error_grasp);
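          // Convergence is monitored via the translation norm (m) and the
          // theta-u rotation angle (deg) extracted from the pose error.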
          std::cout << "error: " << sqrt(t_error_grasp.sumSquare()) << " " << vpMath::deg(theta_error_grasp) << std::endl;


          //                    if (cpt_iter_servo_grasp[0] > 100) {

          //                        vpDisplay::displayText(I, vpImagePoint(10,10), "Cannot converge. Click to continue", vpColor::red);
          //                    }
          //                    double error_t_treshold = 0.007;

          //                    if ( (sqrt(t_error_grasp.sumSquare()) < error_t_treshold) && (theta_error_grasp < vpMath::rad(3)) || (click_done && button == vpMouseButton::button1 /*&& cpt_iter_servo_grasp > 150*/) )
          //                    {
          //                        robot.stop(jointArmsNames_tot);

          //                        state = End;
          //                        grasp_servo_converged[0] = true;

          //                        if (click_done && button == vpMouseButton::button1)
          //                            click_done = false;
          //                    }


        }

      }
      else {
        // Servoing state, but one of the trackers failed: stop the arms
        robot.stop(jointArmsNames_tot);
      }

    }

    if (state == End)
    {
      std::cout<< "End" << std::endl;
    }

    vpDisplay::flush(I) ;

    if (click_done && button == vpMouseButton::button3) { // Quit the loop
      break;
    }

    else if (click_done && button == vpMouseButton::button1) { // Move to the next point

      if (index_p < P.size())
        index_p ++;
    }
    //std::cout << "Loop time: " << vpTime::measureTimeMs() - loop_time_start << std::endl;

  }

  robot.stop(jointArmsNames_tot);

  return 0;
}
Example #2
int main(int argc, const char* argv[])
{
  std::string opt_ip = "131.254.10.126";
  bool opt_language_english = true;
  bool opt_debug = false;

  for (unsigned int i=0; i<argc; i++) {
    if (std::string(argv[i]) == "--ip")
      opt_ip = argv[i+1];
    else if (std::string(argv[i]) == "--fr")
      opt_language_english = false;
    else if (std::string(argv[i]) == "--debug")
      opt_debug = true;
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0] << "[--ip <robot address>] [--fr]" << std::endl;
      return 0;
    }
  }

  std::string camera_name = "CameraTopPepper";

  // Open the grabber for the acquisition of the images from the robot
  vpNaoqiGrabber g;
  if (! opt_ip.empty())
    g.setRobotIp(opt_ip);
  g.setFramerate(30);
  g.setCamera(0);
  g.open();


  vpCameraParameters cam = vpNaoqiGrabber::getIntrinsicCameraParameters(AL::kQVGA,camera_name, vpCameraParameters::perspectiveProjWithDistortion);
  vpHomogeneousMatrix eMc = vpNaoqiGrabber::getExtrinsicCameraParameters(camera_name,vpCameraParameters::perspectiveProjWithDistortion);

  std::cout << "eMc:" << std::endl << eMc << std::endl;
  std::cout << "cam:" << std::endl << cam << std::endl;
  vpNaoqiRobot robot;

  // Connect to the robot
  if (! opt_ip.empty())
    robot.setRobotIp(opt_ip);
  robot.open();



  if (robot.getRobotType() != vpNaoqiRobot::Pepper)
  {
    std::cout << "ERROR: You are not connected to Pepper, but to a different Robot. Check the IP. " << std::endl;
    return 0;
  }

  std::vector<std::string> jointNames_head = robot.getBodyNames("Head");

  // Open Proxy for the speech
  AL::ALTextToSpeechProxy tts(opt_ip, 9559);
  std::string phraseToSay;
  if (opt_language_english)
  {
    tts.setLanguage("English");
    phraseToSay = " \\emph=2\\ Hi,\\pau=200\\ How are you ?";
  }
  else
  {
    tts.setLanguage("French");
    phraseToSay = " \\emph=2\\ Bonjour,\\pau=200\\ comment vas  tu ?";
  }

  // Initialize PeoplePerception
  AL::ALPeoplePerceptionProxy people_proxy(opt_ip, 9559);
  AL::ALMemoryProxy m_memProxy(opt_ip, 9559);
  people_proxy.subscribe("People", 30, 0.0);
  std::cout << "period: " << people_proxy.getCurrentPeriod() << std::endl;

  // Open proxy for speech recognition
  AL::ALSpeechRecognitionProxy asr(opt_ip, 9559);
  //  asr.unsubscribe("Test_ASR");
  //   return 0 ;

  asr.setVisualExpression(false);
  asr.setLanguage("English");
  std::vector<std::string> vocabulary;
  vocabulary.push_back("follow me");
  vocabulary.push_back("stop");

  // Set the vocabulary
  asr.setVocabulary(vocabulary,false);

  // Start the speech recognition engine with user Test_ASR
  asr.subscribe("Test_ASR");
  std::cout << "Speech recognition engine started" << std::endl;

  // Proxy to control the leds
  AL::ALLedsProxy led_proxy(opt_ip, 9559);

  //Declare plots
  vpPlot *plotter_diff_vel;
  vpPlot *plotter_vel;
  vpPlot *plotter_error;
  vpPlot *plotter_distance;

  if (opt_debug)
  {
    // Plotting
    plotter_diff_vel = new vpPlot (2);
    plotter_diff_vel->initGraph(0, 2);
    plotter_diff_vel->initGraph(1, 2);
    plotter_diff_vel->setTitle(0,  "HeadYaw");
    plotter_diff_vel->setTitle(1,  "HeadPitch");

    plotter_vel= new vpPlot (1);
    plotter_vel->initGraph(0, 5);
    plotter_vel->setLegend(0, 0, "vx");
    plotter_vel->setLegend(0, 1, "vy");
    plotter_vel->setLegend(0, 2, "wz");
    plotter_vel->setLegend(0, 3, "q_yaw");
    plotter_vel->setLegend(0, 4, "q_pitch");

    plotter_error = new vpPlot(1);
    plotter_error->initGraph(0, 3);
    plotter_error->setLegend(0, 0, "x");
    plotter_error->setLegend(0, 1, "y");
    plotter_error->setLegend(0, 2, "Z");

    plotter_distance = new vpPlot (1);
    plotter_distance->initGraph(0, 1);
    plotter_distance->setLegend(0, 0, "dist");
  }

  try {
    vpImage<unsigned char> I(g.getHeight(), g.getWidth());
    vpDisplayX d(I);
    vpDisplay::setTitle(I, "ViSP viewer");

    vpFaceTrackerOkao face_tracker(opt_ip,9559);

    double dist = 0.0; // Distance between the person detected by PeoplePerception and the detected face

    // Set Visual Servoing:
    vpServo task;
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    //    vpAdaptiveGain lambda_adapt;
    //    lambda_adapt.initStandard(1.6, 1.8, 15);
    vpAdaptiveGain lambda_base(1.2, 1.0, 10); // 2.3, 0.7, 15
    vpAdaptiveGain lambda_nobase(5, 2.9, 15); // 4, 0.5, 15
    task.setLambda(lambda_base) ;

    double Z = 0.9;
    double Zd = 0.9;
    bool stop_vxy = false;
    bool move_base = true;
    bool move_base_prev = true;

    // Create the desired  visual feature
    vpFeaturePoint s;
    vpFeaturePoint sd;
    vpImagePoint ip(I.getHeight()/2, I.getWidth()/2);
    // Create the current x visual feature
    vpFeatureBuilder::create(s, cam, ip);
    vpFeatureBuilder::create(sd, cam, ip);

    //   sd.buildFrom( I.getWidth()/2, I.getHeight()/2, Zd);

    AL::ALValue limit_yaw = robot.getProxy()->getLimits("HeadYaw");
    std::cout << limit_yaw[0][0] << " " << limit_yaw[0][1] << std::endl;

    // Add the feature
    task.addFeature(s, sd) ;

    vpFeatureDepth s_Z, s_Zd;
    s_Z.buildFrom(s.get_x(), s.get_y(), Z , 0); // log(Z/Z*) = 0 that's why the last parameter is 0
    s_Zd.buildFrom(sd.get_x(), sd.get_y(), Zd , 0); // log(Z/Z*) = 0 that's why the last parameter is 0

    // Add the feature
    task.addFeature(s_Z, s_Zd);

    // Jacobian 6x5 (vx, vy, wz, q_yaw, q_pitch)
    vpMatrix tJe(6,5);
    tJe[0][0]= 1;
    tJe[1][1]= 1;
    tJe[5][2]= 1;
    vpMatrix eJe(6,5);
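    // tJe maps the 5 controlled DoF (base vx, vy, wz plus the two head joints)
    // to a 6D twist expressed in the torso frame; the head columns are filled
    // in at each iteration from the current head Jacobian.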

    double servo_time_init = 0;

    vpImagePoint head_cog_cur;
    vpImagePoint head_cog_des(I.getHeight()/2, I.getWidth()/2);
    vpColVector q_dot;

    bool reinit_servo = true;
    unsigned long loop_iter = 0;

    std::vector<std::string> recognized_names;
    std::map<std::string,unsigned int> detected_face_map;
    bool detection_phase = true;
    unsigned int f_count = 0;
    //    AL::ALValue leg_names  = AL::ALValue::array("HipRoll","HipPitch", "KneePitch" );
    //    AL::ALValue values  = AL::ALValue::array(0.0, 0.0, 0.0 );

    // robot.getProxy()->setMoveArmsEnabled(false,false);
    std::vector<std::string> arm_names = robot.getBodyNames("LArm");
    std::vector<std::string> rarm_names = robot.getBodyNames("RArm");
    std::vector<std::string> leg_names(3);
    leg_names[0]= "HipRoll";
    leg_names[1]= "HipPitch";
    leg_names[2]= "KneePitch";

    arm_names.insert( arm_names.end(), rarm_names.begin(), rarm_names.end() );
    arm_names.insert( arm_names.end(), leg_names.begin(), leg_names.end() );

    std::vector<float> arm_values(arm_names.size(),0.0);

    for (unsigned int i = 0; i < arm_names.size(); i++ )
      std::cout << arm_names[i]<< std::endl;

    robot.getPosition(arm_names, arm_values,false);

    vpImagePoint best_cog_face_peoplep;
    bool person_found = false;

    double t_prev = vpTime::measureTimeSecond();

    while(1) {
      if (reinit_servo) {
        servo_time_init = vpTime::measureTimeSecond();
        t_prev = vpTime::measureTimeSecond();
        reinit_servo = false;
        led_proxy.fadeRGB("FaceLeds","white",0.1);
      }

      double t = vpTime::measureTimeMs();
      if (0) // (opt_debug)
      {
        g.acquire(I);
      }
      vpDisplay::display(I);
      // Detect face
      bool face_found = face_tracker.detect();
      stop_vxy = false;

      //std::cout << "Loop time face_tracker: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      // Check speech recognition result

      AL::ALValue result_speech = m_memProxy.getData("WordRecognized");

      if ( ((result_speech[0]) == vocabulary[0]) && (double (result_speech[1]) > 0.4 )) //move
      {
        //std::cout << "Recognized: " << result_speech[0] << "with confidence of " << result_speech[1] << std::endl;
        task.setLambda(lambda_base) ;

        move_base = true;
      }
      else if ( (result_speech[0] == vocabulary[1]) && (double(result_speech[1]) > 0.4 )) //stop
      {
        //std::cout << "Recognized: " << result_speech[0] << "with confidence of " << result_speech[1] << std::endl;
        task.setLambda(lambda_nobase) ;
        move_base = false;
      }

      if (move_base != move_base_prev)
      {
        if (move_base)
        {
          phraseToSay = "Ok, I will follow you.";
          tts.post.say(phraseToSay);
        }
        else
        {
          phraseToSay = "Ok, I will stop.";
          tts.post.say(phraseToSay);
        }

      }

      //robot.setStiffness(arm_names,0.0);
      //robot.getProxy()->setAngles(arm_names,arm_values,1.0);
      //robot.getProxy()->setAngles("HipRoll",0.0,1.0);

      //std::cout << "Loop time check_speech: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      move_base_prev = move_base;

      if (face_found) {
        std::ostringstream text;
        text << "Found " << face_tracker.getNbObjects() << " face(s)";
        vpDisplay::displayText(I, 10, 10, text.str(), vpColor::red);
        for(size_t i=0; i < face_tracker.getNbObjects(); i++) {
          vpRect bbox = face_tracker.getBBox(i);
          if (i == 0)
            vpDisplay::displayRectangle(I, bbox, vpColor::red, false, 2);
          else
            vpDisplay::displayRectangle(I, bbox, vpColor::green, false, 1);
          vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), face_tracker.getMessage(i) , vpColor::red);
        }

        led_proxy.post.fadeRGB("FaceLeds","blue",0.1);

        double u = face_tracker.getCog(0).get_u();
        double v = face_tracker.getCog(0).get_v();
        if (u<= g.getWidth() && v <= g.getHeight())
          head_cog_cur.set_uv(u,v);

        vpRect bbox = face_tracker.getBBox(0);
        std::string name = face_tracker.getMessage(0);

        //std::cout << "Loop time face print " << vpTime::measureTimeMs() - t << " ms" << std::endl;
      }

      AL::ALValue result = m_memProxy.getData("PeoplePerception/VisiblePeopleList");

      //std::cout << "Loop time get Data PeoplePerception " << vpTime::measureTimeMs() - t << " ms" << std::endl;

      person_found = false;
      if (result.getSize() > 0)
      {
        AL::ALValue info = m_memProxy.getData("PeoplePerception/PeopleDetected");
        int num_people = info[1].getSize();
        std::ostringstream text;
        text << "Found " << num_people << " person(s)";
        vpDisplay::displayText(I, 10, 10, text.str(), vpColor::red);

        person_found = true;

        if (face_found) // Try to match the two detections
        {
          vpImagePoint cog_face;
          double dist_min = 1000;
          unsigned int index_person = 0;
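
          // PeoplePerception reports each person as angular offsets (alpha, beta);
          // below they are mapped to pixels assuming a linear angle-to-pixel
          // relation, then matched to the face-detection CoG by pixel distance.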

          for (unsigned int i = 0; i < num_people; i++)
          {

            float alpha =  info[1][i][2];
            float beta =  info[1][i][3];
            //Z = Zd;
            // Centre of the face in the image
            float x =  g.getWidth()/2 -  g.getWidth() * beta;
            float y =  g.getHeight()/2  + g.getHeight() * alpha;
            cog_face.set_uv(x,y);
            dist = vpImagePoint::distance(cog_face, head_cog_cur);

            if (dist < dist_min)
            {
              dist_min = dist;
              best_cog_face_peoplep = cog_face;
              index_person  = i;
            }
          }

          vpDisplay::displayCross(I, best_cog_face_peoplep, 10, vpColor::white);

          if (dist_min < 55.)
          {
            Z = info[1][index_person][1]; // Current distance
          }

        }
        else // Take the first one on the list
        {
          float alpha =  info[1][0][2];
          float beta =  info[1][0][3];
          //Z = Zd;
          // Centre of the face in the image
          float x =  g.getWidth()/2 -  g.getWidth() * beta;
          float y =  g.getHeight()/2  + g.getHeight() * alpha;
          head_cog_cur.set_uv(x,y);
          Z = info[1][0][1]; // Current distance

        }

      }
      else
      {
        std::cout << "No distance computed " << std::endl;
        stop_vxy = true;
        robot.getProxy()->setAngles("HipRoll",0.0,1.0);
        //Z = Zd;
      }
      //          float alpha =  info[1][0][2];
      //          float beta =  info[1][0][3];
      //          //Z = Zd;
      //          // Centre of face into the image
      //          float x =  g.getWidth()/2 -  g.getWidth() * beta;
      //          float y =  g.getHeight()/2  + g.getHeight() * alpha;

      //          vpImagePoint cog_face(y,x);
      //          dist = vpImagePoint::distance(cog_face,head_cog_cur);
      //          if (dist < 55.)
      //            Z = info[1][0][1]; // Current distance
      //          else
      //            stop_vxy = true;
      //        }
      //        else
      //        {
      //          std::cout << "No distance computed " << std::endl;
      //          stop_vxy = true;
      //          //Z = Zd;
      //        }


      //std::cout << "Loop time before VS: " << vpTime::measureTimeMs() - t << " ms" << std::endl;
      if (face_found || person_found )
      {
        // Get Head Jacobian (6x2)
        vpMatrix torso_eJe_head;
        robot.get_eJe("Head",torso_eJe_head);

        // Copy the head Jacobian into columns 3-4 of tJe (after vx, vy, wz)
        //        vpColVector col_wz(6);
        //        col_wz[5] = 1;
        for (unsigned int i = 0; i < 6; i++)
          for (unsigned int j = 0; j < torso_eJe_head.getCols(); j++)
            tJe[i][j+3] = torso_eJe_head[i][j];

        // std::cout << "tJe" << std::endl << tJe << std::endl;

        //        vpHomogeneousMatrix torsoMHeadPith( robot.getProxy()->getTransform(jointNames_head[jointNames_head.size()-1], 0, true));// get transformation  matrix between torso and HeadRoll
        vpHomogeneousMatrix torsoMHeadPitch(robot.getProxy()->getTransform("HeadPitch", 0, true)); // Transformation matrix between torso and HeadPitch

        vpVelocityTwistMatrix HeadPitchVLtorso(torsoMHeadPitch.inverse());

        for(unsigned int i=0; i< 3; i++)
          for(unsigned int j=0; j< 3; j++)
            HeadPitchVLtorso[i][j+3] = 0;
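        // Zeroing the [t]x R block of the twist keeps only the rotation of the
        // torso-to-HeadPitch frame change, so the translation between the two
        // frames does not couple into the transformed Jacobian.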

        //std::cout << "HeadPitchVLtorso: " << std::endl << HeadPitchVLtorso << std::endl;
        // Transform the matrix
        eJe = HeadPitchVLtorso *tJe;

        // std::cout << "eJe" << std::endl << eJe << std::endl;

        task.set_eJe( eJe );
        task.set_cVe( vpVelocityTwistMatrix(eMc.inverse()) );

        vpDisplay::displayCross(I, head_cog_des, 10, vpColor::blue);
        vpDisplay::displayCross(I, head_cog_cur, 10, vpColor::green);
        //  std::cout << "head_cog_des:" << std::endl << head_cog_des << std::endl;
        //  std::cout << "head_cog_cur:" << std::endl << head_cog_cur << std::endl;

        // Update the current x feature
        double x,y;
        vpPixelMeterConversion::convertPoint(cam, head_cog_cur, x, y);
        s.buildFrom(x, y, Z);
        //s.set_xyZ(head_cog_cur.get_u(), head_cog_cur.get_v(), Z);

        // Update the log(Z/Z*) feature. Since the depth Z changes, we need to update the interaction matrix
        s_Z.buildFrom(s.get_x(), s.get_y(), Z, log(Z/Zd)) ;

        q_dot = task.computeControlLaw(vpTime::measureTimeSecond() - servo_time_init);

        //std::cout << "Loop time compute VS: " << vpTime::measureTimeMs() - t << " ms" << std::endl;

        vpMatrix P = task.getI_WpW();
        double alpha = -3.3;
        double min = limit_yaw[0][0];
        double max = limit_yaw[0][1];

        vpColVector z_q2 (q_dot.size());
        vpColVector q_yaw = robot.getPosition(jointNames_head[0]);

        z_q2[3] = 2 * alpha * q_yaw[0]/ pow((max - min),2);
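        // Secondary task: z_q2 is the gradient of a quadratic cost on HeadYaw,
        // scaled by its joint range; projected through P = I - W+W it pulls the
        // yaw joint back towards the centre without disturbing the main task.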

        vpColVector q3 = P * z_q2;
        //if (q3.euclideanNorm()<10.0)
        q_dot =  q_dot + q3;

        std::vector<float> vel(jointNames_head.size());
        vel[0] = q_dot[3];
        vel[1] = q_dot[4];

        // Compute the distance in pixel between the target and the center of the image
        double distance = vpImagePoint::distance(head_cog_cur, head_cog_des);
        //if (distance > 0.03*I.getWidth())
         std::cout << "q:" << std::endl << q_dot << std::endl;
        //  std::cout << "vel" << std::endl << q_dot << std::endl;
        //std::cout << "distance" << std::endl << distance <<" -- " << 0.03*I.getWidth() << " ++ " << I.getWidth() << std::endl;

        //        if (distance > 0.1*I.getWidth())
        robot.setVelocity(jointNames_head,vel);
        //        else
        //        {
        //          std::cout << "Setting hipRoll to zero" << std::endl;
        //          robot.getProxy()->setAngles("HipRoll",0.0,1.0);
        //        }
        // std::cout << "errorZ: " << task.getError()[2] << std::endl;
        // std::cout << "stop_vxy: " << stop_vxy << std::endl;

        if (std::fabs(Z -Zd) < 0.05 || stop_vxy || !move_base)
          robot.setBaseVelocity(0.0, 0.0, q_dot[2]);
        else
          robot.setBaseVelocity(q_dot[0], q_dot[1], q_dot[2]);

        if (opt_debug)
        {
          vpColVector vel_head = robot.getJointVelocity(jointNames_head);
          for (unsigned int i=0 ; i < jointNames_head.size() ; i++) {
            plotter_diff_vel->plot(i, 1, loop_iter, q_dot[i+3]);
            plotter_diff_vel->plot(i, 0, loop_iter, vel_head[i]);
          }
          plotter_error->plot(0,loop_iter,task.getError());
          plotter_vel->plot(0,loop_iter, q3);
          plotter_distance->plot(0,0,loop_iter,Z);
        }

        //        if (detection_phase)
        //        {

        //          //if (score >= 0.4 && distance < 0.06*I.getWidth() && bbox.getSize() > 3000)
        //          if (distance < 0.06*I.getWidth() && bbox.getSize() > 3000)
        //          {
        //            if (opt_debug)
        //            {
        //              vpDisplay::displayRectangle(I, bbox, vpColor::red, false, 1);
        //              vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), name, vpColor::red);
        //            }
        //            detected_face_map[name]++;
        //            f_count++;
        //          }
        //          else
        //          {
        //            if (opt_debug)
        //            {
        //              vpDisplay::displayRectangle(I, bbox, vpColor::green, false, 1);
        //              vpDisplay::displayText(I, (int)bbox.getTop()-10, (int)bbox.getLeft(), name, vpColor::green);
        //            }
        //          }
        //          if (f_count>10)
        //          {
        //            detection_phase = false;
        //            f_count = 0;
        //          }
        //        }
        //        else
        //        {
        //          std::string recognized_person_name = std::max_element(detected_face_map.begin(), detected_face_map.end(), pred)->first;
        //          unsigned int times = std::max_element(detected_face_map.begin(), detected_face_map.end(), pred)->second;

        //          if (!in_array(recognized_person_name, recognized_names) && recognized_person_name != "Unknown") {

        //            if (opt_language_english)
        //            {
        //              phraseToSay = "\\emph=2\\ Hi \\wait=200\\ \\emph=2\\" + recognized_person_name + "\\pau=200\\ How are you ?";
        //            }
        //            else
        //            {
        //              phraseToSay = "\\emph=2\\ Salut \\wait=200\\ \\emph=2\\" + recognized_person_name + "\\pau=200\\ comment vas  tu ?";;
        //            }

        //            std::cout << phraseToSay << std::endl;
        //            tts.post.say(phraseToSay);
        //            recognized_names.push_back(recognized_person_name);
        //          }
        //          if (!in_array(recognized_person_name, recognized_names) && recognized_person_name == "Unknown"
        //              && times > 15)
        //          {

        //            if (opt_language_english)
        //            {
        //              phraseToSay = "\\emph=2\\ Hi \\wait=200\\ \\emph=2\\. I don't know you! \\emph=2\\ What's your name?";
        //            }
        //            else
        //            {
        //              phraseToSay = " \\emph=2\\ Salut \\wait=200\\ \\emph=2\\. Je ne te connais pas! \\emph=2\\  Comment t'appelles-tu ?";
        //            }

        //            std::cout << phraseToSay << std::endl;
        //            tts.post.say(phraseToSay);
        //            recognized_names.push_back(recognized_person_name);
        //          }

        //          detection_phase = true;
        //          detected_face_map.clear();
        //        }
      }
      else {
        robot.stop(jointNames_head);
        robot.stopBase();
        std::cout << "Stop!" << std::endl;
        reinit_servo = true;
      }

      //if (opt_debug)
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
      loop_iter ++;
      std::cout << "Loop time: " << vpTime::measureTimeMs() - t << " ms" << std::endl;
    }
    robot.stop(jointNames_head);
    robot.stopBase();

    tts.setLanguage("French");
  //  tts.exit();

    people_proxy.unsubscribe("People");
   // people_proxy.exit();

    asr.unsubscribe("Test_ASR");
    asr.setVisualExpression(true);

  //  asr.exit();

    tts.setLanguage("French");
 //   tts.exit();

    led_proxy.fadeRGB("FaceLeds","white",0.1);
  //  led_proxy.exit();

    vpDisplay::getClick(I, true);

  }
  catch(vpException &e) {
    std::cout << e.getMessage() << std::endl;
  }
  catch (const AL::ALError& e)
  {
    std::cerr << "Caught exception " << e.what() << std::endl;
  }

  std::cout << "The end: stop the robot..." << std::endl;

  //tts.setLanguage("French");
  //  tts.exit();


  robot.stop(jointNames_head);
  robot.stopBase();

  return 0;
}
Example #3
int main(int argc, const char* argv[])
{
  std::string opt_ip = "198.18.0.1";;
  std::string opt_data_folder = std::string(ROMEOTK_DATA_FOLDER);
  bool opt_Reye = false;


  // Learning folder in /tmp/$USERNAME
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save new files...
  std::string learning_folder;
#if defined(_WIN32)
  learning_folder ="C:/temp/" + username;
#else
  learning_folder ="/tmp/" + username;
#endif
  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(learning_folder) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(learning_folder);
    }
    catch (vpException &e) {
      std::cout << "Cannot create " << learning_folder << std::endl;
      std::cout << "Error: " << e.getMessage() <<std::endl;
      return 0;
    }
  }



  for (unsigned int i=0; i<argc; i++) {
    if (std::string(argv[i]) == "--ip")
      opt_ip = argv[i+1];
    else if ( std::string(argv[i]) == "--reye")
      opt_Reye = true;
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0] << "[--ip <robot address>]" << std::endl;
      return 0;
    }
  }

  std::vector<std::string> chain_name(2); // LArm or RArm
  chain_name[0] = "LArm";
  chain_name[1] = "RArm";


  /************************************************************************************************/
  /** Open the grabber for the acquisition of the images from the robot*/
  vpNaoqiGrabber g;
  g.setFramerate(15);

  // Initialize constant transformations
  vpHomogeneousMatrix eMc;

  if (opt_Reye)
  {
    std::cout << "Using camera Eye Right" << std::endl;
    g.setCamera(3); // CameraRightEye
    eMc = g.get_eMc(vpCameraParameters::perspectiveProjWithDistortion,"CameraRightEye");
  }
  else
  {
    std::cout << "Using camera Eye Right" << std::endl;
    g.setCamera(2); // CameraLeftEye
    eMc = g.get_eMc(vpCameraParameters::perspectiveProjWithDistortion,"CameraLeftEye");
  }
  std::cout << "eMc: " << eMc << std::endl;
  if (! opt_ip.empty())
    g.setRobotIp(opt_ip);
  g.open();
  std::string camera_name = g.getCameraName();
  std::cout << "Camera name: " << camera_name << std::endl;

  vpCameraParameters cam = g.getCameraParameters(vpCameraParameters::perspectiveProjWithDistortion);
  std::cout << "Camera parameters: " << cam << std::endl;


  /** Initialize ViSP image, display and camera parameters */
  vpImage<unsigned char> I(g.getHeight(), g.getWidth());
  vpDisplayX d(I);
  vpDisplay::setTitle(I, "Camera view");

  //Initialize opencv color image
  cv::Mat cvI = cv::Mat(cv::Size(g.getWidth(), g.getHeight()), CV_8UC3);



  /************************************************************************************************/


  /** Initialize the box target */

  std::string opt_name_file_color_box_target = opt_data_folder + "/" +"target/plate_blob/color.txt";


  vpBlobsTargetTracker box_tracker;
  bool status_box_tracker;
  vpHomogeneousMatrix cMbox;

  const double L = 0.04/2;
  std::vector <vpPoint> points(4);
  points[2].setWorldCoordinates(-L,-L, 0) ;
  points[1].setWorldCoordinates(-L,L, 0) ;
  points[0].setWorldCoordinates(L,L, 0) ;
  points[3].setWorldCoordinates(L,-L,0) ;

  box_tracker.setName("plate");
  box_tracker.setCameraParameters(cam);
  box_tracker.setPoints(points);
  box_tracker.setGrayLevelMaxBlob(60);


  box_tracker.setLeftHandTarget(false);

  if(!box_tracker.loadHSV(opt_name_file_color_box_target))
  {
    std::cout << "Error opening the file "<< opt_name_file_color_box_target << std::endl;
  }



  /************************************************************************************************/
  /** Initialization target hands*/

  std::string opt_name_file_color_target_path = opt_data_folder + "/" +"target/";
  std::string opt_name_file_color_target_l = opt_name_file_color_target_path + chain_name[0] +"/color.txt";
  std::string opt_name_file_color_target_r = opt_name_file_color_target_path + chain_name[1] +"/color.txt";

  std::string opt_name_file_color_target1_l = opt_name_file_color_target_path + chain_name[0] +"/color1.txt";
  std::string opt_name_file_color_target1_r = opt_name_file_color_target_path + chain_name[1] +"/color1.txt";

  std::vector<vpBlobsTargetTracker*> hand_tracker;
  std::vector<bool>  status_hand_tracker(2);
  std::vector<vpHomogeneousMatrix>  cMo_hand(2);

  const double L1 = 0.025/2;
  std::vector <vpPoint> points1(4);
  points1[2].setWorldCoordinates(-L1,-L1, 0) ;
  points1[1].setWorldCoordinates(-L1,L1, 0) ;
  points1[0].setWorldCoordinates(L1,L1, 0) ;
  points1[3].setWorldCoordinates(L1,-L1,0) ;


  vpBlobsTargetTracker hand_tracker_l;
  hand_tracker_l.setName(chain_name[0]);
  hand_tracker_l.setCameraParameters(cam);
  hand_tracker_l.setPoints(points1);
  hand_tracker_l.setLeftHandTarget(true);

  if(!hand_tracker_l.loadHSV(opt_name_file_color_target_l))
    std::cout << "Error opening the file "<< opt_name_file_color_target_l << std::endl;


  vpBlobsTargetTracker hand_tracker_r;
  hand_tracker_r.setName(chain_name[1]);
  hand_tracker_r.setCameraParameters(cam);
  hand_tracker_r.setPoints(points);
  hand_tracker_r.setLeftHandTarget(false);

  if(!hand_tracker_r.loadHSV(opt_name_file_color_target1_r))
    std::cout << "Error opening the file "<< opt_name_file_color_target_r << std::endl;

  hand_tracker.push_back(&hand_tracker_l);
  hand_tracker.push_back(&hand_tracker_r);


  /************************************************************************************************/

  // Constant transformation Target Frame to Arm end-effector (WristPitch)
  std::vector<vpHomogeneousMatrix> hand_Me_Arm(2);
  std::string filename_transform = std::string(ROMEOTK_DATA_FOLDER) + "/transformation.xml";
  // Create twist matrix from box to Arm end-effector (WristPitch)
  std::vector <vpVelocityTwistMatrix> box_Ve_Arm(2);
  // Create twist matrix from target Frame to Arm end-effector (WristPitch)
  std::vector <vpVelocityTwistMatrix> hand_Ve_Arm(2);

  // Constant transformation Target Frame to box to target Hand
  std::vector<vpHomogeneousMatrix> box_Mhand(2);


  for (unsigned int i = 0; i < 2; i++)
  {

    std::string name_transform = "qrcode_M_e_" + chain_name[i];
    vpXmlParserHomogeneousMatrix pm; // Create a XML parser

    if (pm.parse(hand_Me_Arm[i], filename_transform, name_transform) != vpXmlParserHomogeneousMatrix::SEQUENCE_OK) {
      std::cout << "Cannot found the homogeneous matrix named " << name_transform << "." << std::endl;
      return 0;
    }
    else
      std::cout << "Homogeneous matrix " << name_transform <<": " << std::endl << hand_Me_Arm[i] << std::endl;

    hand_Ve_Arm[i].buildFrom(hand_Me_Arm[i]);

  }

  /************************************************************************************************/

  /** Create a new vpNaoqiRobot instance */
  vpNaoqiRobot robot;
  if (! opt_ip.empty())
    robot.setRobotIp(opt_ip);
  robot.open();

  std::vector<std::string> jointNames = robot.getBodyNames("Head");
  //jointNames.pop_back(); // We don't consider  the last joint of the head = HeadRoll
  std::vector<std::string> jointNamesLEye = robot.getBodyNames("LEye");
  std::vector<std::string> jointNamesREye = robot.getBodyNames("REye");

  jointNames.insert(jointNames.end(), jointNamesLEye.begin(), jointNamesLEye.end());
  std::vector<std::string> jointHeadNames_tot = jointNames;
  jointHeadNames_tot.push_back(jointNamesREye.at(0));
  jointHeadNames_tot.push_back(jointNamesREye.at(1));

  /************************************************************************************************/

  std::vector<bool> grasp_servo_converged(2);
  grasp_servo_converged[0]= false;
  grasp_servo_converged[1]= false;


  std::vector<vpMatrix> eJe(2);


  // Initialize arms servoing
  vpServoArm servo_larm_l;
  vpServoArm servo_larm_r;

  std::vector<vpServoArm*> servo_larm;
  servo_larm.push_back(&servo_larm_l);
  servo_larm.push_back(&servo_larm_r);

  std::vector < std::vector<std::string > > jointNames_arm(2);

  jointNames_arm[0] = robot.getBodyNames(chain_name[0]);
  jointNames_arm[1] = robot.getBodyNames(chain_name[1]);
  // Remove the last joint (Hand), which is not used in the servo
  jointNames_arm[0].pop_back();
  jointNames_arm[1].pop_back();

  std::vector<std::string> jointArmsNames_tot = jointNames_arm[0];

  jointArmsNames_tot.insert(jointArmsNames_tot.end(), jointNames_arm[1].begin(), jointNames_arm[1].end());


  const unsigned int numArmJoints =  jointNames_arm[0].size();
  std::vector<vpHomogeneousMatrix> box_dMbox(2);

  // Vector containing the joint velocity vectors of the arms
  std::vector<vpColVector> q_dot_arm;
  // Vector containing the joint velocity vectors of the arms for the secondary task
  std::vector<vpColVector> q_dot_arm_sec;
  // Vector joint position of the arms
  std::vector<vpColVector> q;
  // Vector joint real velocities of the arms
  std::vector<vpColVector> q_dot_real;

  vpColVector   q_temp(numArmJoints);
  q_dot_arm.push_back(q_temp);
  q_dot_arm.push_back(q_temp);

  q_dot_arm_sec.push_back(q_temp);
  q_dot_arm_sec.push_back(q_temp);

  q.push_back(q_temp);
  q.push_back(q_temp);

  q_dot_real.push_back(q_temp);
  q_dot_real.push_back(q_temp);



  // Initialize the joint avoidance scheme from the joint limits
  std::vector<vpColVector> jointMin;
  std::vector<vpColVector> jointMax;

  jointMin.push_back(q_temp);
  jointMin.push_back(q_temp);

  jointMax.push_back(q_temp);
  jointMax.push_back(q_temp);

  for (unsigned int i = 0; i< 2; i++)
  {
    jointMin[i] = robot.getJointMin(chain_name[i]);
    jointMax[i] = robot.getJointMax(chain_name[i]);

    jointMin[i].resize(numArmJoints,false);
    jointMax[i].resize(numArmJoints,false);

    //        std::cout <<  jointMin[i].size() << std::endl;
    //        std::cout <<  jointMax[i].size() << std::endl;
    //         std::cout << "max " <<  jointMax[i] << std::endl;
  }



  std::vector<bool> first_time_arm_servo(2);
  first_time_arm_servo[0] = true;
  first_time_arm_servo[1] = true;

  std::vector< double> servo_arm_time_init(2);
  servo_arm_time_init[0] = 0.0;
  servo_arm_time_init[1] = 0.0;

  std::vector<int> cpt_iter_servo_grasp(2);
  cpt_iter_servo_grasp[0] = 0;
  cpt_iter_servo_grasp[1] = 0;

  unsigned int index_hand = 1;


  //    vpHomogeneousMatrix M_offset;
  //    M_offset[1][3] = 0.00;
  //    M_offset[0][3] = 0.00;
  //    M_offset[2][3] = 0.00;

  vpHomogeneousMatrix M_offset;
  M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;
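  // vpHomogeneousMatrix::buildFrom(tx, ty, tz, tux, tuy, tuz) takes a translation in meters
  // and a theta-u rotation in radians, so this initializes M_offset to the identity.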

  double d_t = 0.01;
  double d_r = 0.0;

  bool first_time_box_pose = true;


  /************************************************************************************************/

  vpMouseButton::vpMouseButtonType button;

  vpHomogeneousMatrix elMb; // Homogeneous matrix from right wrist roll to box
  vpHomogeneousMatrix cMbox_d; // Desired box final pose

  State_t state;
  state = CalibrateRigthArm;


  // AL::ALValue names_head = AL::ALValue::array("NeckYaw", "NeckPitch", "HeadPitch", "HeadRoll", "LEyeYaw", "LEyePitch", "REyeYaw", "REyePitch");
  // Big plate
  // AL::ALValue angles_head = AL::ALValue::array(vpMath::rad(-23.2), vpMath::rad(16.6), vpMath::rad(10.3), vpMath::rad(0.0), 0.0, 0.0, 0.0, 0.0);
  // Small plate
  AL::ALValue angles_head = AL::ALValue::array(vpMath::rad(-15.2), vpMath::rad(17.6), vpMath::rad(10.3), vpMath::rad(0.0), 0.0, vpMath::rad(9.8), 0.0, 0.0);
  float fractionMaxSpeed = 0.1f;
  robot.getProxy()->setAngles(jointHeadNames_tot, angles_head, fractionMaxSpeed);
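  // ALMotionProxy::setAngles() is non-blocking: it starts an interpolation towards the
  // target angles at the given fraction of the maximum joint speed and returns immediately,
  // so the head keeps moving while the loop below starts grabbing images.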



  while(1) {
    double loop_time_start = vpTime::measureTimeMs();


    g.acquire(cvI);
    vpImageConvert::convert(cvI, I);
    vpDisplay::display(I);
    bool click_done = vpDisplay::getClick(I, button, false);

    //        if (state < VSBox)
    //        {

    char key[10];
    bool ret = vpDisplay::getKeyboardEvent(I, key, false);
    std::string s = ret ? std::string(key) : std::string(); // key is only filled when an event occurred

    if (ret)
    {
      if (s == "r")
      {
        M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;

        d_t = 0.0;
        d_r = 0.2;
        std::cout << "Rotation mode. " << std::endl;
      }

      if (s == "t")
      {
        M_offset.buildFrom(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ;

        d_t = 0.01;
        d_r = 0.0;
        std::cout << "Translation mode. " << std::endl;
      }

      if (s == "h")
      {
        hand_tracker[index_hand]->setManualBlobInit(true);
        hand_tracker[index_hand]->setForceDetection(true);
      }
      else if (s == "+")
      {
        unsigned int value = hand_tracker[index_hand]->getGrayLevelMaxBlob() + 10;
        hand_tracker[index_hand]->setGrayLevelMaxBlob(value);
        std::cout << "Set GrayLevelMaxBlob to " << value << "." << std::endl;
      }
      else if (s == "-")
      {
        unsigned int value = hand_tracker[index_hand]->getGrayLevelMaxBlob() - 10;
        hand_tracker[index_hand]->setGrayLevelMaxBlob(value);
        std::cout << "Set GrayLevelMaxBlob to " << value << "." << std::endl;
      }

      //          |x
      //      z\  |
      //        \ |
      //         \|_____ y
      //

      else if (s == "4") //-y
      {
        M_offset.buildFrom(0.0, -d_t, 0.0, 0.0, -d_r, 0.0) ;
      }

      else if (s == "6")  //+y
      {
        M_offset.buildFrom(0.0, d_t, 0.0, 0.0, d_r, 0.0) ;
      }

      else if (s == "8")  //+x
      {
        M_offset.buildFrom(d_t, 0.0, 0.0, d_r, 0.0, 0.0) ;
      }

      else if (s == "2") //-x
      {
        M_offset.buildFrom(-d_t, 0.0, 0.0, -d_r, 0.0, 0.0) ;
      }

      else if (s == "7")//-z
      {
        M_offset.buildFrom(0.0, 0.0, -d_t, 0.0, 0.0, -d_r) ;
      }
      else if (s == "9") //+z
      {
        M_offset.buildFrom(0.0, 0.0, d_t, 0.0, 0.0, d_r) ;
      }

      cMbox_d = cMbox_d * M_offset;
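      // Right-multiplying by M_offset applies the keyboard offset in the desired-box frame
      // (its own local axes), not in the camera frame; repeated key presses therefore
      // accumulate along the axes drawn in the sketch above.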


    }


    if (state < WaitPreGrasp)
    {
      status_hand_tracker[index_hand] = hand_tracker[index_hand]->track(cvI,I);

      if (status_hand_tracker[index_hand] ) { // display the tracking results
        cMo_hand[index_hand] = hand_tracker[index_hand]->get_cMo();
        printPose("cMo right arm: ", cMo_hand[index_hand]);
        // The qrcode frame is only displayed when PBVS is active or learning

        vpDisplay::displayFrame(I, cMo_hand[index_hand], cam, 0.04, vpColor::none, 3);
      }

    }

    //        }

    status_box_tracker = box_tracker.track(cvI,I);
    if (status_box_tracker ) { // display the tracking results
      cMbox = box_tracker.get_cMo();
      printPose("cMo box: ", cMbox);
      // The qrcode frame is only displayed when PBVS is active or learning

      vpDisplay::displayFrame(I, cMbox, cam, 0.04, vpColor::none, 1);


      //            if (first_time_box_pose)
      //            {
      //                // Compute desired box position cMbox_d
      //                cMbox_d = cMbox * M_offset;
      //                first_time_box_pose = false;
      //            }

      // vpDisplay::displayFrame(I, cMbox_d, cam, 0.04, vpColor::red, 1);

    }


    if (state == CalibrateRigthArm &&  status_box_tracker && status_hand_tracker[index_hand] )
    {
      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to calibrate right hand", vpColor::red);

      vpHomogeneousMatrix box_Me_Arm; // Homogeneous matrix from the box to the right wrist

      box_Mhand[index_hand] = cMbox.inverse() *  cMo_hand[index_hand];
      box_Me_Arm = box_Mhand[index_hand] * hand_Me_Arm[index_hand] ; // from box to WristPitch
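      // Frame chain: box_Mhand = cMbox^-1 * cMo_hand is the constant box-to-hand-target
      // transform while the hand holds the box; composing with hand_Me_Arm gives the
      // box-to-wrist transform box_Me_Arm used to build the twist matrix on click.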



      // vpDisplay::displayFrame(I, cMo_hand[index_hand] * (cMbox.inverse() *  cMo_hand[index_hand]).inverse() , cam, 0.04, vpColor::green, 1);


      if (click_done && button == vpMouseButton::button1 ) {

        box_Ve_Arm[index_hand].buildFrom(box_Me_Arm);
        index_hand = 0;
        state = CalibrateLeftArm;

        // BIG plate
        //AL::ALValue names_head     = AL::ALValue::array("NeckYaw","NeckPitch","HeadPitch","HeadRoll","LEyeYaw", "LEyePitch","LEyeYaw", "LEyePitch" );
  //      AL::ALValue angles_head      = AL::ALValue::array(vpMath::rad(8.7), vpMath::rad(16.6), vpMath::rad(10.3), vpMath::rad(0.0), 0.0 , 0.0, 0.0, 0.0  );
        // Small Plate
        AL::ALValue angles_head      = AL::ALValue::array(vpMath::rad(2.4), vpMath::rad(17.6), vpMath::rad(10.3), vpMath::rad(0.0), 0.0 , vpMath::rad(9.8), 0.0, 0.0  );

        float fractionMaxSpeed  = 0.1f;
        robot.getProxy()->setAngles(jointHeadNames_tot, angles_head, fractionMaxSpeed);


        click_done = false;

      }

    }


    if (state == CalibrateLeftArm && status_box_tracker && status_hand_tracker[index_hand] )
    {
      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to calibrate left hand", vpColor::red);

      vpHomogeneousMatrix box_Me_Arm; // Homogeneous matrix from the box to the left wrist
      box_Mhand[index_hand] = cMbox.inverse() *  cMo_hand[index_hand];
      box_Me_Arm = box_Mhand[index_hand] * hand_Me_Arm[index_hand] ; // from box to WristPitch

      //            if (first_time_box_pose)
      //            {
      //                // Compute desired box position cMbox_d
      //                cMbox_d = cMbox * M_offset;
      //                first_time_box_pose = false;

      //            }

      // vpDisplay::displayFrame(I, cMbox_d, cam, 0.04, vpColor::red, 1);

      // vpDisplay::displayFrame(I, cMo_hand[index_hand] * (cMbox.inverse() *  cMo_hand[index_hand]).inverse() , cam, 0.04, vpColor::green, 1);


      if (click_done && button == vpMouseButton::button1 ) {

        box_Ve_Arm[index_hand].buildFrom(box_Me_Arm);
        state = WaitPreGrasp;
        click_done = false;
        //AL::ALValue names_head     = AL::ALValue::array("NeckYaw","NeckPitch","HeadPitch","HeadRoll","LEyeYaw", "LEyePitch","LEyeYaw", "LEyePitch" );
        AL::ALValue angles_head      = AL::ALValue::array(vpMath::rad(-6.8), vpMath::rad(16.6), vpMath::rad(10.3), vpMath::rad(0.0), 0.0 , 0.0, 0.0, 0.0  );
        float fractionMaxSpeed  = 0.05f;
        robot.getProxy()->setAngles(jointHeadNames_tot, angles_head, fractionMaxSpeed);
      }

    }


    if (state == WaitPreGrasp)
    {
      index_hand = 1;
      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to move to the pre-grasp pose", vpColor::red);

      if (click_done && button == vpMouseButton::button1) {
        state = PreGraps;
        click_done = false;
      }
    }



    if (state == PreGraps && status_box_tracker )
    {

      vpDisplay::displayText(I, vpImagePoint(I.getHeight() - 10, 10), "Left click to start the servo", vpColor::red);

      if (first_time_box_pose)
      {
        // Compute desired box position cMbox_d
        cMbox_d = cMbox * M_offset;
        first_time_box_pose = false;
      }

      vpDisplay::displayFrame(I, cMbox_d, cam, 0.05, vpColor::none, 3);
      vpDisplay::displayFrame(I, cMbox * box_Mhand[1], cam, 0.05, vpColor::none, 3);
      vpDisplay::displayFrame(I, cMbox * box_Mhand[0], cam, 0.05, vpColor::none, 3);

      if (click_done && button == vpMouseButton::button1 ) {

        state = VSBox;
        click_done = false;

        hand_tracker[index_hand] = &box_tracker; // Trick: reuse the keyboard handlers (h, +, -) on the box tracker
      }
    }


    if (state == VSBox)
    {

      // Get the actual arm joint positions
      q[0] = robot.getPosition(jointNames_arm[0]);
      q[1] = robot.getPosition(jointNames_arm[1]);

      //  if(status_box_tracker && status_hand_tracker[index_hand] )
      if(status_box_tracker )
      {

        if (! grasp_servo_converged[0]) {
          //                    for (unsigned int i = 0; i<2; i++)
          //                    {

          unsigned int i = 0; // master arm (left)
          // Adaptive gain currently unused; a constant gain is set below.
          // vpAdaptiveGain lambda(0.8, 0.05, 8);
          // vpAdaptiveGain lambda(0.4, 0.02, 4);
          // servo_larm[i]->setLambda(lambda);
          servo_larm[i]->setLambda(0.2);


          eJe[i] = robot.get_eJe(chain_name[i]);

          servo_larm[i]->set_eJe(eJe[i]);
          servo_larm[i]->m_task.set_cVe(box_Ve_Arm[i]);

          box_dMbox[i] = cMbox_d.inverse() * cMbox;
          printPose("box_dMbox: ", box_dMbox[i]);
          servo_larm[i]->setCurrentFeature(box_dMbox[i]) ;
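          // PBVS error: box_dMbox = cMbox_d^-1 * cMbox is the current box pose expressed in
          // the desired-box frame; the task (presumably translation + theta-u features, per
          // vpServoArm) is driven towards the identity, i.e. towards cMbox = cMbox_d.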

          vpDisplay::displayFrame(I, cMbox_d , cam, 0.05, vpColor::none, 3);


          //                    vpDisplay::displayFrame(I, cMbox *box_Mhand[1] , cam, 0.05, vpColor::none, 3);
          //                    vpDisplay::displayFrame(I, cMbox *box_Mhand[0] , cam, 0.05, vpColor::none, 3);

          if (first_time_arm_servo[i]) {
            std::cout << "-- Start visual servoing of the arm" << chain_name[i] << "." << std::endl;
            servo_arm_time_init[i] = vpTime::measureTimeSecond();
            first_time_arm_servo[i] = false;
          }

          q_dot_arm[i] =  - servo_larm[i]->computeControlLaw(servo_arm_time_init[i]);
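          // computeControlLaw(t0) is the time-initialized variant of the vpServo control
          // law: it adds a decaying term so the commanded velocity starts from zero at servo
          // start instead of jumping. The minus sign is presumably needed because the
          // controlled frame (the box moved by the arm) is the observed object, not the camera.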


          q_dot_real[0] = robot.getJointVelocity(jointNames_arm[0]);
          q_dot_real[1] = robot.getJointVelocity(jointNames_arm[1]);

          //                    std::cout << "real_q:  " << std::endl <<  real_q << std::endl;

          //                    std::cout << " box_Ve_Arm[i]:  " << std::endl << box_Ve_Arm[i] << std::endl;
          //                    std::cout << "  eJe[i][i]:  " << std::endl <<  eJe[i] << std::endl;

          //vpColVector real_v = (box_Ve_Arm[i] * eJe[i]) *  q_dot_real[0];
          vpColVector real_v = (box_Ve_Arm[i] * eJe[i]) *  q_dot_arm[0];
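          // real_v is the 6D twist of the box frame produced by the master-arm command:
          // eJe maps joint velocities to the wrist twist and box_Ve_Arm expresses it in the
          // box frame. It is reused below to make the other arm follow the same box motion.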



          //                    std::cout << "real_v:  " << std::endl <<real_v << std::endl;

          //          vpVelocityTwistMatrix cVo(cMo_hand);
          //          vpMatrix cJe = cVo * oJo;
          // Compute the feed-forward terms
          //          vpColVector sec_ter = 0.5 * ((servo_head.m_task_head.getTaskJacobianPseudoInverse() *  (servo_head.m_task_head.getInteractionMatrix() * cJe)) * q_dot_larm);
          //          std::cout <<"Second Term:" <<sec_ter << std::endl;
          //q_dot_head = q_dot_head + sec_ter;



          // Compute joint limit avoidance
          q_dot_arm_sec[0]  = servo_larm[0]->m_task.secondaryTaskJointLimitAvoidance(q[0], q_dot_real[0], jointMin[0], jointMax[0]);
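          // The joint-limit avoidance velocity is a secondary task: vpServo projects it into
          // the null space of the main task Jacobian, so it pushes joints away from their
          // limits without perturbing the box trajectory (applied to the master arm only).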
          //q_dot_arm_sec[1]  = servo_larm[1]->m_task.secondaryTaskJointLimitAvoidance(q[1], q_dot_real[1], jointMin[1], jointMax[1]);

          // vpColVector q_dot_arm_head = q_dot_larm + q2_dot;

          //q_dot_arm_head.stack(q_dot_tot);
          //          robot.setVelocity(joint_names_arm_head,q_dot_arm_head);
          //robot.setVelocity(jointNames_arm[i], q_dot_larm);

          //          if (opt_plotter_arm) {
          //            plotter_arm->plot(0, cpt_iter_servo_grasp, servo_larm.m_task.getError());
          //            plotter_arm->plot(1, cpt_iter_servo_grasp, q_dot_larm);
          //          }

          //          if (opt_plotter_q_sec_arm)
          //          {
          //            plotter_q_sec_arm->plot(0,loop_iter,q2_dot);
          //            plotter_q_sec_arm->plot(1,loop_iter,q_dot_larm + q2_dot);

          //          }


          cpt_iter_servo_grasp[i]++;


          //                    }

          //                    // Visual servoing slave

          //                    i = 1;

          //                    //vpAdaptiveGain lambda(0.4, 0.02, 4);

          //                    servo_larm[i]->setLambda(0.07);

          //                    servo_larm[i]->set_eJe(robot.get_eJe(chain_name[i]));
          //                    servo_larm[i]->m_task.set_cVe(hand_Ve_Arm[i]);

          //                    box_dMbox[i] = (cMbox *box_Mhand[1]).inverse() *  cMo_hand[1] ;

          //                    printPose("box_dMbox: ", box_dMbox[i]);
          //                    servo_larm[i]->setCurrentFeature(box_dMbox[i]) ;

          //                    vpDisplay::displayFrame(I, cMbox_d , cam, 0.025, vpColor::red, 2);

          //                    if (first_time_arm_servo[i]) {
          //                        std::cout << "-- Start visual servoing of the arm" << chain_name[i] << "." << std::endl;
          //                        servo_arm_time_init[i] = vpTime::measureTimeSecond();
          //                        first_time_arm_servo[i] = false;
          //                    }

          //                    q_dot_arm[i] =  - servo_larm[i]->computeControlLaw(servo_arm_time_init[i]);



          eJe[1] = robot.get_eJe(chain_name[1]);
          //                    q_dot_arm[1] += (box_Ve_Arm[1] * eJe[1]).pseudoInverse() * real_v;

          q_dot_arm[1] = (box_Ve_Arm[1] * eJe[1]).pseudoInverse() * real_v;
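          // Master/slave coupling: the second arm's joint velocities are the least-squares
          // solution (via the pseudo-inverse of its box-frame Jacobian) reproducing the box
          // twist real_v, so both hands move the box rigidly as one.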

          vpColVector q_dot_tot = q_dot_arm[0] + q_dot_arm_sec[0];
          q_dot_tot.stack(q_dot_arm[1] + q_dot_arm_sec[1]);

          robot.setVelocity(jointArmsNames_tot, q_dot_tot);

          vpTranslationVector t_error_grasp = box_dMbox[0].getTranslationVector();
          vpRotationMatrix R_error_grasp;
          box_dMbox[0].extract(R_error_grasp);
          vpThetaUVector tu_error_grasp;
          tu_error_grasp.buildFrom(R_error_grasp);
          double theta_error_grasp;
          vpColVector u_error_grasp;
          tu_error_grasp.extract(theta_error_grasp, u_error_grasp);
          std::cout << "error: " << t_error_grasp << " " << vpMath::deg(theta_error_grasp) << std::endl;


          //                    if (cpt_iter_servo_grasp[0] > 100) {

          //                        vpDisplay::displayText(I, vpImagePoint(10,10), "Cannot converge. Click to continue", vpColor::red);
          //                    }
          //                    double error_t_treshold = 0.007;

          //                    if ( (sqrt(t_error_grasp.sumSquare()) < error_t_treshold) && (theta_error_grasp < vpMath::rad(3)) || (click_done && button == vpMouseButton::button1 /*&& cpt_iter_servo_grasp > 150*/) )
          //                    {
          //                        robot.stop(jointArmsNames_tot);

          //                        state = End;
          //                        grasp_servo_converged[0] = true;

          //                        if (click_done && button == vpMouseButton::button1)
          //                            click_done = false;
          //                    }


        }

      }
      else {
        // state grasping but one of the tracker fails
        robot.stop(jointArmsNames_tot);
      }


    }


    if (state == End)
    {
      std::cout<< "End" << std::endl;
    }

    vpDisplay::flush(I);

    if (click_done && button == vpMouseButton::button3) { // Quit the loop
      break;
    }
    //std::cout << "Loop time: " << vpTime::measureTimeMs() - loop_time_start << std::endl;

  }

  robot.stop(jointArmsNames_tot);




  return 0;
}