Example #1
0
int test(double x,double y,double z,double alpha){
  // Initial pose
  vpHomogeneousMatrix cMo(x, y, z, vpMath::rad(0), vpMath::rad(0), alpha);
  // Desired pose
  vpHomogeneousMatrix cdMo(0.0, 0.0, 1.0, vpMath::rad(0), vpMath::rad(0), vpMath::rad(0));

  //source and destination objects for moment manipulation
  vpMomentObject src(6);
  vpMomentObject dst(6);

  //init and run the simulation
  initScene(cMo, cdMo, src, dst); //initialize graphical scene (for interface)

  vpMatrix mat = execute(cMo, cdMo, src, dst);

  // Tolerance on the entries of the resulting interaction matrix
  const double tol = std::numeric_limits<double>::epsilon()*1e10;

  if(fabs(mat[0][0]-(-1)) > tol) return -1;
  if(fabs(mat[0][1]-(0)) > tol) return -1;
  if(fabs(mat[0][2]-(0)) > tol) return -1;

  if(fabs(mat[1][0]-(0)) > tol) return -1;
  if(fabs(mat[1][1]-(-1)) > tol) return -1;
  if(fabs(mat[1][2]-(0)) > tol) return -1;

  if(fabs(mat[2][0]-(0)) > tol) return -1;
  if(fabs(mat[2][1]-(0)) > tol) return -1;
  if(fabs(mat[2][2]-(-1)) > tol) return -1;
  if(fabs(mat[2][5]-(0)) > tol) return -1;

  if(fabs(mat[3][0]-(0)) > tol) return -1;
  if(fabs(mat[3][1]-(0)) > tol) return -1;
  if(fabs(mat[3][2]-(0)) > tol) return -1;
  if(fabs(mat[3][5]-(0)) > tol) return -1;

  if(fabs(mat[4][0]-(0)) > tol) return -1;
  if(fabs(mat[4][1]-(0)) > tol) return -1;
  if(fabs(mat[4][2]-(0)) > tol) return -1;
  if(fabs(mat[4][5]-(0)) > tol) return -1;

  if(fabs(mat[5][0]-(0)) > tol) return -1;
  if(fabs(mat[5][1]-(0)) > tol) return -1;
  if(fabs(mat[5][2]-(0)) > tol) return -1;
  if(fabs(mat[5][5]-(-1)) > tol) return -1;

  return 0;
}
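
The entry checks above all follow one pattern; a small helper could express it once. A minimal sketch (hypothetical, not part of the original test; it assumes the same vpMatrix element access used in test()):

bool nearlyEqual(const vpMatrix &mat, unsigned int i, unsigned int j, double expected)
{
  // same tolerance as in test(): machine epsilon scaled by 1e10
  const double tol = std::numeric_limits<double>::epsilon()*1e10;
  return fabs(mat[i][j] - expected) <= tol;
}

With it, each check reads: if (!nearlyEqual(mat, 0, 0, -1)) return -1;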
Example #2
0
int main()
{
  try {
    // We want to calibrate the hand-eye extrinsic camera parameters from N pairs of poses: cMo and wMe
    const unsigned int N = 15;
    // Input: N pairs of poses used as input in the calibration process
    std::vector<vpHomogeneousMatrix> cMo(N) ; // eye (camera) to object transformation. The object frame is attached to the calibration grid
    std::vector<vpHomogeneousMatrix>  wMe(N) ; // world to hand (end-effector) transformation
    // Output: Result of the calibration
    vpHomogeneousMatrix  eMc; // hand (end-effector) to eye (camera) transformation

    // Initialize an eMc transformation used to produce the simulated input transformations cMo and wMe
    //vpTranslationVector etc(0.1, 0.2, 0.3);
    vpTranslationVector etc(0.05, 0, 0.2);
    vpThetaUVector erc;
    /*erc[0] = vpMath::rad(10); // 10 deg
    erc[1] = vpMath::rad(-10); // -10 deg
    erc[2] = vpMath::rad(25); // 25 deg*/
    erc[0] = vpMath::rad(0); // 0 deg
    erc[1] = vpMath::rad(0); // 0 deg
    erc[2] = vpMath::rad(0); // 0 deg

    /*eMc.buildFrom(etc, erc);
    std::cout << "Simulated hand to eye transformation: eMc " << std::endl ;
    std::cout << eMc << std::endl ;
    std::cout << "Theta U rotation: " << vpMath::deg(erc[0]) << " " << vpMath::deg(erc[1]) << " " << vpMath::deg(erc[2]) << std::endl;*/

    vpColVector v_c(6) ; // camera velocity, only used by the commented-out pose simulation below

/* First cMo dataset, superseded by the final assignments below:
cMo[0].buildFrom(0.1570413653 , 0.1462814767 , 0.5296167593 , -0.1941770803 , -0.05941143538 , -3.086854663);
cMo[1].buildFrom(0.1645591308 , -0.01178757497 , 0.5548852438 , 0.2174172067 , -0.05050699552 , 3.088872578);
cMo[2].buildFrom(-0.08140123626 , -0.06415861852 , 0.8225177987 , 0.6387268082 , 0.2934568774 , -3.041051984);
cMo[3].buildFrom(-0.07389514878 , 0.2465329982 , 0.8080147574 , -0.5427311886 , 0.9203214735 , 2.951500448 );
cMo[4].buildFrom(0.1183504128 , 0.06189642877 , 0.5338128543 , 0.290828389 , 0.8278402557 , 2.878681592 );
cMo[5].buildFrom(0.07245283491 , 0.05827456634 , 0.5074674243, 0.8885114659 , -0.03282148657 , 2.920083464 );
cMo[6].buildFrom( 0.12894082 , 0.01269200688 , 0.5787056135 , -0.01986987293 , 1.400704506 , -2.711025518 );
cMo[7].buildFrom(0.133226549 , 0.1287034045 , 0.5838253899 , -0.07109766974 , 1.664064991 , 2.571849871 );
cMo[8].buildFrom( 0.1476536754 , 0.08092558365 , 0.4738385251 , 0.32787776 , 1.251768622, 2.748314156 );
cMo[9].buildFrom(0.06674342233 , 0.04580250865 , 0.9896494674, -0.2363088805 , 0.5380505543 , 3.049988544 );
cMo[10].buildFrom(0.1353199539 , 0.006734768036 , 0.552106057 , -0.1165157893 , 1.197817012 , -2.754244687 );
cMo[11].buildFrom(0.1134173579 , 0.05667957541 , 0.562798548, 0.01827158984 , 0.6700808586, -2.802593791 );
cMo[12].buildFrom(0.09678548583 , 0.03505801317 , 0.75085561 , -0.2546886466 , 1.238795585 , 2.848785012 );
cMo[13].buildFrom(0.1118341527 , 0.03541269363 , 0.6032772496 , -0.2637376516 , 1.3428496 , -2.740983394 );
cMo[14].buildFrom(0.1305641085 , 0.05786662886, 0.483384278 , 0.0992989732 , 0.6584820877 , 3.055982463 );
*/

/* Second cMo dataset, also superseded by the final assignments below:
cMo[0].buildFrom(0.1597930883553831, 0.1468007367442987, 0.5282961701119935,-0.1968521018387337, -0.05964684515549047, -3.086641073146096);
cMo[1].buildFrom(0.1673763925455965, -0.01114616076427374, 0.553279655621999,0.2239915080197494, -0.05005752718967757, 3.088376811445953);
cMo[2].buildFrom(-0.07709719572166263, -0.06311291825387691, 0.8202092073793362,0.6219896900804657, 0.2876488737041963, -3.044963439387781);
cMo[3].buildFrom(-0.06973561289067365, 0.2471255554578212, 0.805719925349877,-0.5362409014255265, 0.9272355894724263, 2.951290732839171);
cMo[4].buildFrom(0.1210411206055177, 0.06238084798406756, 0.5324010655079093,0.2973959444235151, 0.8322373133058337, 2.878093597238552);
cMo[5].buildFrom(0.07508944088821654, 0.05879690189843891, 0.5070670063652497,0.8932206395919406, -0.03117966337021329, 2.918242087859566);
cMo[6].buildFrom(0.1318496740636458, 0.0132932944215828, 0.5772437747839489,-0.02665105916432204, 1.401478703953037, -2.713171896524004);
cMo[7].buildFrom(0.1361734623849172, 0.1291969099332706, 0.5818604734340637,-0.06441618020387928, 1.666431046373741, 2.573408993461815);
cMo[8].buildFrom(0.1500497707905874, 0.0813631742783348, 0.4722210658185951,0.3346209323627573, 1.254500083452833, 2.748388621696409);
cMo[9].buildFrom(0.07173548938696282, 0.04679841960380481, 0.9882799057094008,-0.2295202979093593, 0.5438325909344705, 3.050345194607081);
cMo[10].buildFrom(0.1380846144556481, 0.007346052126947021, 0.5505733029088267,-0.1232907800995376, 1.197896788755784, -2.756013443872642);
cMo[11].buildFrom(0.1162531426061057, 0.05721641313429964, 0.5614795673624928,0.01109493482067169, 0.6727074488226285, -2.803240757451364);
cMo[12].buildFrom(0.1005589170954649, 0.03581030081212384, 0.7496742244991828,-0.2478478271228602, 1.242473587369607, 2.850021720345595);
cMo[13].buildFrom(0.1148657511867909,0.03601171150638215, 0.6019740529052237,-0.2706978393095179,1.343857646300739, -2.742391791707475);
cMo[14].buildFrom(0.1329692185754732, 0.05829478232313912, 0.4817358432812112,0.1078725939205944, 0.6637727816622113, 3.055743294810342);
*/
 
cMo[0].buildFrom(0.1670271366433932, 0.1412310989392187, 0.5291817104298804, -0.2158501122137233, -0.04102562316131209, -3.086462501033879);
cMo[1].buildFrom(0.174732459685928, -0.01679913253246702, 0.5522224159363196, 0.2461084308599157, -0.06243825852964164, 3.087091867254426);
cMo[2].buildFrom(-0.06616300409680606, -0.07144422525798637, 0.822412920334844, 0.6163890473443646, 0.2990544162277649, -3.047056418830492);
cMo[3].buildFrom(-0.05870935563512666, 0.2389436813297934, 0.8107597728870847, 0.5213771113951537, -0.9121090301250817, -2.960378152694366);
cMo[4].buildFrom(0.1281525936746428, 0.05693383934516558, 0.5327733518837586, 0.3179210001194651, 0.8200242142582296, 2.88367687151767);
cMo[5].buildFrom(0.08180651786871981, 0.05366036316002259, 0.5077531214512626, 0.9150117372925481, -0.04466326358959161, 2.914221695870326);
cMo[6].buildFrom(0.1395093199674059, 0.007415567899493163, 0.577117662504006, -0.04261329013241258, 1.41745949022017, -2.711336977155535);
cMo[7].buildFrom(0.1440317716370368, 0.1231050167128023, 0.5827247954590138, -0.04665269265730402, 1.658836044602898, 2.585772326995824);
cMo[8].buildFrom(0.1564030388064615, 0.0763727666089834, 0.4722951567730267, 0.354293229181613, 1.24485453932061, 2.756275049720885);
cMo[9].buildFrom(0.08483478780487425, 0.03691454820089227, 0.9903368602303289, -0.2115118140952619, 0.5254383450695477, 3.056702831376301);
cMo[10].buildFrom(0.1454078173575387, 0.001721636178656781, 0.5504152051438634, -0.1384204054268272, 1.213040205258829, -2.753677593947966);
cMo[11].buildFrom(0.1237204738872894, 0.05150067983657727, 0.5619672773339737, -0.002655776810846245, 0.6873760817109954, -2.802941737408393);
cMo[12].buildFrom(0.1105008417439839, 0.02827236018640317, 0.7504407471577624, -0.2289243995608501, 1.229517790181478, 2.8622921610602);
cMo[13].buildFrom(0.1228660494234161, 0.02992076313030763, 0.6022990540352411, -0.2881653363559677, 1.359227700111445, -2.738474323741062);
cMo[14].buildFrom(0.1393960912776154, 0.05334088954955449, 0.4818203087724587, 0.1293076026053152, 0.6488927042823395, 3.060803207504738);



/*cMo[0] = cMo[0].inverse();
cMo[1] = cMo[1].inverse();
cMo[2] = cMo[2].inverse();
cMo[3] = cMo[3].inverse();
cMo[4] = cMo[4].inverse();*/


// Recorded hand poses T_be1..T_be15: the top 3x4 part of each homogeneous
// matrix, in row-major order. wMe[k] receives the inverse of T_be(k+1).
const double T_be[15][12] = {
  {-0.4004, -0.0535,  0.9148,  0.1526,   0.0003, -0.9983, -0.0583,  0.0044,   0.9163, -0.0230,  0.3998,  0.8041}, // T_be1
  {-0.4154, -0.0187,  0.9095,  0.1792,   0.0838, -0.9963,  0.0178,  0.1368,   0.9058,  0.0836,  0.4154,  0.8136}, // T_be2
  { 0.1052,  0.1733,  0.9792,  0.3298,   0.0429, -0.9846,  0.1696,  0.1947,   0.9935,  0.0241, -0.1110,  0.6216}, // T_be3
  { 0.0678, -0.5774,  0.8136,  0.3720,  -0.0403, -0.8164, -0.5761, -0.0944,   0.9969,  0.0063, -0.0786,  0.6249}, // T_be4
  {-0.3865, -0.5722,  0.7233,  0.1988,   0.2390, -0.8196, -0.5206,  0.0121,   0.8908, -0.0284,  0.4536,  0.7745}, // T_be5
  {-0.7626, -0.0764,  0.6423,  0.2651,   0.1273, -0.9913,  0.0332,  0.0548,   0.6342,  0.1071,  0.7657,  0.7880}, // T_be6
  {-0.2697,  0.7872,  0.5546,  0.1178,  -0.0949, -0.5949,  0.7982,  0.1315,   0.9583,  0.1627,  0.2351,  0.7655}, // T_be7
  {-0.1456, -0.9175,  0.3700,  0.1975,   0.0973, -0.3855, -0.9176, -0.0398,   0.9845, -0.0976,  0.1454,  0.7808}, // T_be8
  {-0.3755, -0.7617,  0.5280,  0.1443,   0.2437, -0.6308, -0.7367, -0.0192,   0.8942, -0.1479,  0.4224,  0.7956}, // T_be9
  {-0.1198, -0.3680,  0.9221,  0.5571,   0.0602, -0.9298, -0.3632,  0.0788,   0.9910,  0.0120,  0.1335,  0.8067}, // T_be10
  {-0.3123,  0.7088,  0.6325,  0.1088,  -0.1564, -0.6951,  0.7017,  0.1619,   0.9370,  0.1202,  0.3279,  0.7734}, // T_be11
  {-0.2224,  0.4684,  0.8551,  0.1112,  -0.2306, -0.8774,  0.4206,  0.1484,   0.9473, -0.1037,  0.3032,  0.7667}, // T_be12
  {-0.0878, -0.7416,  0.6651,  0.3389,   0.0151, -0.6686, -0.7435,  0.0917,   0.9960, -0.0552,  0.0699,  0.7806}, // T_be13
  {-0.4084,  0.7378,  0.5375,  0.1909,  -0.1498, -0.6350,  0.7578,  0.1257,   0.9004,  0.2290,  0.3699,  0.7660}, // T_be14
  {-0.3175, -0.4333,  0.8435,  0.1187,   0.0831, -0.8988, -0.4304,  0.0654,   0.9446, -0.0666,  0.3213,  0.7620}  // T_be15
};

for (unsigned int k = 0; k < N; k++) {
  // Copy the 3x4 part; the bottom row of wMe[k] is already (0 0 0 1)
  for (unsigned int r = 0; r < 3; r++)
    for (unsigned int c = 0; c < 4; c++)
      wMe[k][r][c] = T_be[k][4*r + c];
  wMe[k] = wMe[k].inverse();
}


/*wMe[6][0][0] = 0.1251;
wMe[6][0][1] = -0.3291;
wMe[6][0][2] =0.9360;
wMe[6][0][3] =0.4350;
wMe[6][1][0] = 0.0764;
wMe[6][1][1] =  -0.9374;
wMe[6][1][2] =-0.3398;
wMe[6][1][3] = -0.0258;
wMe[6][2][0] = 0.9892;
wMe[6][2][1] = 0.1140;
wMe[6][2][2] = -0.0922;
wMe[6][2][3] = 0.6420;
wMe[6] = wMe[6].inverse();*/



    /*wMe[0].buildFrom(0, 0, 0, 10*M_PI/180., -10*M_PI/180.,0);
    wMe[1].buildFrom(0, 0, 0, 20*M_PI/180.,-10*M_PI/180.,0);
    wMe[2].buildFrom(0, 0, 0, 30*M_PI/180.,-10*M_PI/180.,0);
    wMe[3].buildFrom(0, 0, 0, 10*M_PI/180.,-20*M_PI/180.,0);
    wMe[4].buildFrom(0, 0, 0, 20*M_PI/180.,-20*M_PI/180.,0);
    wMe[5].buildFrom(0, 0, 0, 30*M_PI/180.,-20*M_PI/180.,0);
    wMe[6].buildFrom(0, 0, 0, 10*M_PI/180.,-30*M_PI/180.,0);
    wMe[7].buildFrom(0, 0, 0, 20*M_PI/180.,-30*M_PI/180.,0);
    wMe[8].buildFrom(0, 0, 0, 30*M_PI/180.,-30*M_PI/180.,0);*/

    /*for (unsigned int i=0 ; i < N ; i++)
    {
      v_c = 0 ;
      if (i==0) {
        // Initialize first poses
        cMo[0].buildFrom(0, 0, 0.5, 0, 0, 0); // z=0.5 m
        wMe[0].buildFrom(0, 0, 0, 0, 0, 0); // Id
      }
      else if (i==1)
        v_c[3] = M_PI/8 ;
      else if (i==2)
        v_c[4] = M_PI/8 ;
      else if (i==3)
        v_c[5] = M_PI/10 ;
      else if (i==4)
        v_c[0] = 0.5 ;
      else if (i==5)
        v_c[1] = 0.8 ;

      vpHomogeneousMatrix cMc; // camera displacement
      cMc = vpExponentialMap::direct(v_c) ; // Compute the camera displacement due to the velocity applied to the camera
      if (i > 0) {
        // From the camera displacement cMc, compute the wMe and cMo matrices
        cMo[i] = cMc.inverse() * cMo[i-1];
        wMe[i] = wMe[i-1] * eMc * cMc * eMc.inverse();
      }
    }

    if (0) {
      for (unsigned int i=0 ; i < N ; i++) {
        vpHomogeneousMatrix wMo;
        wMo = wMe[i] * eMc * cMo[i];
        std::cout << std::endl << "wMo[" << i << "] " << std::endl ;
        std::cout << wMo << std::endl ;
        std::cout << "cMo[" << i << "] " << std::endl ;
        std::cout << cMo[i] << std::endl ;
        std::cout << "wMe[" << i << "] " << std::endl ;
        std::cout << wMe[i] << std::endl ;
      }
    }*/

    // Reset the eMc matrix to eye
    eMc.eye();

vpHomogeneousMatrix c1Mb,c2Mb;


    // Compute the eMc hand-eye transformation from the N pose pairs
    // - cMo: camera to object poses as N homogeneous transformations
    // - wMe: world to hand (end-effector) poses as N homogeneous transformations

vpCalibration::calibrationTsai(cMo, wMe, eMc) ;
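// calibrationTsai estimates the constant eMc by solving AX = XB, where A and B
// are the relative camera and end-effector motions between acquisitions, so
// cMo[i] and wMe[i] must correspond to the same robot station.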

/*c1Mb[0][0] =-0.2627605227;
c1Mb[0][1] = 0.0151902301;
c1Mb[0][2] = 0.9647415015 ;
c1Mb[0][3] =  -0.8826177717;
c1Mb[1][0] = -0.01615928012;
c1Mb[1][1] =  -0.9998051093;
c1Mb[1][2] = 0.01134111962;
c1Mb[1][3] = 0.0979656024;
c1Mb[2][0] = 0.9647257566;
c1Mb[2][1] = -0.01260952965;
c1Mb[2][2] = 0.2629547763;
c1Mb[2][3] = 0.2804142289;*/

/*c1mb 0.09772591515  0.04706152302  0.9941000244  -0.612686742  
-0.8656071104  0.4969240654  0.06156950224  0.4536187228  
-0.491094671  -0.8665169855  0.08929914801  0.7723205071  
0  0  0  1 */

/*-0.2363178473  -0.02587436709  0.9713312474  -0.8784863904  
0.05264871432  -0.9985178827  -0.01378951562  0.09275778847  
0.9702484156  0.04788063271  0.2373298487  0.2305791122  
0  0  0  1 */

/* calib c1mb -0.243548266  -0.00578239501  0.969871541  -0.8739992614  
0.05078066467  -0.9986866964  0.006797541299  0.07978538541  
0.9685584991  0.05090625089  0.243522047  0.223042619  
0  0  0  1  */

c1Mb = eMc.inverse(); 

std::cout << " calib c1mb " << eMc << " " << c1Mb << std::endl;


/*-0.271553545  0.001448653885  0.9624222429  -0.8821152339  
0.02302455283  -0.9997028805  0.008001293053  -0.08084598513  
0.9621478796  0.02433212127  0.2714395065  0.2839962488  
0  0  0  1  */ 

/*c2mb = 0.09712912699  0.06303453402  0.9932736683  -0.6227154987  
-0.8697303448  0.4905733585  0.05391574121  0.2821384248  
-0.4838750458  -0.869117039  0.102472009  0.7628133232  
0  0  0  1*/


c2Mb[0][0] = -0.2556456149;
c2Mb[0][1] = -0.01038676616;
c2Mb[0][2] = 0.9667147639;
c2Mb[0][3] = -0.8797472439;
c2Mb[1][0] = 0.04013060571;
c2Mb[1][1] = -0.9991944352;
c2Mb[1][2] = -0.0001232892871;
c2Mb[1][3] = -0.08052809443;
c2Mb[2][0] = 0.965937293;
c2Mb[2][1] = 0.03876333066;
c2Mb[2][2] = 0.255856503;
c2Mb[2][3] = 0.2052943186;


vpHomogeneousMatrix rect1,rect2,proj1,proj2;

/*0.999954 0.009229 -0.002472
-0.009128 0.999225 0.038285
0.002823 -0.038260 0.999264*/

/* First rect1 values, superseded by the assignments further below:
rect1[0][0] = 0.999954;
rect1[0][1] = 0.009229;
rect1[0][2] = -0.002472;
rect1[0][3] = 0;
rect1[1][0] = -0.009128;
rect1[1][1] = 0.999225;
rect1[1][2] = 0.038285;
rect1[1][3] = -0.177012987;
rect1[2][0] = 0.002823;
rect1[2][1] = -0.038260;
rect1[2][2] = 0.999264;
rect1[2][3] = 0;
*/

/*0.999990 -0.003522 0.002602
0.003453 0.999650 0.026232
-0.002694 -0.026222 0.999653*/

/* First rect2 values, superseded by the assignments further below:
rect2[0][0] = 0.999990;
rect2[0][1] = -0.003522;
rect2[0][2] = 0.002602;
rect2[0][3] = 0;
rect2[1][0] = 0.003453;
rect2[1][1] = 0.999650;
rect2[1][2] = -0.026232;
rect2[1][3] = 0;
rect2[2][0] = -0.002694;
rect2[2][1] = -0.026222;
rect2[2][2] = 0.999653;
rect2[2][3] = 0;
*/

/*770.860676 0.000000 342.122410 0.000000
0.000000 770.860676 258.604725 0.000000
0.000000 0.000000 1.000000 0.000000*/

/* First proj1 values, superseded by the assignments further below:
proj1[0][0] = 770.860676;
proj1[0][1] = 0.000000;
proj1[0][2] = 342.122410;
proj1[0][3] = 0;
proj1[1][0] = 0.00;
proj1[1][1] = 770.860676;
proj1[1][2] = 258.604725;
proj1[1][3] = 0;
proj1[2][0] = 0;
proj1[2][1] = 0;
proj1[2][2] = 1;
proj1[2][3] = 0;
*/

/*770.860676 0.000000 342.122410 0.000000
0.000000 770.860676 258.604725 0.000000
0.000000 0.000000 1.000000 0.000000*/

/* First proj2 values, superseded by the assignments further below:
proj2[0][0] = 770.860676;
proj2[0][1] = 0.000000;
proj2[0][2] = 342.122410;
proj2[0][3] = 0;
proj2[1][0] = 0.00;
proj2[1][1] = 770.860676;
proj2[1][2] = 258.604725;
proj2[1][3] = 0;
proj2[2][0] = 0;
proj2[2][1] = -136.36;
proj2[2][2] = 1;
proj2[2][3] = 0;
*/


/*0.999962 -0.007298 0.004851
0.006902 0.996990 0.077220
-0.005400 -0.077183 0.997002*/

rect1[0][0] = 0.999962;
rect1[0][1] = -0.007298;
rect1[0][2] = 0.004851;
rect1[0][3] = 0;
rect1[1][0] = 0.006902;
rect1[1][1] =  0.996990;
rect1[1][2] =  0.077220;
rect1[1][3] = 0;
rect1[2][0] = -0.005400;
rect1[2][1] = -0.077183;
rect1[2][2] = 0.997002;
rect1[2][3] = 0; 

/*0.999979 -0.003874 -0.005279
0.004258 0.997179 0.074938
0.004974 -0.074959 0.997174*/

rect2[0][0] = 0.999979;
rect2[0][1] = -0.003874;
rect2[0][2] = -0.005279;
rect2[0][3] = 0;
rect2[1][0] = 0.004258;
rect2[1][1] = 0.997179;
rect2[1][2] = 0.074938;
rect2[1][3] = 0;
rect2[2][0] = 0.004974 ;
rect2[2][1] = -0.074959;
rect2[2][2] = 0.997174;
rect2[2][3] = 0; 

/*761.095626 0.000000 307.990547 0.000000
0.000000 761.095626 189.171375 0.000000
0.000000 0.000000 1.000000 0.000000*/

/* Second proj1 values, superseded by the identity set just below:
proj1[0][0] = 761.095626;
proj1[0][1] = 0.000000;
proj1[0][2] = 307.9905;
proj1[0][3] = 0;
proj1[1][0] = 0.00;
proj1[1][1] = 761.095;
proj1[1][2] = 189.17;
proj1[1][3] = 0;
proj1[2][0] = 0;
proj1[2][1] = 0;
proj1[2][2] = 1;
proj1[2][3] = 0;
*/

proj1[0][0] = 1;
proj1[0][1] = 0.000000;
proj1[0][2] = 0;
proj1[0][3] = 0;
proj1[1][0] = 0.00;
proj1[1][1] = 1;
proj1[1][2] = 0;
proj1[1][3] = 0;
proj1[2][0] = 0;
proj1[2][1] = 0;
proj1[2][2] = 1;
proj1[2][3] = 0;


/*proj2[0][0] = 761.095626;
proj2[0][1] = 0.000000;
proj2[0][2] = 307.9905;
proj2[0][3] = 0;
proj2[1][0] = 0.00;
proj2[1][1] =  761.095;
proj2[1][2] =  189.17;
proj2[1][3] = 135.47;
proj2[2][0] =0 ;
proj2[2][1] = 0;
proj2[2][2] = 1;
proj2[2][3] = 0;*/ 

proj2[0][0] = 1;
proj2[0][1] = 0.000000;
proj2[0][2] = 0;
proj2[0][3] = 0;
proj2[1][0] = 0.00;
proj2[1][1] = 1;
proj2[1][2] = 0;
proj2[1][3] = 135.47/761.09;
proj2[2][0] = 0;
proj2[2][1] = 0;
proj2[2][2] = 1;
proj2[2][3] = 0;

vpRotationMatrix K1;
vpRotationMatrix K2;
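// K1 and K2 hold the 3x3 intrinsic matrices of the two cameras; a
// vpRotationMatrix is used here merely as a plain 3x3 container.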

/*733.087064 0.000000 315.021816
0.000000 733.060377 240.070864
0.000000 0.000000 1.000000*/

K2[0][0] = 733.08;
K2[0][1] = 0.000000;
K2[0][2] = 315.02;
K2[1][0] = 0.00;
K2[1][1] = 733.06;
K2[1][2] = 240.0708;
K2[2][0] = 0;
K2[2][1] = 0;
K2[2][2] = 1;

/*729.503218 0.000000 303.279696
0.000000 729.585951 259.287176
0.000000 0.000000 1.000000*/

K1[0][0] = 729.50;
K1[0][1] = 0.000000;
K1[0][2] = 303.279;
K1[1][0] = 0.00;
K1[1][1] = 729.5859;
K1[1][2] = 259.2871;
K1[2][0] = 0;
K1[2][1] = 0;
K1[2][2] = 1;

std::cout << " intr matrix 1 " << proj1*rect1.inverse() << std::endl;

std::cout << " intr matrix 2 " << proj1.inverse()*proj2<< std::endl;

std::cout << " external matrix " << rect2*proj2.inverse()*proj1*rect1.inverse() << std::endl;

std::cout << " calib " << c2Mb*c1Mb.inverse() << " " << std::endl; 

    std::cout << std::endl << "Output: hand to eye calibration result: eMc estimated " << std::endl ;
    std::cout << eMc.inverse() << std::endl ;
    std::cout << eMc << std::endl ;
    eMc.extract(erc);
    std::cout << "Theta U rotation: " << vpMath::deg(erc[0]) << " " << vpMath::deg(erc[1]) << " " << vpMath::deg(erc[2]) << std::endl;
    return 0 ;
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1 ;
  }
}
Example #3
0
int main()
{
  try {
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    // Define the target as 4 points
    vpPoint point[4] ;
    point[0].setWorldCoordinates(-0.1,-0.1, 0);
    point[1].setWorldCoordinates( 0.1,-0.1, 0);
    point[2].setWorldCoordinates( 0.1, 0.1, 0);
    point[3].setWorldCoordinates(-0.1, 0.1, 0);

#if defined(VISP_HAVE_OGRE)
    // Grayscale image used as background texture.
    vpImage<unsigned char> background(480, 640, 255);

    // Parameters of our camera
    vpCameraParameters cam(840, 840, background.getWidth()/2, background.getHeight()/2);

    // Our object
    // A simulator with the camera parameters defined above,
    // and the background image size
    vpAROgre ogre;
    ogre.setShowConfigDialog(false);
    ogre.setCameraParameters(cam);
    ogre.addResource("./"); // Add the path to the Sphere.mesh resource
    ogre.init(background, false, true);

    // Create the scene that contains 4 spheres
    // Sphere.mesh contains a sphere with 1 meter radius
    std::vector<std::string> name(4);
    for (unsigned int i=0; i<4; i++) {
      std::ostringstream s; s << "Sphere" <<  i; name[i] = s.str();
      ogre.load(name[i], "Sphere.mesh");
      ogre.setScale(name[i], 0.02f, 0.02f, 0.02f); // Rescale the sphere to 2 cm radius
      // Set the position of each sphere in the object frame
      ogre.setPosition(name[i], vpTranslationVector(point[i].get_oX(), point[i].get_oY(), point[i].get_oZ()));
    }

    // Add an optional point light source
    Ogre::Light * light = ogre.getSceneManager()->createLight();
    light->setDiffuseColour(1, 1, 1); // scaled RGB values
    light->setSpecularColour(1, 1, 1); // scaled RGB values
    light->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
    light->setType(Ogre::Light::LT_POINT);
#endif

    vpServo task ;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    vpFeaturePoint p[4], pd[4] ;
    for (int i = 0 ; i < 4 ; i++) {
      point[i].track(cdMo);
      vpFeatureBuilder::create(pd[i], point[i]);
      point[i].track(cMo);
      vpFeatureBuilder::create(p[i], point[i]);
      task.addFeature(p[i], pd[i]);
    }

    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;

    for (unsigned int iter=0; iter < 150; iter ++) {
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;
      for (int i = 0 ; i < 4 ; i++) {
        point[i].track(cMo);
        vpFeatureBuilder::create(p[i], point[i]);
      }
#if defined(VISP_HAVE_OGRE)
      // Update the scene from the new camera position
      ogre.display(background, cMo);
#endif
      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);
      vpTime::wait( robot.getSamplingTime() * 1000);
    }
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
  }
  catch(...) {
    std::cout << "Catch an exception " << std::endl;
  }
}
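
Conceptually, the simulated robots above turn the commanded camera velocity into a displacement over one sampling period. A minimal standalone sketch of that step (an illustration, not taken from the original programs), assuming the ViSP vpExponentialMap API where direct() and inverse() also accept a sampling time:

int main()
{
  vpColVector v(6);
  v = 0;
  v[2] = 0.1;            // 0.1 m/s along the camera z axis
  v[5] = vpMath::rad(5); // 5 deg/s around the camera z axis

  // Displacement produced by this velocity over a 40 ms sampling period
  vpHomogeneousMatrix cMc = vpExponentialMap::direct(v, 0.040);
  // Recover the velocity back from the displacement
  vpColVector v_back = vpExponentialMap::inverse(cMc, 0.040);

  std::cout << cMc << std::endl;
  std::cout << v_back.t() << std::endl;
  return 0;
}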
Example #4
0
static
void *mainLoop (void *_simu)
{
  // pointer copy of the vpSimulator instance
  vpSimulator *simu = (vpSimulator *)_simu ;

  // Simulation initialization
  simu->initMainApplication() ;


  /////////////////////////////////////////
  // sets the initial camera location
  vpHomogeneousMatrix cMo(-0.3,-0.2,3,
                          vpMath::rad(0),vpMath::rad(0),vpMath::rad(40))  ;

  ///////////////////////////////////
  // Initialize the robot
  vpRobotCamera robot ;
  robot.setSamplingTime(0.04); // 40ms 
  robot.setPosition(cMo) ;
  // Send the robot position to the visualizator
  simu->setCameraPosition(cMo) ;
  // Initialize the camera parameters
  vpCameraParameters cam ;
  simu->getCameraParameters(cam);  

  ////////////////////////////////////////
  // Desired visual features initialization

  // sets the points coordinates in the object frame (in meter)
  vpPoint point[4] ;
  point[0].setWorldCoordinates(-0.1,-0.1,0) ;
  point[1].setWorldCoordinates(0.1,-0.1,0) ;
  point[2].setWorldCoordinates(0.1,0.1,0) ;
  point[3].setWorldCoordinates(-0.1,0.1,0) ;

  // sets the desired camera location
  vpHomogeneousMatrix cMo_d(0,0,1,0,0,0) ;

  // computes the 3D point coordinates in the camera frame and its 2D coordinates
  for (int i = 0 ; i < 4 ; i++)
    point[i].project(cMo_d) ;

  // creates the associated features
  vpFeaturePoint pd[4] ;
  for (int i = 0 ; i < 4 ; i++)
    vpFeatureBuilder::create(pd[i],point[i]) ;



  ///////////////////////////////////////
  // Current visual features initialization

  // computes the 3D point coordinates in the camera frame and its 2D coordinates
  for (int i = 0 ; i < 4 ; i++)
    point[i].project(cMo) ;

  // creates the associated features
  vpFeaturePoint p[4] ;
  for (int i = 0 ; i < 4 ; i++)
    vpFeatureBuilder::create(p[i],point[i])  ;


  /////////////////////////////////
  // Task definition
  vpServo task ;
  // we want an eye-in-hand control law
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
  task.setInteractionMatrixType(vpServo::DESIRED,vpServo::PSEUDO_INVERSE) ;

  // Set the position of the camera in the end-effector frame
  vpHomogeneousMatrix cMe ;
  vpVelocityTwistMatrix cVe(cMe) ;
  task.set_cVe(cVe) ;
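  // Note: cMe is left at the identity, i.e. the camera and end-effector frames
  // are assumed to coincide; with a real mounting offset, cMe would encode it
  // and cVe would be rebuilt from that transformation.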
  // Set the Jacobian (expressed in the end-effector frame)
  vpMatrix eJe ;
  robot.get_eJe(eJe) ;
  task.set_eJe(eJe) ;

  // we want to see a point on a point
  for (int i = 0 ; i < 4 ; i++)
    task.addFeature(p[i],pd[i]) ;
  // Set the gain
  task.setLambda(1.0) ;
  // Print the current information about the task
  task.print();

  vpTime::wait(500);
  ////////////////////////////////////////////////
  // The control loop
  int k = 0;
  while(k++ < 200){
    double t = vpTime::measureTimeMs();
    
    
    // Update the current features
    for (int i = 0 ; i < 4 ; i++)
    {
      point[i].project(cMo) ;
      vpFeatureBuilder::create(p[i],point[i])  ;
    }
    
    // Update the robot Jacobian
    robot.get_eJe(eJe) ;
    task.set_eJe(eJe) ;
    
    // Compute the control law
    vpColVector v = task.computeControlLaw() ;
    
    // Send the computed velocity to the robot and compute the new robot position
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, v) ;
    robot.getPosition(cMo) ;
    
    // Send the robot position to the visualizator
    simu->setCameraPosition(cMo) ;

    // Print the current information about the task
    task.print();
    
    // Wait 40 ms
    vpTime::wait(t,40); 
  }  
  task.kill();
  simu->closeMainApplication() ;


  return NULL ;
}
Example #5
0
int main()
{
  // We want to calibrate the hand-eye extrinsic camera parameters from 6 pairs of poses: cMo and wMe
  const int N = 6;
  // Input: six pairs of poses used as input in the calibration process
  std::vector<vpHomogeneousMatrix> cMo(N) ; // eye (camera) to object transformation. The object frame is attached to the calibration grid
  std::vector<vpHomogeneousMatrix>  wMe(N) ; // world to hand (end-effector) transformation
  // Output: Result of the calibration
  vpHomogeneousMatrix  eMc; // hand (end-effector) to eye (camera) transformation

  // Initialize an eMc transformation used to produce the simulated input transformations cMo and wMe
  vpTranslationVector etc(0.1, 0.2, 0.3); 
  vpThetaUVector erc;
  erc[0] = vpMath::rad(10); // 10 deg
  erc[1] = vpMath::rad(-10); // -10 deg
  erc[2] = vpMath::rad(25); // 25 deg

  eMc.buildFrom(etc, erc);
  std::cout << "Simulated hand to eye transformation: eMc " << std::endl ;
  std::cout << eMc << std::endl ;
  std::cout << "Theta U rotation: " << vpMath::deg(erc[0]) << " " << vpMath::deg(erc[1]) << " " << vpMath::deg(erc[2]) << std::endl;

  vpColVector v_c(6) ; // camera velocity used to produce 6 simulated poses
  for (int i=0 ; i < N ; i++)
  {
    v_c = 0 ;
    if (i==0) {
      // Initialize first poses 
      cMo[0].buildFrom(0, 0, 0.5, 0, 0, 0); // z=0.5 m
      wMe[0].buildFrom(0, 0, 0, 0, 0, 0); // Id
    }
    else if (i==1)
      v_c[3] = M_PI/8 ;
    else if (i==2)
      v_c[4] = M_PI/8 ;
    else if (i==3)
      v_c[5] = M_PI/10 ;
    else if (i==4)
      v_c[0] = 0.5 ;
    else if (i==5)
      v_c[1] = 0.8 ;

    vpHomogeneousMatrix cMc; // camera displacement
    cMc = vpExponentialMap::direct(v_c) ; // Compute the camera displacement due to the velocity applied to the camera
    if (i > 0) {
      // From the camera displacement cMc, compute the wMe and cMo matrices
      cMo[i] = cMc.inverse() * cMo[i-1];
      wMe[i] = wMe[i-1] * eMc * cMc * eMc.inverse();
    }
  }

  if (0) {
    for (int i=0 ; i < N ; i++) {
      vpHomogeneousMatrix wMo;
      wMo = wMe[i] * eMc * cMo[i];
      std::cout << std::endl << "wMo[" << i << "] " << std::endl ;
      std::cout << wMo << std::endl ;
      std::cout << "cMo[" << i << "] " << std::endl ;
      std::cout << cMo[i] << std::endl ;
      std::cout << "wMe[" << i << "] " << std::endl ;
      std::cout << wMe[i] << std::endl ;
    }
  }

  // Reset the eMc matrix to eye
  eMc.eye();

  // Compute the eMc hand to eye transformation from six poses
  // - cMo[6]: camera to object poses as six homogeneous transformations
  // - wMe[6]: world to hand (end-effector) poses as six homogeneous transformations
  vpCalibration::calibrationTsai(cMo, wMe, eMc) ;
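  // Sanity check idea: with a correct eMc estimate, wMe[i] * eMc * cMo[i] is
  // the same transformation for every i, since the calibration object is fixed
  // in the world frame.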

  std::cout << std::endl << "Output: hand to eye calibration result: eMc estimated " << std::endl ;
  std::cout << eMc << std::endl ;
  eMc.extract(erc);
  std::cout << "Theta U rotation: " << vpMath::deg(erc[0]) << " " << vpMath::deg(erc[1]) << " " << vpMath::deg(erc[2]) << std::endl;
  return 0 ;
}
Example #6
0
int
main(int argc, const char ** argv)
{
  try {
    bool opt_display = true;
    bool opt_click_allowed = true;

    // Read the command line options
    if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
      exit (-1);
    }

    vpImage<unsigned char> I(512,512,0) ;

    // We open a window using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
    vpDisplayX display;
#elif defined VISP_HAVE_GTK
    vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV display;
#endif

    if (opt_display) {
      try{
        // Display size is automatically defined by the image (I) size
        display.init(I, 100, 100,"Camera view...") ;
        // Display the image
        // The image class has a member that stores a pointer toward
        // the display that has been initialized in the display declaration,
        // therefore it is no longer necessary to make a reference to the
        // display variable.
        vpDisplay::display(I) ;
        vpDisplay::flush(I) ;
      }
      catch(...)
      {
        vpERROR_TRACE("Error while displaying the image") ;
        exit(-1);
      }
    }

    // Set the camera parameters
    double px, py ; px = py = 600 ;
    double u0, v0 ; u0 = v0 = 256 ;

    vpCameraParameters cam(px,py,u0,v0);

    vpServo task ;
    vpSimulatorCamera robot ;

    // sets the initial camera location
    vpHomogeneousMatrix cMo(0.2,0.2,1,
                            vpMath::rad(45),  vpMath::rad(45),  vpMath::rad(125));

    // Compute the position of the object in the world frame
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc) ;
    wMo = wMc * cMo;

    // sets the final camera location (for simulation purpose)
    vpHomogeneousMatrix cMod(0,0,1,
                             vpMath::rad(0),  vpMath::rad(0),  vpMath::rad(0));


    int nbline = 4;

    // sets the line coordinates (2 planes) in the world frame
    vpLine line[4] ;
    line[0].setWorldCoordinates(1,0,0,0.05,0,0,1,0);
    line[1].setWorldCoordinates(0,1,0,0.05,0,0,1,0);
    line[2].setWorldCoordinates(1,0,0,-0.05,0,0,1,0);
    line[3].setWorldCoordinates(0,1,0,-0.05,0,0,1,0);

    vpFeatureLine ld[4] ;
    vpFeatureLine l[4] ;

    // sets the desired position of the visual feature
    for(int i = 0; i < nbline; i++)
    {
      line[i].track(cMod) ;
      line[i].print() ;

      vpFeatureBuilder::create(ld[i],line[i])  ;
    }

    // computes the line coordinates in the camera frame and its 2D coordinates
    // sets the current position of the visual feature
    for(int i = 0; i < nbline; i++)
    {
      line[i].track(cMo) ;
      line[i].print() ;

      vpFeatureBuilder::create(l[i],line[i])  ;
      l[i].print() ;
    }

    // define the task
    // - we want an eye-in-hand control law
    // - robot is controlled in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA) ;
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    //It could be also interesting to test the following tasks
    //task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
    //task.setInteractionMatrixType(vpServo::MEAN, vpServo::PSEUDO_INVERSE);

    // we want to see four lines on four lines
    for(int i = 0; i < nbline; i++)
      task.addFeature(l[i],ld[i]) ;

    vpDisplay::display(I) ;
    vpServoDisplay::display(task,cam,I) ;
    vpDisplay::flush(I) ;

    // set the gain
    task.setLambda(1) ;

    // Display task information
    task.print() ;

    if (opt_display && opt_click_allowed) {
      std::cout << "\n\nClick in the camera view window to start..." << std::endl;
      vpDisplay::getClick(I) ;
    }

    unsigned int iter=0 ;
    // loop
    while(iter++<200)
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;
      vpColVector v ;

      // get the robot position
      robot.getPosition(wMc) ;
      // Compute the position of the camera wrt the object frame
      cMo = wMc.inverse() * wMo;

      // new line position: retrieve x,y and Z of the vpLine structure
      for(int i = 0; i < nbline; i++)
      {
        line[i].track(cMo) ;
        vpFeatureBuilder::create(l[i],line[i]);
      }

      if (opt_display) {
        vpDisplay::display(I) ;
        vpServoDisplay::display(task,cam,I) ;
        vpDisplay::flush(I) ;
      }

      // compute the control law
      v = task.computeControlLaw() ;

      // send the camera velocity to the controller
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ; ;

    }

    if (opt_display && opt_click_allowed) {
      std::cout << "\nClick in the camera view window to end..." << std::endl;
      vpDisplay::getClick(I) ;
    }

    // Display task information
    task.print() ;
    task.kill();
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return 1;
  }
}
Example #7
0
int
main(int argc, const char ** argv)
{
  try {
    bool opt_click_allowed = true;
    bool opt_display = true;

    // Read the command line options
    if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
      exit (-1);
    }

    // We open a display for the internal camera view, using either X11, GDI or OpenCV.
#if defined VISP_HAVE_X11
    vpDisplayX displayInt;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI displayInt;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV displayInt;
#endif

    vpImage<unsigned char> Iint(480, 640, 255);

    if (opt_display) {
      // open a display for the visualization
      displayInt.init(Iint,700,0, "Internal view") ;
    }

    vpServo task;

    std::cout << std::endl ;
    std::cout << "----------------------------------------------" << std::endl ;
    std::cout << " Test program for vpServo "  <<std::endl ;
    std::cout << " Eye-in-hand task control, articular velocity are computed"
              << std::endl ;
    std::cout << " Simulation " << std::endl ;
    std::cout << " task : servo 4 points " << std::endl ;
    std::cout << "----------------------------------------------" << std::endl ;
    std::cout << std::endl ;

    // sets the initial camera location
    vpHomogeneousMatrix cMo(-0.05,-0.05,0.7,
                            vpMath::rad(10),  vpMath::rad(10),  vpMath::rad(-30));

    // sets the point coordinates in the object frame
    vpPoint point[4] ;
    point[0].setWorldCoordinates(-0.045,-0.045,0) ;
    point[1].setWorldCoordinates(0.045,-0.045,0) ;
    point[2].setWorldCoordinates(0.045,0.045,0) ;
    point[3].setWorldCoordinates(-0.045,0.045,0) ;

    // computes the point coordinates in the camera frame and its 2D coordinates
    for (unsigned int i = 0 ; i < 4 ; i++)
      point[i].track(cMo) ;

    // sets the current position of the feature points
    vpFeaturePoint p[4] ;
    for (unsigned int i = 0 ; i < 4 ; i++)
      vpFeatureBuilder::create(p[i],point[i])  ;  //retrieve x,y and Z of the vpPoint structure

    // sets the desired position of the feature point s*
    vpFeaturePoint pd[4] ;

    // Desired pose
    vpHomogeneousMatrix cdMo(0.0, 0.0, 0.8, vpMath::rad(0), vpMath::rad(0), vpMath::rad(0));

    // Projection of the points
    for (unsigned int  i = 0 ; i < 4 ; i++)
      point[i].track(cdMo);

    for (unsigned int  i = 0 ; i < 4 ; i++)
      vpFeatureBuilder::create(pd[i], point[i]);

    // define the task
    // - we want an eye-in-hand control law
    // - velocity is computed in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::DESIRED) ;

    // we want to see a point on a point
    for (unsigned int i = 0 ; i < 4 ; i++)
      task.addFeature(p[i],pd[i]) ;

    // set the gain
    task.setLambda(0.8) ;

    // Declaration of the robot
    vpSimulatorAfma6 robot(opt_display);

    // Initialise the robot and especially the camera
    robot.init(vpAfma6::TOOL_CCMOP, vpCameraParameters::perspectiveProjWithoutDistortion);
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    // Initialise the object for the display part
    robot.initScene(vpWireFrameSimulator::PLATE, vpWireFrameSimulator::D_STANDARD);

    // Initialise the position of the object relative to the pose of the robot's camera
    robot.initialiseObjectRelativeToCamera(cMo);

    // Set the desired position (for the displaypart)
    robot.setDesiredCameraPosition(cdMo);

    // Get the internal robot's camera parameters
    vpCameraParameters cam;
    robot.getCameraParameters(cam,Iint);

    if (opt_display)
    {
      //Get the internal view
      vpDisplay::display(Iint);
      robot.getInternalView(Iint);
      vpDisplay::flush(Iint);
    }

    // Display task information
    task.print() ;

    unsigned int iter=0 ;
    vpTRACE("\t loop") ;
    while(iter++<500)
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;
      vpColVector v ;

      // Get the Time at the beginning of the loop
      double t = vpTime::measureTimeMs();

      // Get the current pose of the camera
      cMo = robot.get_cMo();

      if (iter==1) {
        std::cout <<"Initial robot position with respect to the object frame:\n";
        cMo.print();
      }

      // new point position
      for (unsigned int i = 0 ; i < 4 ; i++)
      {
        point[i].track(cMo) ;
        // retrieve x,y and Z of the vpPoint structure
        vpFeatureBuilder::create(p[i],point[i])  ;
      }

      if (opt_display)
      {
        // Get the internal view and display it
        vpDisplay::display(Iint) ;
        robot.getInternalView(Iint);
        vpDisplay::flush(Iint);
      }

      if (opt_display && opt_click_allowed && iter == 1)
      {
        // suppressed for automate test
        std::cout << "Click in the internal view window to continue..." << std::endl;
        vpDisplay::getClick(Iint) ;
      }

      // compute the control law
      v = task.computeControlLaw() ;

      // send the camera velocity to the controller
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      std::cout << "|| s - s* || " << ( task.getError() ).sumSquare() <<std::endl ;

      // The main loop has a duration of 10 ms at minimum
      vpTime::wait(t,10);
    }

    // Display task information
    task.print() ;
    task.kill();

    std::cout <<"Final robot position with respect to the object frame:\n";
    cMo.print();

    if (opt_display && opt_click_allowed)
    {
      // suppressed for automate test
      std::cout << "Click in the internal view window to end..." << std::endl;
      vpDisplay::getClick(Iint) ;
    }
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return 1;
  }
}
Example #8
0
int main()
{
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)
  try {
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    vpImage<unsigned char> I(480, 640, 255);
    vpCameraParameters cam(840, 840, I.getWidth()/2, I.getHeight()/2);

    std::vector<vpPoint> point(4) ;
    point[0].setWorldCoordinates(-0.1,-0.1, 0);
    point[1].setWorldCoordinates( 0.1,-0.1, 0);
    point[2].setWorldCoordinates( 0.1, 0.1, 0);
    point[3].setWorldCoordinates(-0.1, 0.1, 0);

    vpServo task ;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    vpVirtualGrabber g("./target_square.pgm", cam);
    g.acquire(I, cMo);

#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 0, 0, "Current camera view");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 0, 0, "Current camera view");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV d(I, 0, 0, "Current camera view");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif

    vpDisplay::display(I);
    vpDisplay::displayText(I, 10, 10,
                           "Click in the 4 dots to initialise the tracking and start the servo",
                           vpColor::red);
    vpDisplay::flush(I);

    vpFeaturePoint p[4], pd[4];
    std::vector<vpDot2> dot(4);

    for (unsigned int i = 0 ; i < 4 ; i++) {
      point[i].track(cdMo);
      vpFeatureBuilder::create(pd[i], point[i]);

      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(p[i], cam, dot[i].getCog());

      task.addFeature(p[i], pd[i]);
    }

    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;
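    // The object is static, so its pose wMo in the world frame is constant;
    // computing it once lets the loop derive cMo from the simulated robot
    // odometry: cMo = wMc^-1 * wMo.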

    for (; ; ) {
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;

      g.acquire(I, cMo);

      vpDisplay::display(I);

      for (unsigned int i = 0 ; i < 4 ; i++) {
        dot[i].track(I);
        vpFeatureBuilder::create(p[i], cam, dot[i].getCog());

        vpColVector cP;
        point[i].changeFrame(cMo, cP) ;
        p[i].set_Z(cP[2]);
      }
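      // Updating Z is needed because the task uses the CURRENT interaction
      // matrix (see setInteractionMatrixType above), which depends on the
      // true depth of each point.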

      vpColVector v = task.computeControlLaw();

      display_trajectory(I, dot);
      vpServoDisplay::display(task, cam, I, vpColor::green, vpColor::red) ;
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;

      vpTime::wait( robot.getSamplingTime() * 1000);
    }
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
  }
#endif
}
int main()
{
  //////////////////////////////////////////
  // sets the initial camera location
  vpHomogeneousMatrix cMo(0.3,0.2,3,
                          vpMath::rad(0),vpMath::rad(0),vpMath::rad(40))  ;

  ///////////////////////////////////
  // initialize the robot
  vpRobotCamera robot ;
  robot.setSamplingTime(0.04); // 40ms 
  robot.setPosition(cMo) ;

  //initialize the camera parameters
  vpCameraParameters cam(800,800,240,180); 

  //Image definition
  unsigned int height = 360 ;
  unsigned int width  = 480 ;
  vpImage<unsigned char> I(height,width);

  //Display initialization
  vpDisplayGTK disp;
  disp.init(I,100,100,"Simulation display");

  ////////////////////////////////////////
  // Desired visual features initialization

  // sets the point coordinates in the object frame (in meters)
  vpPoint point[4] ;
  point[0].setWorldCoordinates(-0.1,-0.1,0) ;
  point[1].setWorldCoordinates(0.1,-0.1,0) ;
  point[2].setWorldCoordinates(0.1,0.1,0) ;
  point[3].setWorldCoordinates(-0.1,0.1,0) ;

  // sets the desired camera location
  vpHomogeneousMatrix cMo_d(0,0,1,0,0,0) ;

  // computes the 3D point coordinates in the camera frame and their 2D coordinates
  for (int i = 0 ; i < 4 ; i++)
    point[i].project(cMo_d) ;

  // creates the associated features
  vpFeaturePoint pd[4] ;
  for (int i = 0 ; i < 4 ; i++)
    vpFeatureBuilder::create(pd[i],point[i]) ;


  ///////////////////////////////////////
  // Current visual features initialization

  // computes the 3D point coordinates in the camera frame and their 2D coordinates
  for (int i = 0 ; i < 4 ; i++)
    point[i].project(cMo) ;

  // creates the associated features
  vpFeaturePoint p[4] ;
  for (int i = 0 ; i < 4 ; i++)
    vpFeatureBuilder::create(p[i],point[i])  ;


  /////////////////////////////////
  // Task definition
  vpServo task ;
  // we want an eye-in-hand control law
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE) ;
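  // In this mode the control law is computed in the joint space,
  // qdot = -lambda * (L * cVe * eJe)^+ * (s - s*), hence the need to provide
  // both the camera-to-end-effector twist matrix cVe and the robot Jacobian eJe below.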

  // Set the position of the camera in the end-effector frame
  vpHomogeneousMatrix cMe ;
  vpVelocityTwistMatrix cVe(cMe) ;
  task.set_cVe(cVe) ;
  // Set the Jacobian (expressed in the end-effector frame)
  vpMatrix eJe ;
  robot.get_eJe(eJe) ;
  task.set_eJe(eJe) ;

  // we want to see a point on a point
  for (int i = 0 ; i < 4 ; i++)
    task.addFeature(p[i],pd[i]) ;
  // Set the gain
  task.setLambda(1.0) ;
  // Print the current information about the task
  task.print();


  ////////////////////////////////////////////////
  // The control loop
  int k = 0;
  while(k++ < 200){
    double t = vpTime::measureTimeMs();
    
    // Display the image background
    vpDisplay::display(I);
    
    // Update the current features
    for (int i = 0 ; i < 4 ; i++)
    {
      point[i].project(cMo) ;
      vpFeatureBuilder::create(p[i],point[i])  ;
    }
    
    // Display the task features (current and desired)
    vpServoDisplay::display(task,cam,I);
    vpDisplay::flush(I);
    
    // Update the robot Jacobian
    robot.get_eJe(eJe) ;
    task.set_eJe(eJe) ;
    
    // Compute the control law
    vpColVector v = task.computeControlLaw() ;
    
    // Send the computed velocity to the robot and compute the new robot position
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, v) ;
    robot.getPosition(cMo) ;
    
    // Print the current information about the task
    task.print();
    
    // Wait 40 ms
    vpTime::wait(t,40); 
  }  
  task.kill();
  return 0;
}
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  try {
    vpImage<unsigned char> I; // Create a gray level image container
    double lambda = 0.1;
    // Scale parameter used to estimate the depth Z of the blob from its surface
    //double coef = 0.9/14.85;  // At 0.9 m, the blob has a surface of 14.85 (Logitech Sphere)
    double coef = 1.2/13.0;  // At 1.2 m, the blob has a surface of 13.0 (AVT Pike 032C)
    double L = 0.21; // 3D horizontal segment length
    double Z_d = 0.8; // Desired distance along Z between camera and segment
    bool normalized = true; // normalized segment features are used

    // Warning: To have a non-singular task of rank 3, Y_d should be different from 0 so that
    // the optical axis doesn't intersect the horizontal segment
    double Y_d = -.11;   // Desired distance along Y between camera and segment.
    vpColVector qm(2); // Measured head position
    qm = 0;
    double qm_pan = 0; // Measured pan position (tilt is not handled in this example)

#ifdef USE_REAL_ROBOT
    // Initialize the biclops head

    vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
    biclops.setDenavitHartenbergModel(vpBiclops::DH1);

    // Move to the initial position
    vpColVector q(2);

    q=0;
    //  q[0] = vpMath::rad(63);
    //  q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera sphere tilt so that the camera is parallel to the plane

    biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL) ;
    biclops.setPosition( vpRobot::ARTICULAR_FRAME, q );
    //biclops.setPositioningVelocity(50);
    biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
    qm_pan = qm[0];

    // Now the head will be controlled in velocity
    biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;

    // Initialize the pioneer robot
    vpRobotPioneer pioneer;
    ArArgumentParser parser(&argc, argv);
    parser.loadDefaultArguments();

    // ArRobotConnector connects to the robot, gets some initial data from it such as type and name,
    // and then loads parameter files for this robot.
    ArRobotConnector robotConnector(&parser, &pioneer);
    if(!robotConnector.connectRobot())
    {
      ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
      if(parser.checkHelpAndWarnUnparsed())
      {
        Aria::logOptions();
        Aria::exit(1);
      }
    }
    if (!Aria::parseArgs())
    {
      Aria::logOptions();
      Aria::shutdown();
      return 1; // error: could not parse the Aria arguments
    }

    pioneer.useSonar(false); // disable the sonar device usage

    // Wait 3 sec to be sure that the low level Aria thread used to control
    // the robot is started. Without this delay we experienced a lag (around 2.2 sec)
    // between the velocity sent to the robot and the velocity really applied
    // to the wheels.
    sleep(3);

    std::cout << "Pioneer robot connected" << std::endl;
#endif

    vpPioneerPan robot_pan; // Generic robot that computes the velocities for the pioneer and the biclops head

    // Camera parameters. In this experiment we don't need a precise calibration of the camera
    vpCameraParameters cam;

    // Create the camera framegrabber
#if defined(VISP_HAVE_V4L2)
    // Create a grabber based on v4l2 third party lib (for usb cameras under Linux)
    vpV4l2Grabber g;
    g.setScale(1);
    g.setInput(0);
    g.setDevice("/dev/video1");
    g.open(I);
    // Logitech Sphere parameters
    cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394)
    // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
    vp1394TwoGrabber g(false);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
    // Create a grabber based on CMU 1394 third party lib (for firewire cameras under Windows)
    vp1394CMUGrabber g;
    g.setVideoMode(0, 5); // 640x480 MONO8
    g.setFramerate(4);    // 30 Hz
    g.open(I);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif

    // Acquire an image from the grabber
    g.acquire(I);

    // Create an image viewer
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
    vpDisplay::display(I);
    vpDisplay::flush(I);

    // The 3D segment consists of two horizontally aligned dots
    vpDot2 dot[2];
    for (int i=0; i <2; i++)
    {
      dot[i].setGraphics(true);
      dot[i].setComputeMoments(true);
      dot[i].setEllipsoidShapePrecision(0.);  // to track a blob without any constraint on the shape
      dot[i].setGrayLevelPrecision(0.9);  // to set the blob gray level bounds for binarisation
      dot[i].setEllipsoidBadPointsPercentage(0.5); // accept up to 50% of inner and outside points with a wrong gray level
      dot[i].initTracking(I);
      vpDisplay::flush(I);
    }

    vpServo task;
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE) ;
    task.setLambda(lambda) ;
    vpVelocityTwistMatrix cVe ; // keep to identity
    cVe = robot_pan.get_cVe() ;
    task.set_cVe(cVe) ;

    std::cout << "cVe: \n" << cVe << std::endl;

    vpMatrix eJe;

    // Update the robot jacobian that depends on the pan position
    robot_pan.set_eJe(qm_pan);
    // Get the robot jacobian
    eJe = robot_pan.get_eJe() ;
    task.set_eJe(eJe) ;
    std::cout << "eJe: \n" << eJe << std::endl;

    // Define a 3D horizontal segment and its coordinates in the image plane
    vpPoint P[2];
    P[0].setWorldCoordinates(-L/2, 0, 0);
    P[1].setWorldCoordinates( L/2, 0, 0);
    // Define the desired camera position
    vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0, 0);  // Here we are in front of the segment
    for (int i=0; i <2; i++)
    {
      P[i].changeFrame(cMo);
      P[i].project(); // Here the x,y parameters obtained by perspective projection are computed
    }

    // Estimate the depth of the segment extremity points
    double surface[2];
    double Z[2]; // Depth of the segment points
    for (int i=0; i<2; i++)
    {
      // Surface of the blob estimated from the image moment m00 and converted to meters
      surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

      // Initial depth of the blob
      Z[i] = coef * surface[i] ;
    }
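    // Depth model: the apparent blob area scales as 1/Z^2, so 1/sqrt(area) is
    // proportional to Z; coef, calibrated at a known distance, converts this
    // measure into a depth in meters.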

    // Use here a feature segment builder
    vpFeatureSegment s_segment(normalized), s_segment_d(normalized); // current and desired segment features; xc, l and alpha are used below
    vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
    s_segment.setZ1(Z[0]);
    s_segment.setZ2(Z[1]);
    // Set the desired feature
    vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
    s_segment_d.setZ1( P[0].get_Z() ); // Desired depth
    s_segment_d.setZ2( P[1].get_Z() );

    task.addFeature(s_segment, s_segment_d,
                    vpFeatureSegment::selectXc() |
                    vpFeatureSegment::selectL() |
                    vpFeatureSegment::selectAlpha());
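    // Three features (xc, l, alpha) are selected to control the three
    // available degrees of freedom: the pioneer translation vx and rotation
    // wz, plus the biclops pan axis.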

#ifdef USE_PLOTTER
    //Create a window (500 by 500) at position (700, 10) with two graphics
    vpPlot graph(2, 500, 500, 700, 10, "Curves...");

    //Each of the two graphics contains 3 curves
    graph.initGraph(0,3);
    graph.initGraph(1,3);
    graph.setTitle(0, "Velocities");
    graph.setTitle(1, "Error s-s*");
    graph.setLegend(0, 0, "vx");
    graph.setLegend(0, 1, "wz");
    graph.setLegend(0, 2, "w_pan");
    graph.setLegend(1, 0, "xm/l");
    graph.setLegend(1, 1, "1/l");
    graph.setLegend(1, 2, "alpha");
#endif

    vpColVector v; // computed velocities: vx, wz and the pan axis velocity
    unsigned int iter = 0;
    try
    {
      while(1)
      {
#ifdef USE_REAL_ROBOT
        // Get the new pan position
        biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
#endif
        qm_pan = qm[0];

        // Acquire a new image
        g.acquire(I);
        // Set the image as background of the viewer
        vpDisplay::display(I);

        // Display the desired position of the segment
        for (int i=0; i<2; i++)
          P[i].display(I, cam, vpColor::red, 3);

        // Track the two blobs
        for (int i=0; i<2; i++)
          dot[i].track(I);

        for (int i=0; i<2; i++)
        {
          // Surface of the blob estimated from the image moment m00 and converted to meters
          surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

          // Initial depth of the blob
          Z[i] = coef * surface[i] ;
        }

        // Update the features
        vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
        // Update the depth of the point. Useful only if current interaction matrix is used
        // when task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
        s_segment.setZ1(Z[0]);
        s_segment.setZ2(Z[1]);

        robot_pan.get_cVe(cVe);
        task.set_cVe(cVe);

        // Update the robot jacobian that depends on the pan position
        robot_pan.set_eJe(qm_pan);
        // Get the robot jacobian
        eJe = robot_pan.get_eJe();
        // Update the jacobian that will be used to compute the control law
        task.set_eJe(eJe);

        // Compute the control law. Velocities are computed in the mobile robot reference frame
        v = task.computeControlLaw();

        //      std::cout << "-----" << std::endl;
        //      std::cout << "v: " << v.t() << std::endl;
        //      std::cout << "error: " << task.getError().t() << std::endl;
        //      std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
        //      std::cout << "eJe:\n " << task.get_eJe() << std::endl;
        //      std::cout << "cVe:\n " << task.get_cVe() << std::endl;
        //      std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
        //      task.print() ;
        if (task.getTaskRank() != 3)
          std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;

#ifdef USE_PLOTTER
        graph.plot(0, iter, v); // plot velocities applied to the robot
        graph.plot(1, iter, task.getError()); // plot error vector
#endif

#ifdef USE_REAL_ROBOT
        // Send the velocity to the robot
        vpColVector v_pioneer(2); // vx, wz
        v_pioneer[0] = v[0];
        v_pioneer[1] = v[1];
        vpColVector v_biclops(2); // qdot pan and tilt
        v_biclops[0] = v[2];
        v_biclops[1] = 0;

        std::cout << "Send velocity to the pionner: " << v_pioneer[0] << " m/s "
                  << vpMath::deg(v_pioneer[1]) << " deg/s" << std::endl;
        std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;

        pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
        biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops) ;
#endif

        // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
        vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
        vpDisplay::flush(I);

        // A click in the viewer to exit
        if ( vpDisplay::getClick(I, false) )
          break;

        iter ++;
        //break;
      }
    }
    catch(...)
    {
    }

#ifdef USE_REAL_ROBOT
    std::cout << "Ending robot thread..." << std::endl;
    pioneer.stopRunning();

    // wait for the thread to stop
    pioneer.waitForRunExit();
#endif

    // Kill the servo task
    task.print() ;
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }
#endif
#endif
}
int
main(int argc, const char ** argv)
{
  bool opt_display = true;
  bool opt_click_allowed = true;

  // Read the command line options
  if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
    exit (-1);
  }

  vpImage<unsigned char> I(512,512,0) ;

  // We open a window using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#endif

  if (opt_display) {
    try{
      // Display size is automatically defined by the image (I) size
      display.init(I, 100, 100,"Camera view...") ;
      // Display the image
      // The image class has a member that points to the display initialized
      // in the display declaration, so it is no longer necessary to reference
      // the display variable explicitly.
      vpDisplay::display(I) ;
      vpDisplay::flush(I) ;
    }
    catch(...)
    {
      vpERROR_TRACE("Error while displaying the image") ;
      exit(-1);
    }
  }

  double px, py ; px = py = 600 ;
  double u0, v0 ; u0 = v0 = 256 ;

  vpCameraParameters cam(px,py,u0,v0);

  vpServo task ;
  vpSimulatorCamera robot ;

  // sets the initial camera location
  vpHomogeneousMatrix cMo(-0.2,0.1,1,
                          vpMath::rad(5),  vpMath::rad(5),  vpMath::rad(90));

  // Compute the position of the object in the world frame
  vpHomogeneousMatrix wMc, wMo;
  robot.getPosition(wMc) ;
  wMo = wMc * cMo;

  // sets the final camera location (for simulation purpose)
  vpHomogeneousMatrix cMod(0,0,1,
                           vpMath::rad(0),  vpMath::rad(0),  vpMath::rad(0));

  // sets the line coordinates (2 planes) in the world frame
  vpColVector plane1(4) ;
  vpColVector plane2(4) ;
  plane1[0] = 0;  // z = 0
  plane1[1] = 0;
  plane1[2] = 1;
  plane1[3] = 0;
  plane2[0] = 0; // y = 0
  plane2[1] = 1;
  plane2[2] = 0;
  plane2[3] = 0;

  vpLine line ;
  line.setWorldCoordinates(plane1, plane2) ;
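  // A 3D line is represented as the intersection of two planes; with z = 0
  // and y = 0 the tracked line is the x axis of the object frame.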

  // sets the desired position of the visual feature
  line.track(cMod) ;
  line.print() ;

  vpFeatureLine ld ;
  vpFeatureBuilder::create(ld,line)  ;

  // computes the line coordinates in the camera frame and its 2D coordinates
  // sets the current position of the visual feature
  line.track(cMo) ;
  line.print() ;

  vpFeatureLine l ;
  vpFeatureBuilder::create(l,line)  ;
  l.print() ;

  // define the task
  // - we want an eye-in-hand control law
  // - robot is controlled in the camera frame
  task.setServo(vpServo::EYEINHAND_CAMERA) ;

  // we want to see a line on a line

  task.addFeature(l,ld) ;
  vpDisplay::display(I) ;
  vpServoDisplay::display(task,cam,I) ;
  vpDisplay::flush(I) ; 

  // set the gain
  task.setLambda(1) ;
  // Display task information
  task.print() ;

  if (opt_display && opt_click_allowed) {
    std::cout << "\n\nClick in the camera view window to start..." << std::endl;
    vpDisplay::getClick(I) ;
  }

  unsigned int iter=0 ;
  // loop
  while(iter++<200)
  {
    std::cout << "---------------------------------------------" << iter <<std::endl ;
    vpColVector v ;

    // get the robot position
    robot.getPosition(wMc) ;
    // Compute the position of the camera wrt the object frame
    cMo = wMc.inverse() * wMo;

    // new line position
    line.track(cMo) ;
    // retrieve rho, theta and the plane parameters of the vpLine structure
    vpFeatureBuilder::create(l,line);

    if (opt_display) {
      vpDisplay::display(I) ;
      vpServoDisplay::display(task,cam,I) ;
      vpDisplay::flush(I) ;
    }

    // compute the control law
    v = task.computeControlLaw() ;

    // send the camera velocity to the controller
    robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

    std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ;
  }

  if (opt_display && opt_click_allowed) {
    std::cout << "\nClick in the camera view window to end..." << std::endl;
    vpDisplay::getClick(I) ;
  }

  // Display task information
  task.print() ;
  task.kill();
}
int main()
{
  try {
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1.,
                            vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    std::vector<vpPoint> point;
    point.push_back( vpPoint(-0.1,-0.1, 0) );
    point.push_back( vpPoint( 0.1,-0.1, 0) );
    point.push_back( vpPoint( 0.1, 0.1, 0) );
    point.push_back( vpPoint(-0.1, 0.1, 0) );

    vpServo task ;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    vpFeaturePoint p[4], pd[4] ;
    for (unsigned int i = 0 ; i < 4 ; i++) {
      point[i].track(cdMo);
      vpFeatureBuilder::create(pd[i], point[i]);
      point[i].track(cMo);
      vpFeatureBuilder::create(p[i], point[i]);
      task.addFeature(p[i], pd[i]);
    }

    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;

    vpImage<unsigned char> Iint(480, 640, 255) ;
    vpImage<unsigned char> Iext(480, 640, 255) ;
#if defined(VISP_HAVE_X11)
    vpDisplayX displayInt(Iint, 0, 0, "Internal view");
    vpDisplayX displayExt(Iext, 670, 0, "External view");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI displayInt(Iint, 0, 0, "Internal view");
    vpDisplayGDI displayExt(Iext, 670, 0, "External view");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV displayInt(Iint, 0, 0, "Internal view");
    vpDisplayOpenCV displayExt(Iext, 670, 0, "External view");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif


#if defined(VISP_HAVE_DISPLAY)
    vpProjectionDisplay externalview;
    for (unsigned int i = 0 ; i < 4 ; i++)
      externalview.insert(point[i]) ;
#endif
    vpCameraParameters cam(840, 840, Iint.getWidth()/2, Iint.getHeight()/2);
    vpHomogeneousMatrix cextMo(0,0,3, 0,0,0);
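    // Pose of the object in the fixed external camera frame: the external
    // camera observes the scene from 3 m away and is used for display only.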

    while(1) {
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;
      for (unsigned int i = 0 ; i < 4 ; i++) {
        point[i].track(cMo);
        vpFeatureBuilder::create(p[i], point[i]);
      }
      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      vpDisplay::display(Iint) ;
      vpDisplay::display(Iext) ;
      display_trajectory(Iint, point, cMo, cam);

      vpServoDisplay::display(task, cam, Iint, vpColor::green, vpColor::red);
#if defined(VISP_HAVE_DISPLAY)
      externalview.display(Iext, cextMo, cMo, cam, vpColor::red, true);
#endif
      vpDisplay::flush(Iint);
      vpDisplay::flush(Iext);

      // A click to exit
      if (vpDisplay::getClick(Iint, false) || vpDisplay::getClick(Iext, false))
        break;

      vpTime::wait( robot.getSamplingTime() * 1000);
    }
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
  }
}
int
main(int argc, const char ** argv)
{
  bool opt_display = true;
  bool opt_click_allowed = true;

  // Read the command line options
  if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
    exit (-1);
  }

  vpImage<unsigned char> I(512,512,255) ;

  // We open a window using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#endif

  if (opt_display) {
    try{
      // Display size is automatically defined by the image (I) size
      display.init(I, 100, 100,"Camera view...") ;
      // Display the image
      // The image class has a member that points to the display initialized
      // in the display declaration, so it is no longer necessary to reference
      // the display variable explicitly.
      vpDisplay::display(I) ;
      vpDisplay::flush(I) ;
    }
    catch(...)
    {
      vpERROR_TRACE("Error while displaying the image") ;
      exit(-1);
    }
  }

  double px, py ; px = py = 600 ;
  double u0, v0 ; u0 = v0 = 256 ;

  vpCameraParameters cam(px,py,u0,v0);

  vpServo task ;
  vpRobotCamera robot ;

  vpTRACE("sets the initial camera location " ) ;
  vpHomogeneousMatrix cMo(-0.2,0.1,2,
                          vpMath::rad(5),  vpMath::rad(5),  vpMath::rad(20));

  robot.setPosition(cMo) ;

  vpTRACE("sets the final camera location (for simulation purpose)" ) ;
  vpHomogeneousMatrix cMod(0,0,1,
                           vpMath::rad(-60),  vpMath::rad(0),  vpMath::rad(0));



  vpTRACE("sets the cylinder coordinates in the world frame "  ) ;
  vpCylinder cylinder(0,1,0,  // direction
                      0,0,0,  // point of the axis
                      0.1) ;  // radius
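  // A vpCylinder is parameterized by its axis direction (here the world y
  // axis), a point belonging to the axis and its radius.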

  vpTRACE("sets the desired position of the visual feature ") ;
  cylinder.track(cMod) ;
  cylinder.print() ;

  vpFeatureLine ld[2] ;
  int i ;
  for(i=0 ; i < 2 ; i++)
    vpFeatureBuilder::create(ld[i],cylinder,i)  ;


  vpTRACE("project : computes  the cylinder coordinates in the camera frame and its 2D coordinates"  ) ;
  vpTRACE("sets the current position of the visual feature ") ;
  cylinder.track(cMo) ;
  cylinder.print() ;

  vpFeatureLine l[2] ;
  for(i=0 ; i < 2 ; i++)
    {
      vpFeatureBuilder::create(l[i],cylinder,i)  ;
      l[i].print() ;
    }

  vpTRACE("define the task") ;
  vpTRACE("\t we want an eye-in-hand control law") ;
  vpTRACE("\t robot is controlled in the camera frame") ;
  task.setServo(vpServo::EYEINHAND_CAMERA) ;
  // task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) ;
  //  it can also be interesting to test these possibilities
  //    task.setInteractionMatrixType(vpServo::MEAN, vpServo::PSEUDO_INVERSE) ;
  task.setInteractionMatrixType(vpServo::DESIRED,vpServo::PSEUDO_INVERSE) ;
  //task.setInteractionMatrixType(vpServo::DESIRED,  vpServo::TRANSPOSE) ;
  // task.setInteractionMatrixType(vpServo::CURRENT,  vpServo::TRANSPOSE) ;

  vpTRACE("\t we want to see  2 lines on 2 lines.") ;

  task.addFeature(l[0],ld[0]) ;
  task.addFeature(l[1],ld[1]) ;

  vpServoDisplay::display(task,cam,I) ;
  vpDisplay::flush(I) ;

  vpTRACE("Display task information " ) ;
  task.print() ;

  if (opt_display && opt_click_allowed) {
    std::cout << "\n\nClick in the camera view window to start..." << std::endl;
    vpDisplay::getClick(I) ;
  }

  vpTRACE("\t set the gain") ;
  task.setLambda(1) ;


  vpTRACE("Display task information " ) ;
  task.print() ;

  int iter=0 ;
  vpTRACE("\t loop") ;
  do
    {
      std::cout << "---------------------------------------------" << iter++ <<std::endl ;
      vpColVector v ;

      if (iter==1) vpTRACE("\t\t get the robot position ") ;
      robot.getPosition(cMo) ;
      if (iter==1) vpTRACE("\t\t new line position ") ;
      //retrieve rho, theta and the plane parameters of the vpLine structure

      cylinder.track(cMo) ;
      //    cylinder.print() ;
      for(i=0 ; i < 2 ; i++)
      {
        vpFeatureBuilder::create(l[i],cylinder,i)  ;
        //   l[i].print() ;
      }

      if (opt_display) {
        vpDisplay::display(I) ;
        vpServoDisplay::display(task,cam,I) ;
        vpDisplay::flush(I) ;
      }

      if (iter==1) vpTRACE("\t\t compute the control law ") ;
      v = task.computeControlLaw() ;

      if (iter==1) vpTRACE("\t\t send the camera velocity to the controller ") ;
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      vpTRACE("\t\t || s - s* || ") ;
      std::cout << ( task.getError() ).sumSquare() << std::endl ;

      //   vpDisplay::getClick(I) ;
    }
  while(( task.getError() ).sumSquare() >  1e-9) ;

  if (opt_display && opt_click_allowed) {
    std::cout << "\nClick in the camera view window to end..." << std::endl;
    vpDisplay::getClick(I) ;
  }

  vpTRACE("Display task information " ) ;
  task.print() ;
  task.kill();
}
Example #14
0
static
void *mainLoop (void *_simu)
{
  // pointer copy of the vpSimulator instance
  vpSimulator *simu = (vpSimulator *)_simu ;

  // Simulation initialization
  simu->initMainApplication() ;

  ///////////////////////////////////
  // Set the initial camera location
  vpHomogeneousMatrix cMo(0.3,0.2,3,
                          vpMath::rad(0),vpMath::rad(0),vpMath::rad(40));

  ///////////////////////////////////
  // Initialize the robot
  vpRobotCamera robot ;
  robot.setSamplingTime(0.04); // 40ms 
  robot.setPosition(cMo) ;
  // Send the robot position to the visualizator
  simu->setCameraPosition(cMo) ;
  // Initialize the camera parameters
  vpCameraParameters cam ;
  simu->getCameraParameters(cam);

  ////////////////////////////////////////
  // Desired visual features initialization

  // sets the point coordinates in the object frame (in meters)
  vpPoint point[4] ;
  point[0].setWorldCoordinates(-0.1,-0.1,0) ;
  point[1].setWorldCoordinates(0.1,-0.1,0) ;
  point[2].setWorldCoordinates(0.1,0.1,0) ;
  point[3].setWorldCoordinates(-0.1,0.1,0) ;

  // sets the desired camera location
  vpHomogeneousMatrix cMo_d(0,0,1,0,0,0) ;

  // computes the 3D point coordinates in the camera frame and their 2D coordinates
  for (int i = 0 ; i < 4 ; i++)
    point[i].project(cMo_d) ;

  // creates the associated features
  vpFeaturePoint pd[4] ;
  for (int i = 0 ; i < 4 ; i++)
    vpFeatureBuilder::create(pd[i],point[i]) ;



  ///////////////////////////////////////
  // Current visual features initialization
  unsigned int height = simu->getInternalHeight();
  unsigned int width = simu->getInternalWidth();

  // Create a greyscale image
  vpImage<unsigned char> I(height,width);

  //Display initialization
#if defined(VISP_HAVE_X11)
  vpDisplayX disp;
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI disp;
#elif defined(VISP_HAVE_GTK)
  vpDisplayGTK disp;
#endif
  disp.init(I,100,100,"Simulation display");
  //  disp(I);
  // Get the current image
  vpTime::wait(500); // wait to be sure the image is generated
  simu->getInternalImage(I);

  // Display the current image
  vpDisplay::display(I);
  vpDisplay::flush(I);

  // Initialize the four dots tracker
  std::cout << "A click in the four dots clockwise. " << std::endl;
  vpDot2 dot[4];
  vpFeaturePoint p[4]; 
  for (int i = 0 ; i < 4 ; i++)
  {
    dot[i].setGraphics(true);
    // Call for a click
    std::cout << "A click in the dot " << i << std::endl;
    dot[i].initTracking(I);
    // Create the associated feature 
    vpFeatureBuilder::create(p[i],cam,dot[i]);
    // flush the display
    vpDisplay::flush(I);
  }  


  /////////////////////////////////
  // Task definition
  vpServo task ;
  // we want an eye-in-hand control law
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
  task.setInteractionMatrixType(vpServo::DESIRED) ;

  // Set the position of the camera in the end-effector frame
  vpHomogeneousMatrix cMe ;
  vpVelocityTwistMatrix cVe(cMe) ;
  task.set_cVe(cVe) ;
  // Set the Jacobian (expressed in the end-effector frame)
  vpMatrix eJe ;
  robot.get_eJe(eJe) ;
  task.set_eJe(eJe) ;

  // we want to see a point on a point
  for (int i = 0 ; i < 4 ; i++)
    task.addFeature(p[i],pd[i]) ;
  // Set the gain
  task.setLambda(1.0) ;
  // Print the current information about the task
  task.print();

  vpTime::wait(500);

  ////////////////////////////////////////////////
  // The control loop
  int k = 0;
  while(k++ < 200){
    double t = vpTime::measureTimeMs();
    
    // Get the current internal camera view and display it
    simu->getInternalImage(I);
    vpDisplay::display(I);

    // Track the four dots and update the associated visual features
    for (int i = 0 ; i < 4 ; i++)
    {
      dot[i].track(I) ;
      vpFeatureBuilder::create(p[i],cam,dot[i]) ;
    }

    // Display the desired and current visual features
    vpServoDisplay::display(task,cam,I) ;
    vpDisplay::flush(I);
    
    // Update the robot Jacobian
    robot.get_eJe(eJe) ;
    task.set_eJe(eJe) ;
    
    // Compute the control law
    vpColVector v = task.computeControlLaw() ;
    
    // Send the computed velocity to the robot and compute the new robot position
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, v) ;
    robot.getPosition(cMo) ;
    
    // Send the robot position to the visualizator
    simu->setCameraPosition(cMo) ;

    
    // Wait 40 ms
    vpTime::wait(t,40); 
  }  
  // Print information about the task
  task.print();
  task.kill();
  simu->closeMainApplication() ;

  return NULL ;
}
int main()
{
#if defined(VISP_HAVE_PTHREAD)
  try {
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    /*
    Top view of the world frame, the camera frame and the object frame

    world, also robot base frame :  --> w_y
                                    |
                                   \|/
                                      w_x

    object :
                     o_y
                  /|\
                   |
             o_x <--


    camera :
                     c_y
                  /|\
                   |
             c_x <--

    */
    vpHomogeneousMatrix wMo(vpTranslationVector(0.40, 0, -0.15),
                            vpRotationMatrix(vpRxyzVector(-M_PI, 0, M_PI/2.)));

    std::vector<vpPoint> point;
    point.push_back( vpPoint(-0.1,-0.1, 0) );
    point.push_back( vpPoint( 0.1,-0.1, 0) );
    point.push_back( vpPoint( 0.1, 0.1, 0) );
    point.push_back( vpPoint(-0.1, 0.1, 0) );

    vpServo task ;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    vpFeaturePoint p[4], pd[4] ;
    for (unsigned int i = 0 ; i < 4 ; i++) {
      point[i].track(cdMo);
      vpFeatureBuilder::create(pd[i], point[i]);
      point[i].track(cMo);
      vpFeatureBuilder::create(p[i], point[i]);
      task.addFeature(p[i], pd[i]);
    }

    vpSimulatorViper850 robot(true);
    robot.setVerbose(true);

    // Enlarge the default joint limits
    vpColVector qmin = robot.getJointMin();
    vpColVector qmax = robot.getJointMax();
    qmin[0] = -vpMath::rad(180);
    qmax[0] =  vpMath::rad(180);
    qmax[1] =  vpMath::rad(0);
    qmax[2] =  vpMath::rad(270);
    qmin[4] = -vpMath::rad(180);
    qmax[4] =  vpMath::rad(180);

    robot.setJointLimit(qmin, qmax);

    std::cout << "Robot joint limits: " << std::endl;
    for (unsigned int i=0; i< qmin.size(); i ++)
      std::cout << "Joint " << i << ": min " << vpMath::deg(qmin[i]) << " max " << vpMath::deg(qmax[i]) << " (deg)" << std::endl;

    robot.init(vpViper850::TOOL_PTGREY_FLEA2_CAMERA, vpCameraParameters::perspectiveProjWithoutDistortion);
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
    robot.initScene(vpWireFrameSimulator::PLATE, vpWireFrameSimulator::D_STANDARD);
    robot.set_fMo(wMo);
    bool ret = true;
#if VISP_VERSION_INT > VP_VERSION_INT(2,7,0)
    ret =
#endif
        robot.initialiseCameraRelativeToObject(cMo);
    if (ret == false)
      return 0; // Not able to set the position
    robot.setDesiredCameraPosition(cdMo);
    // We modify the default external camera position
    robot.setExternalCameraPosition(vpHomogeneousMatrix(vpTranslationVector(-0.4, 0.4, 2),
                                                        vpRotationMatrix(vpRxyzVector(M_PI/2,0,0))));

    vpImage<unsigned char> Iint(480, 640, 255);
#if defined(VISP_HAVE_X11)
    vpDisplayX displayInt(Iint, 700, 0, "Internal view");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI displayInt(Iint, 700, 0, "Internal view");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV displayInt(Iint, 700, 0, "Internal view");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif

    vpCameraParameters cam(840, 840, Iint.getWidth()/2, Iint.getHeight()/2);
    // Modify the camera parameters to match those used in the other simulations
    robot.setCameraParameters(cam);

    bool start = true;
    //for ( ; ; )
    for (int iter =0; iter < 275; iter ++)
    {
      cMo = robot.get_cMo();

      for (unsigned int i = 0 ; i < 4 ; i++) {
        point[i].track(cMo);
        vpFeatureBuilder::create(p[i], point[i]);
      }

      vpDisplay::display(Iint);
      robot.getInternalView(Iint);
      if (!start) {
        display_trajectory(Iint, point, cMo, cam);
        vpDisplay::displayText(Iint, 40, 120, "Click to stop the servo...", vpColor::red);
      }
      vpDisplay::flush(Iint);

      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      // A click to exit
      if (vpDisplay::getClick(Iint, false))
        break;

      if (start) {
        start = false;
        v = 0;
        robot.setVelocity(vpRobot::CAMERA_FRAME, v);
        vpDisplay::displayText(Iint, 40, 120, "Click to start the servo...", vpColor::blue);
        vpDisplay::flush(Iint);
        //vpDisplay::getClick(Iint);
      }

      vpTime::wait(1000*robot.getSamplingTime());
    }
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
  }
#endif
}
int
main(int argc, const char ** argv)
{
  bool opt_display = true;
  bool opt_click_allowed = true;

  // Read the command line options
  if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
    exit (-1);
  }

  vpImage<unsigned char> Iint(512,512,0) ;
  vpImage<unsigned char> Iext(512,512,0) ;

  // We open a window using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
  vpDisplayX displayInt;
  vpDisplayX displayExt;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK displayInt;
  vpDisplayGTK displayExt;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI displayInt;
  vpDisplayGDI displayExt;
#endif

  if (opt_display) {
    try{
      // Display size is automatically defined by the image (Iint) and (Iext) size
      displayInt.init(Iint, 100, 100,"Internal view") ;
      displayExt.init(Iext, (int)(130+Iint.getWidth()), 100, "External view") ;
      // Display the image
      // The image class has a member that points to the display initialized
      // in the display declaration, so it is no longer necessary to reference
      // the display variable explicitly.
      vpDisplay::display(Iint) ;
      vpDisplay::display(Iext) ;
      vpDisplay::flush(Iint) ;
      vpDisplay::flush(Iext) ;
    }
    catch(...)
    {
      vpERROR_TRACE("Error while displaying the image") ;
      exit(-1);
    }
  }

  vpProjectionDisplay externalview ;

  //Set the camera parameters
  double px, py ; px = py = 600 ;
  double u0, v0 ; u0 = v0 = 256 ;

  vpCameraParameters cam(px,py,u0,v0);

  vpServo task ;
  vpSimulatorCamera robot ;

  // sets the initial camera location
  vpHomogeneousMatrix cMo(-0.2,0.1,2,
                          vpMath::rad(5),  vpMath::rad(5),  vpMath::rad(20));

  vpHomogeneousMatrix wMc, wMo;
  robot.getPosition(wMc) ;
  wMo = wMc * cMo; // Compute the position of the object in the world frame

  // sets the final camera location (for simulation purpose)
  vpHomogeneousMatrix cMod(0,0,1,
                           vpMath::rad(0),  vpMath::rad(0),  vpMath::rad(0));

  // sets the cylinder coordinates in the world frame
  vpCylinder cylinder(0,1,0,  // direction
                      0,0,0,  // point of the axis
                      0.1) ;  // radius

  externalview.insert(cylinder) ;

  // sets the desired position of the visual feature
  cylinder.track(cMod) ;
  cylinder.print() ;

  //Build the desired line features from the cylinder, using its parameters in the image frame
  vpFeatureLine ld[2] ;
  int i ;
  for(i=0 ; i < 2 ; i++)
    vpFeatureBuilder::create(ld[i],cylinder,i)  ;

  // computes  the cylinder coordinates in the camera frame and its 2D coordinates
  // sets the current position of the visual feature
  cylinder.track(cMo) ;
  cylinder.print() ;

  //Build the current line features from the cylinder, using its parameters in the image frame
  vpFeatureLine l[2] ;
  for(i=0 ; i < 2 ; i++)
  {
    vpFeatureBuilder::create(l[i],cylinder,i)  ;
    l[i].print() ;
  }

  // define the task
  // - we want an eye-in-hand control law
  // - robot is controlled in the camera frame
  task.setServo(vpServo::EYEINHAND_CAMERA) ;
  task.setInteractionMatrixType(vpServo::DESIRED,vpServo::PSEUDO_INVERSE) ;
  //  it can also be interesting to test these possibilities
  // task.setInteractionMatrixType(vpServo::CURRENT,vpServo::PSEUDO_INVERSE) ;
  // task.setInteractionMatrixType(vpServo::MEAN, vpServo::PSEUDO_INVERSE) ;
  // task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) ;
  // task.setInteractionMatrixType(vpServo::DESIRED,  vpServo::TRANSPOSE) ;
  // task.setInteractionMatrixType(vpServo::CURRENT,  vpServo::TRANSPOSE) ;

  // we want to see  2 lines on 2 lines
  task.addFeature(l[0],ld[0]) ;
  task.addFeature(l[1],ld[1]) ;

  // Set the point of view of the external view
  vpHomogeneousMatrix cextMo(0,0,6,
                             vpMath::rad(40),  vpMath::rad(10),  vpMath::rad(60))   ;

  // Display the initial scene
  vpServoDisplay::display(task,cam,Iint) ;
  externalview.display(Iext,cextMo, cMo, cam, vpColor::red);
  vpDisplay::flush(Iint) ;
  vpDisplay::flush(Iext) ;

  // Display task information
  task.print() ;

  if (opt_display && opt_click_allowed) {
    std::cout << "\n\nClick in the internal camera view window to start..." << std::endl;
    vpDisplay::getClick(Iint) ;
  }

  // set the gain
  task.setLambda(1) ;

  // Display task information
  task.print() ;

  unsigned int iter=0 ;
  // The first loop is needed to reach the desired position
  do
  {
    std::cout << "---------------------------------------------" << iter++ <<std::endl ;
    vpColVector v ;

    // get the robot position
    robot.getPosition(wMc) ;
    // Compute the position of the camera wrt the object frame
    cMo = wMc.inverse() * wMo;

    // new line position
    // retrieve rho, theta and the plane parameters of the vpLine structure
    // Compute the parameters of the cylinder in the camera frame and in the image frame
    cylinder.track(cMo) ;

    //Build the current line features from the cylinder, using its parameters in the image frame
    for(i=0 ; i < 2 ; i++)
    {
      vpFeatureBuilder::create(l[i],cylinder,i)  ;
    }

    // Display the current scene
    if (opt_display) {
      vpDisplay::display(Iint) ;
      vpDisplay::display(Iext) ;
      vpServoDisplay::display(task,cam,Iint) ;
      externalview.display(Iext,cextMo, cMo, cam, vpColor::red);
      vpDisplay::flush(Iint);
      vpDisplay::flush(Iext);
    }

    // compute the control law
    v = task.computeControlLaw() ;

    // send the camera velocity to the controller
    robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

    std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ;
  }
  while(( task.getError() ).sumSquare() >  1e-9) ;


  // The second loop computes the control law while taking the secondary task into account.
  // In this example the secondary task is cut into four steps.
  // The first step imposes a motion of the robot along the x axis of the object frame with a velocity of 0.5.
  // The second step imposes a motion along the y axis of the object frame with a velocity of 0.5.
  // The third step imposes a motion along the x axis of the object frame with a velocity of -0.5.
  // The last step imposes a motion along the y axis of the object frame with a velocity of -0.5.
  // Each step lasts 200 iterations.
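  // secondaryTask(e) projects the requested velocity e onto the null space of
  // the main task, so the added motion does not disturb the converged visual
  // servo. Because the projection changes the magnitude, the "rapport" factor
  // rescales the projected vector so that the component along the chosen axis
  // equals "vitesse"; "tempo" is the length of the full four-step cycle.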
  vpColVector e1(6) ; e1 = 0 ;
  vpColVector e2(6) ; e2 = 0 ;
  vpColVector proj_e1 ;
  vpColVector proj_e2 ;
  iter = 0;
  double rapport = 0;
  double vitesse = 0.5;
  unsigned int tempo = 800;

  while(iter < tempo)
  {
    vpColVector v ;

    robot.getPosition(wMc) ;
    // Compute the position of the camera wrt the object frame
    cMo = wMc.inverse() * wMo;

    cylinder.track(cMo) ;

    for(i=0 ; i < 2 ; i++)
    {
      vpFeatureBuilder::create(l[i],cylinder,i)  ;
    }

    if (opt_display) 
    {
      vpDisplay::display(Iint) ;
      vpDisplay::display(Iext) ;
      vpServoDisplay::display(task,cam,Iint) ;
      externalview.display(Iext,cextMo, cMo, cam, vpColor::red);
      vpDisplay::flush(Iint);
      vpDisplay::flush(Iext);
    }

    v = task.computeControlLaw() ;

    if ( iter%tempo < 200  /*&&  iter%tempo >= 0*/)
    {
      e2 = 0;
      e1[0] = fabs(vitesse)  ;
      proj_e1 = task.secondaryTask(e1);
      rapport = vitesse/proj_e1[0];
      proj_e1 *= rapport ;
      v += proj_e1 ;
    }

    if ( iter%tempo < 400 &&  iter%tempo >= 200)
    {
      e1 = 0;
      e2[1] = fabs(vitesse)  ;
      proj_e2 = task.secondaryTask(e2);
      rapport = vitesse/proj_e2[1];
      proj_e2 *= rapport ;
      v += proj_e2 ;
    }

    if ( iter%tempo < 600 &&  iter%tempo >= 400)
    {
      e2 = 0;
      e1[0] = -fabs(vitesse)  ;
      proj_e1 = task.secondaryTask(e1);
      rapport = -vitesse/proj_e1[0];
      proj_e1 *= rapport ;
      v += proj_e1 ;
    }

    if ( iter%tempo < 800 &&  iter%tempo >= 600)
    {
      e1 = 0;
      e2[1] = -fabs(vitesse)  ;
      proj_e2 = task.secondaryTask(e2);
      rapport = -vitesse/proj_e2[1];
      proj_e2 *= rapport ;
      v += proj_e2 ;
    }

    robot.setVelocity(vpRobot::CAMERA_FRAME, v);

    std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ;

    iter++;
  }

  if (opt_display && opt_click_allowed) {
    std::cout << "\nClick in the internal camera view window to end..." << std::endl;
    vpDisplay::getClick(Iint) ;
  }

  // Display task information
  task.print() ;
  task.kill();
}
int
main()
{
  try
  {
    vpImage<unsigned char> I ;

    vp1394TwoGrabber g;
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I) ;

    g.acquire(I) ;

#ifdef VISP_HAVE_X11
    vpDisplayX display(I,100,100,"Current image") ;
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV display(I,100,100,"Current image") ;
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I,100,100,"Current image") ;
#endif

    vpDisplay::display(I) ;
    vpDisplay::flush(I) ;

    vpServo task ;

    std::cout << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << " Test program for vpServo "  <<std::endl ;
    std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
    std::cout << " Simulation " << std::endl ;
    std::cout << " task : servo a point " << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << std::endl ;


    int i ;
    const int nbline = 2 ;
    vpMeLine line[nbline] ;

    vpMe me ;
    me.setRange(20) ;
    me.setPointsToTrack(100) ;
    me.setThreshold(2000) ;
    me.setSampleStep(10);
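    // Moving-edge tracker settings: the range is the search distance in
    // pixels on each side of the edge normal, and the threshold is (roughly)
    // the minimum likelihood/contrast required to keep a sample point.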

    //Initialize the tracking of the two edges of the cylinder
    for (i=0 ; i < nbline ; i++)
    {
      line[i].setDisplay(vpMeSite::RANGE_RESULT) ;
      line[i].setMe(&me) ;

      line[i].initTracking(I) ;
      line[i].track(I) ;
    }

    vpRobotAfma6 robot ;
    //robot.move("zero.pos") ;

    vpCameraParameters cam ;
    // Update camera parameters
    robot.getCameraParameters (cam, I);

    vpTRACE("sets the current position of the visual feature ") ;
    vpFeatureLine p[nbline] ;
    for (i=0 ; i < nbline ; i++)
      vpFeatureBuilder::create(p[i],cam, line[i])  ;

    vpTRACE("sets the desired position of the visual feature ") ;
    vpCylinder cyld(0,1,0,0,0,0,0.04);

    vpHomogeneousMatrix cMo(0,0,0.5,0,0,vpMath::rad(0));

    cyld.project(cMo);

    vpFeatureLine pd[nbline] ;
    vpFeatureBuilder::create(pd[0],cyld,vpCylinder::line1);
    vpFeatureBuilder::create(pd[1],cyld,vpCylinder::line2);

    //These lines are needed to keep the conventions defined in vpMeLine (those in vpLine are less restrictive).
    //Another way to obtain the coordinates of the desired features is to learn them before executing the program.
    pd[0].setRhoTheta(-fabs(pd[0].getRho()),0);
    pd[1].setRhoTheta(-fabs(pd[1].getRho()),M_PI);

    vpTRACE("define the task") ;
    vpTRACE("\t we want an eye-in-hand control law") ;
    vpTRACE("\t robot is controlled in the camera frame") ;
    task.setServo(vpServo::EYEINHAND_CAMERA) ;
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);

    vpTRACE("\t we want to see a point on a point..") ;
    std::cout << std::endl ;
    for (i=0 ; i < nbline ; i++)
      task.addFeature(p[i],pd[i]) ;

    vpTRACE("\t set the gain") ;
    task.setLambda(0.3) ;


    vpTRACE("Display task information " ) ;
    task.print() ;


    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;

    unsigned int iter=0 ;
    vpTRACE("\t loop") ;
    vpColVector v ;
    vpImage<vpRGBa> Ic ;
    double lambda_av =0.05;
    double alpha = 0.02;
    double beta =3;
    double erreur = 1;


    //First loop to reach the convergence position
    while(erreur > 0.00001)
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;

      try {
        g.acquire(I) ;
        vpDisplay::display(I) ;

        //Track the two edges and update the features
        for (i=0 ; i < nbline ; i++)
        {
          line[i].track(I) ;
          line[i].display(I, vpColor::red) ;

          vpFeatureBuilder::create(p[i],cam,line[i]);

          p[i].display(cam, I,  vpColor::red) ;
          pd[i].display(cam, I,  vpColor::green) ;
        }

        vpDisplay::flush(I) ;

        //Adaptive gain
        double gain ;
        {
          if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
            gain = lambda_av ;
          else
          {
            gain = alpha * exp (-beta * ( task.getError() ).sumSquare() ) +  lambda_av ;
          }
        }
        task.setLambda(gain) ;

        v = task.computeControlLaw() ;

        if (iter==0)  vpDisplay::getClick(I) ;
      }
      catch(...)
      {
        v = 0 ;
        robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
        robot.stopMotion() ;
        exit(1) ;
      }

      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
      erreur = ( task.getError() ).sumSquare();
      vpTRACE("\t\t || s - s* || = %f ", ( task.getError() ).sumSquare()) ;
      iter++;
    }

    /**********************************************************************************************/

    //The second loop computes the control law while taking the secondary task into account.
    vpColVector e1(6) ; e1 = 0 ;
    vpColVector e2(6) ; e2 = 0 ;
    vpColVector proj_e1 ;
    vpColVector proj_e2 ;
    iter = 0;
    double rapport = 0;
    double vitesse = 0.02;
    unsigned int tempo = 1200;

    for ( ; ; )
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;

      try {
        g.acquire(I) ;
        vpDisplay::display(I) ;

        //Track the two edges and update the features
        for (i=0 ; i < nbline ; i++)
        {
          line[i].track(I) ;
          line[i].display(I, vpColor::red) ;

          vpFeatureBuilder::create(p[i],cam,line[i]);

          p[i].display(cam, I,  vpColor::red) ;
          pd[i].display(cam, I,  vpColor::green) ;
        }

        vpDisplay::flush(I) ;

        v = task.computeControlLaw() ;

        //Compute the new control law corresponding to the secondary task
        if ( iter%tempo < 400  /*&&  iter%tempo >= 0*/)
        {
          e2 = 0;
          e1[0] = fabs(vitesse)  ;
          proj_e1 = task.secondaryTask(e1);
          rapport = vitesse/proj_e1[0];
          proj_e1 *= rapport ;
          v += proj_e1 ;
          if ( iter == 199 ) iter+=200;  // needed to make only a half turn during the first cycle
        }

        if ( iter%tempo < 600 &&  iter%tempo >= 400)
        {
          e1 = 0;
          e2[1] = fabs(vitesse)  ;
          proj_e2 = task.secondaryTask(e2);
          rapport = vitesse/proj_e2[1];
          proj_e2 *= rapport ;
          v += proj_e2 ;
        }

        if ( iter%tempo < 1000 &&  iter%tempo >= 600)
        {
          e2 = 0;
          e1[0] = -fabs(vitesse)  ;
          proj_e1 = task.secondaryTask(e1);
          rapport = -vitesse/proj_e1[0];
          proj_e1 *= rapport ;
          v += proj_e1 ;
        }

        if ( iter%tempo < 1200 &&  iter%tempo >= 1000)
        {
          e1 = 0;
          e2[1] = -fabs(vitesse)  ;
          proj_e2 = task.secondaryTask(e2);
          rapport = -vitesse/proj_e2[1];
          proj_e2 *= rapport ;
          v += proj_e2 ;
        }

        robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
      }
      catch(...)
      {
        v = 0 ;
        robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
        robot.stopMotion() ;
        exit(1) ;
      }

      vpTRACE("\t\t || s - s* || = %f ", ( task.getError() ).sumSquare()) ;
      iter++;
    }


    vpTRACE("Display task information " ) ;
    task.print() ;
    task.kill();
  }
  catch (...)
  {
    vpERROR_TRACE(" Test failed") ;
    return 1;
  }
}
Example #18
0
int main()
{
#ifdef VISP_HAVE_GTK
  try {
    std::cout << "ViSP geometric features display example" <<std::endl;
    unsigned int height = 288;
    unsigned int width = 384;
    vpImage<unsigned char> I(height,width);
    I = 255; // I is a white image

    // create a display window
    vpDisplayGTK display;
    // initialize a display attached to image I
    display.init(I,100,100,"ViSP geometric features display");
    // camera parameters to digitize the image plane
    vpCameraParameters cam(600,600,width/2,height/2); // px,py,u0,v0

    // pose of the camera with respect to the scene
    vpTranslationVector t(0,0,1);
    vpRxyzVector rxyz(-M_PI/4,0,0);
    vpRotationMatrix R(rxyz);
    vpHomogeneousMatrix cMo(t, R);

    // scene building, geometric features definition
    vpPoint point;
    point.setWorldCoordinates(0,0,0);// (X0=0,Y0=0,Z0=0)
    vpLine line;
    line.setWorldCoordinates(1,1,0,0,0,0,1,0); // planes:(X+Y=0)&(Z=0)
    vpCylinder cylinder;
    cylinder.setWorldCoordinates(1,-1,0,0,0,0,0.1); // alpha=1,beta=-1,gamma=0,
    // X0=0,Y0=0,Z0=0,R=0.1
    vpCircle circle;
    circle.setWorldCoordinates(0,0,1,0,0,0,0.1); // plane:(Z=0),X0=0,Y0=0,Z0=0,R=0.1
    vpSphere sphere;
    sphere.setWorldCoordinates(0,0,0,0.1); // X0=0,Y0=0,Z0=0,R=0.1

    // change frame to be the camera frame and project features in the image plane
    point.project(cMo);
    line.project(cMo);
    cylinder.project(cMo);
    circle.project(cMo);
    sphere.project(cMo);

    // display the scene
    vpDisplay::display(I); // display I
    // draw the projections of the 3D geometric features in the image plane.
    point.display(I,cam,vpColor::black);   // draw a black cross over I
    line.display(I,cam,vpColor::blue);     // draw a blue line over I
    cylinder.display(I,cam,vpColor::red);  // draw two red lines over I
    circle.display(I,cam,vpColor::orange); // draw an orange ellipse over I
    sphere.display(I,cam,vpColor::black);  // draw a black ellipse over I

    vpDisplay::flush(I);    // flush the display buffer
    std::cout << "A click in the display to exit" << std::endl;
    vpDisplay::getClick(I); // wait for a click in the display to exit

    // save the drawing
    vpImage<vpRGBa> Ic;
    vpDisplay::getImage(I,Ic);
    std::cout << "ViSP creates \"./geometricFeatures.ppm\" B&W image "<< std::endl;
    vpImageIo::write(Ic, "./geometricFeatures.ppm");
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }

#endif
}
Example #19
0
/*!

  \example testFeatureSegment.cpp

  Shows how to build a task with a segment visual feature.

*/
int main(int argc, const char **argv)
{  
  try {
#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    int opt_no_display = 0;
    int opt_curves = 1;
#endif
    int opt_normalized = 1;

    // Parse the command line to set the variables
    vpParseArgv::vpArgvInfo argTable[] =
    {
  #if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
      {"-d", vpParseArgv::ARGV_CONSTANT, 0, (char *) &opt_no_display,
       "Disable display and graphics viewer."},
  #endif
      {"-normalized", vpParseArgv::ARGV_INT, (char*) NULL, (char *) &opt_normalized,
       "1 to use normalized features, 0 for non normalized."},
      {"-h", vpParseArgv::ARGV_HELP, (char*) NULL, (char *) NULL,
       "Print the help."},
      {(char*) NULL, vpParseArgv::ARGV_END, (char*) NULL, (char*) NULL, (char*) NULL}
    } ;

    // Read the command line options
    if(vpParseArgv::parse(&argc, argv, argTable,
                          vpParseArgv::ARGV_NO_LEFTOVERS |
                          vpParseArgv::ARGV_NO_ABBREV |
                          vpParseArgv::ARGV_NO_DEFAULTS)) {
      return (false);
    }

    std::cout << "Used options: " << std::endl;
#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    opt_curves = (opt_no_display == 0) ? 1 : 0;
    std::cout << " - no display: " << opt_no_display << std::endl;
    std::cout << " - curves    : " << opt_curves << std::endl;
#endif
    std::cout << " - normalized: " << opt_normalized << std::endl;

    vpCameraParameters cam(640.,480.,320.,240.);

#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
    vpDisplay *display = NULL;
    if (!opt_no_display) {
#if defined(VISP_HAVE_X11)
      display = new vpDisplayX;
#elif defined VISP_HAVE_GDI
      display = new vpDisplayGDI;
#endif
    }
#endif
    vpImage<unsigned char> I(480,640,0);

#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    if (!opt_no_display)
      display->init(I);
#endif

    vpHomogeneousMatrix wMo; // Set to identity. Robot world frame is equal to object frame
    vpHomogeneousMatrix cMo (-0.5, 0.5, 2., vpMath::rad(10), vpMath::rad(20), vpMath::rad(30));
    vpHomogeneousMatrix cdMo(0., 0., 1., vpMath::rad(0), vpMath::rad(0), vpMath::rad(0));
    vpHomogeneousMatrix wMc; // Camera location in the robot world frame

    vpPoint P[4]; // 4 points in the object frame
    P[0].setWorldCoordinates( .1,  .1, 0.);
    P[1].setWorldCoordinates(-.1,  .1, 0.);
    P[2].setWorldCoordinates(-.1, -.1, 0.);
    P[3].setWorldCoordinates( .1, -.1, 0.);

    vpPoint Pd[4]; // 4 points in the desired camera frame
    for (int i=0; i<4; i++) {
      Pd[i] = P[i];
      Pd[i].project(cdMo);
    }
    vpPoint Pc[4]; // 4 points in the current camera frame
    for (int i=0; i<4; i++) {
      Pc[i] = P[i];
      Pc[i].project(cMo);
    }

    vpFeatureSegment seg_cur[2], seg_des[2]; // Current and desired features
    for (int i=0; i <2; i++)
    {
      if (opt_normalized) {
        seg_cur[i].setNormalized(true);
        seg_des[i].setNormalized(true);
      }
      else {
        seg_cur[i].setNormalized(false);
        seg_des[i].setNormalized(false);
      }
      vpFeatureBuilder::create(seg_cur[i], Pc[i*2], Pc[i*2+1]);
      vpFeatureBuilder::create(seg_des[i], Pd[i*2], Pd[i*2+1]);
      seg_cur[i].print();
      seg_des[i].print();
    }
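
    // Each vpFeatureSegment stacks four scalars built from its two end
    // points: the segment center (xc, yc), its length l and its
    // orientation alpha. With setNormalized(true) the feature essentially
    // uses the normalized values xc/l, yc/l and 1/l instead, which usually
    // yields a better-behaved interaction matrix.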

    //define visual servoing task
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(2.) ;

    for (int i=0; i <2; i++)
      task.addFeature(seg_cur[i], seg_des[i]);

#if (defined (VISP_HAVE_X11) || defined(VISP_HAVE_GDI))
    if (!opt_no_display) {
      vpDisplay::display(I);
      for (int i=0; i <2; i++) {
        seg_cur[i].display(cam, I, vpColor::red);
        seg_des[i].display(cam, I, vpColor::green);
        vpDisplay::flush(I);
      }
    }
#endif

#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    vpPlot *graph = NULL;
    if (opt_curves)
    {
      // Create a 500 by 500 window at position (700, 10) with two graphics
      graph = new vpPlot(2, 500, 500, 700, 10, "Curves...");

      // The first graphic contains 6 curves and the second graphic contains 8 curves
      graph->initGraph(0,6);
      graph->initGraph(1,8);
      //     graph->setTitle(0, "Velocities");
      //     graph->setTitle(1, "Error s-s*");
    }
#endif

    //param robot
    vpSimulatorCamera robot;
    float sampling_time = 0.02f; // Sampling period in seconds
    robot.setSamplingTime(sampling_time);
    robot.setMaxTranslationVelocity(5.);
    robot.setMaxRotationVelocity(vpMath::rad(90.));
    wMc = wMo * cMo.inverse();
    robot.setPosition(wMc);
    int iter=0;

    do {
      double t = vpTime::measureTimeMs();
      wMc = robot.getPosition();
      cMo = wMc.inverse() * wMo;
      for (int i=0; i <4; i++)
        Pc[i].project(cMo);

      for (int i=0; i <2; i++)
        vpFeatureBuilder::create(seg_cur[i], Pc[i*2], Pc[i*2+1]);

#if (defined (VISP_HAVE_X11) || defined(VISP_HAVE_GDI))
      if (!opt_no_display) {
        vpDisplay::display(I);
        for (int i=0; i <2; i++) {
          seg_cur[i].display(cam, I, vpColor::red);
          seg_des[i].display(cam, I, vpColor::green);
          vpDisplay::flush(I);
        }
      }
#endif

      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
      if (opt_curves)
      {
        graph->plot(0, iter, v); // plot velocities applied to the robot
        graph->plot(1, iter, task.getError()); // plot error vector
      }
#endif

      vpTime::wait(t, sampling_time * 1000); // Wait 20 ms (the sampling period)
      iter ++;

    } while(( task.getError() ).sumSquare() > 0.0005);

    // A call to kill() is needed here to properly destroy the current
    // and desired feature lists.
    task.kill();

#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    if (graph != NULL)
      delete graph;
#endif
#if (defined (VISP_HAVE_X11) || defined (VISP_HAVE_GDI))
    if (!opt_no_display && display != NULL)
      delete display;
#endif

    std::cout << "final error=" << ( task.getError() ).sumSquare() << std::endl;
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }
}
Example #20
0
int
main(int argc, const char ** argv)
{
  // Read the command line options
  if (getOptions(argc, argv) == false) {
    exit (-1);
  }

  // Log file creation in /tmp/$USERNAME/log.dat
  // This file contains by line:
  // - the 6 computed camera velocities (m/s, rad/s) to achieve the task
  // - the 6 values of s - s*
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
#ifdef WIN32
  logdirname ="C:/temp/" + username;
#else
  logdirname ="/tmp/" + username;
#endif
  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    }
    catch (...) {
      std::cerr << std::endl
                << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      exit(-1);
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file name
  std::ofstream flog(logfilename.c_str());

  vpServo task ;
  vpRobotCamera robot ;

  std::cout << std::endl ;
  std::cout << "-------------------------------------------------------" << std::endl ;
  std::cout << " Test program for vpServo "  <<std::endl ;
  std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
  std::cout << " Simulation " << std::endl ;
  std::cout << " task :  3D visual servoing " << std::endl ;
  std::cout << "-------------------------------------------------------" << std::endl ;
  std::cout << std::endl ;


  // Sets the initial camera location
  vpPoseVector c_r_o(// Translation tx,ty,tz
		     0.1, 0.2, 2, 
		     // ThetaU rotation 
		     vpMath::rad(20), vpMath::rad(10),  vpMath::rad(50) ) ;
  
  // From the camera pose build the corresponding homogeneous matrix
  vpHomogeneousMatrix cMo(c_r_o) ;

  // Set the robot initial position
  robot.setPosition(cMo) ;

  // Sets the desired camera location
  vpPoseVector cd_r_o(// Translation tx,ty,tz
		      0, 0, 1, 
		      // ThetaU rotation 
		      vpMath::rad(0),vpMath::rad(0),vpMath::rad(0)) ; 
  // From the camera desired pose build the corresponding homogeneous matrix
  vpHomogeneousMatrix cdMo(cd_r_o) ;

  // Compute the homogeneous transformation from the desired camera position to the initial one
  vpHomogeneousMatrix cdMc ;
  cdMc = cdMo*cMo.inverse() ;

  // Build the current visual features s = (c*tc, thetaU_c*Rc)^T
  vpFeatureTranslation t(vpFeatureTranslation::cdMc) ;
  vpFeatureThetaU tu(vpFeatureThetaU::cdRc); // current feature
  t.buildFrom(cdMc) ;
  tu.buildFrom(cdMc) ;

  // Sets the desired features (always zero!) since s already represents
  // the displacement that the camera has to achieve. Here s* = (0, 0)^T
  vpFeatureTranslation td(vpFeatureTranslation::cdMc) ;
  vpFeatureThetaU tud(vpFeatureThetaU::cdRc); // desired feature
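
  // The task thus regulates s = (c*tc, theta u) to s* = (0, 0): both the
  // translation and the theta-u rotation extracted from cdMc vanish when
  // the current camera frame reaches the desired one, which is the
  // classical 3D (position-based) visual servoing scheme.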

  // Define the task
  // - we want an eye-in-hand control law
  // - the robot is controlled in the camera frame
  task.setServo(vpServo::EYEINHAND_CAMERA) ;
  // - we use here the interaction matrix computed with the 
  //   current features
  task.setInteractionMatrixType(vpServo::CURRENT);

  // Add the current and desired visual features
  task.addFeature(t,td) ;   // 3D translation
  task.addFeature(tu,tud) ; // 3D rotation

  // - set the constant gain to 1.0
  task.setLambda(1) ;

  // Display task information
  task.print() ;

  int iter=0 ;
  // Start the visual servoing loop. We stop the servo after 200 iterations
  while(iter++ < 200) {
    std::cout << "-----------------------------------" << iter <<std::endl ;
    vpColVector v ;

    // get the robot position
    robot.getPosition(cMo) ;

    // new displacement to achieve
    cdMc = cdMo*cMo.inverse() ;

    // Update the current visual features
    t.buildFrom(cdMc) ;
    tu.buildFrom(cdMc) ;

    // Compute the control law
    v = task.computeControlLaw() ;

    // Display task information
    if (iter==1) task.print() ;

    // Send the camera velocity to the controller
    robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
      
    // Retrieve the error 
    std::cout << task.error.sumSquare() <<std::endl ;
    
    // Save log
    flog << v.t() << " " << task.error.t() << std::endl;
  }
  // Display task information
  task.print() ;

  // Kill the task
  task.kill();

  // Close the log file
  flog.close();
}
Example #21
0
int
main(int argc, const char ** argv)
{
  try {
    bool opt_click_allowed = true;
    bool opt_display = true;

    // Read the command line options
    if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
      exit (-1);
    }

    // We open two displays, one for the internal camera view, the other one for
    // the external view, using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
    vpDisplayX displayInt;
    vpDisplayX displayExt;
#elif defined VISP_HAVE_GTK
    vpDisplayGTK displayInt;
    vpDisplayGTK displayExt;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI displayInt;
    vpDisplayGDI displayExt;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV displayInt;
    vpDisplayOpenCV displayExt;
#endif

    // open a display for the visualization

    vpImage<unsigned char> Iint(300, 300, 0) ;
    vpImage<unsigned char> Iext(300, 300, 0) ;

    if (opt_display) {
      displayInt.init(Iint,0,0, "Internal view") ;
      displayExt.init(Iext, 330, 0, "External view") ;

    }
    vpProjectionDisplay externalview ;

    double px, py ; px = py = 500 ;
    double u0, v0 ; u0 = 150, v0 = 160 ;

    vpCameraParameters cam(px,py,u0,v0);

    int i ;
    vpServo task ;
    vpSimulatorCamera robot ;

    std::cout << std::endl ;
    std::cout << "----------------------------------------------" << std::endl ;
    std::cout << " Test program for vpServo "  <<std::endl ;
    std::cout << " Eye-in-hand task control, articular velocity are computed"
              << std::endl ;
    std::cout << " Simulation " << std::endl ;
    std::cout << " task : servo 4 points " << std::endl ;
    std::cout << "----------------------------------------------" << std::endl ;
    std::cout << std::endl ;

    // sets the initial camera location
    vpHomogeneousMatrix cMo(-0.1,-0.1,1,
                            vpMath::rad(40),  vpMath::rad(10),  vpMath::rad(60))   ;

    // Compute the position of the object in the world frame
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc) ;
    wMo = wMc * cMo;

    vpHomogeneousMatrix cextMo(0,0,2,
                               0,0,0) ;//vpMath::rad(40),  vpMath::rad(10),  vpMath::rad(60))   ;

    // sets the point coordinates in the object frame
    vpPoint point[4] ;
    point[0].setWorldCoordinates(-0.1,-0.1,0) ;
    point[1].setWorldCoordinates(0.1,-0.1,0) ;
    point[2].setWorldCoordinates(0.1,0.1,0) ;
    point[3].setWorldCoordinates(-0.1,0.1,0) ;

    for (i = 0 ; i < 4 ; i++)
      externalview.insert(point[i]) ;

    // computes the point coordinates in the camera frame and their 2D coordinates
    for (i = 0 ; i < 4 ; i++)
      point[i].track(cMo) ;

    // sets the desired position of the point
    vpFeaturePoint p[4] ;
    for (i = 0 ; i < 4 ; i++)
      vpFeatureBuilder::create(p[i],point[i])  ;  //retrieve x,y and Z of the vpPoint structure

    // sets the desired position of the feature point s*
    vpFeaturePoint pd[4] ;

    pd[0].buildFrom(-0.1,-0.1, 1) ;
    pd[1].buildFrom( 0.1,-0.1, 1) ;
    pd[2].buildFrom( 0.1, 0.1, 1) ;
    pd[3].buildFrom(-0.1, 0.1, 1) ;

    // define the task
    // - we want an eye-in-hand control law
    // - articular velocities are computed
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
    task.setInteractionMatrixType(vpServo::MEAN) ;

    // Set the position of the camera in the end-effector frame
    vpHomogeneousMatrix cMe ;
    vpVelocityTwistMatrix cVe(cMe) ;
    task.set_cVe(cVe) ;

    // Set the Jacobian (expressed in the end-effector frame)
    vpMatrix eJe ;
    robot.get_eJe(eJe) ;
    task.set_eJe(eJe) ;
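
    // With EYEINHAND_L_cVe_eJe the control law is computed in joint space:
    // qdot = -lambda * (L * cVe * eJe)^+ * (s - s*), where cVe twists
    // velocities from the end-effector frame to the camera frame and eJe
    // is the robot Jacobian. Since cMe is left to identity here, the
    // camera and end-effector frames coincide.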

    // we want to see a point on a point
    for (i = 0 ; i < 4 ; i++)
      task.addFeature(p[i],pd[i]) ;

    // set the gain
    task.setLambda(1) ;

    // Display task information " ) ;
    task.print() ;

    unsigned int iter=0 ;
    // loop
    while(iter++<200)
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;
      vpColVector v ;

      // Set the Jacobian (expressed in the end-effector frame)
      // since q is modified eJe is modified
      robot.get_eJe(eJe) ;
      task.set_eJe(eJe) ;

      // get the robot position
      robot.getPosition(wMc) ;
      // Compute the position of the camera wrt the object frame
      cMo = wMc.inverse() * wMo;

      // update new point position and corresponding features
      for (i = 0 ; i < 4 ; i++)
      {
        point[i].track(cMo) ;
        //retrieve x,y and Z of the vpPoint structure
        vpFeatureBuilder::create(p[i],point[i])  ;
      }
      // since the vpServo::MEAN interaction matrix is used, we also need to update the desired features at each iteration
      pd[0].buildFrom(-0.1,-0.1, 1) ;
      pd[1].buildFrom( 0.1,-0.1, 1) ;
      pd[2].buildFrom( 0.1, 0.1, 1) ;
      pd[3].buildFrom(-0.1, 0.1, 1) ;

      if (opt_display) {
        vpDisplay::display(Iint) ;
        vpDisplay::display(Iext) ;
        vpServoDisplay::display(task,cam,Iint) ;
        externalview.display(Iext,cextMo, cMo, cam, vpColor::green) ;
        vpDisplay::flush(Iint);
        vpDisplay::flush(Iext);
      }

      // compute the control law
      v = task.computeControlLaw() ;

      // send the camera velocity to the controller
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ;
    }

    // Display task information
    task.print() ;
    task.kill();

    std::cout <<"Final robot position with respect to the object frame:\n";
    cMo.print();

    if (opt_display && opt_click_allowed) {
      // suppressed for automated tests
      std::cout << "\n\nClick in the internal view window to end..." << std::endl;
      vpDisplay::getClick(Iint) ;
    }
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return 1;
  }
}
Example #22
0
int main()
{
  try {
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    std::vector<vpPoint> point(4) ;
    point[0].setWorldCoordinates(-0.1,-0.1, 0);
    point[1].setWorldCoordinates( 0.1,-0.1, 0);
    point[2].setWorldCoordinates( 0.1, 0.1, 0);
    point[3].setWorldCoordinates(-0.1, 0.1, 0);

    vpServo task ;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    vpFeaturePoint p[4], pd[4] ;
    for (unsigned int i = 0 ; i < 4 ; i++) {
      point[i].track(cdMo);
      vpFeatureBuilder::create(pd[i], point[i]);
      point[i].track(cMo);
      vpFeatureBuilder::create(p[i], point[i]);
      task.addFeature(p[i], pd[i]);
    }
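
    // Each vpFeaturePoint holds the normalized coordinates (x, y) of a
    // point together with its depth Z, which the interaction matrix needs.
    // With four points the stacked error s - s* has 8 components, enough
    // to constrain all 6 degrees of freedom of the camera.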

    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;

    vpImage<unsigned char> Iint(480, 640, 0) ;
    vpImage<unsigned char> Iext(480, 640, 0) ;
#if defined VISP_HAVE_X11
    vpDisplayX displayInt(Iint, 0, 0, "Internal view");
    vpDisplayX displayExt(Iext, 670, 0, "External view");
#elif  defined VISP_HAVE_GDI
    vpDisplayGDI displayInt(Iint, 0, 0, "Internal view");
    vpDisplayGDI displayExt(Iext, 670, 0, "External view");
#elif  defined VISP_HAVE_OPENCV
    vpDisplayOpenCV displayInt(Iint, 0, 0, "Internal view");
    vpDisplayOpenCV displayExt(Iext, 670, 0, "External view");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif

    vpCameraParameters cam(840, 840, Iint.getWidth()/2, Iint.getHeight()/2);
    vpHomogeneousMatrix cextMo(0,0,3, 0,0,0);

    vpWireFrameSimulator sim;
    sim.initScene(vpWireFrameSimulator::PLATE, vpWireFrameSimulator::D_STANDARD);
    sim.setCameraPositionRelObj(cMo);
    sim.setDesiredCameraPosition(cdMo);
    sim.setExternalCameraPosition(cextMo);
    sim.setInternalCameraParameters(cam);
    sim.setExternalCameraParameters(cam);
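
    // The wire-frame simulator renders two synthetic views: the internal
    // view shows what the moving camera sees from cMo (with the desired
    // pose cdMo typically drawn as an overlay), while the external view
    // observes the whole scene from the fixed pose cextMo. Both views use
    // the same camera parameters here.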

    while(1) {
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;
      for (unsigned int i = 0 ; i < 4 ; i++) {
        point[i].track(cMo);
        vpFeatureBuilder::create(p[i], point[i]);
      }
      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      sim.setCameraPositionRelObj(cMo);

      vpDisplay::display(Iint) ;
      vpDisplay::display(Iext) ;

      sim.getInternalImage(Iint);
      sim.getExternalImage(Iext);

      display_trajectory(Iint, point, cMo, cam);
      vpDisplay::flush(Iint);
      vpDisplay::flush(Iext);

      // A click in the internal view to exit
      if (vpDisplay::getClick(Iint, false))
        break;
      vpTime::wait(1000*robot.getSamplingTime());
    }
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
  }
}
Example #23
0
int
main()
{
  try
  {
    vpImage<unsigned char> I ;

    vp1394TwoGrabber g;
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I) ;

    g.acquire(I) ;

#ifdef VISP_HAVE_X11
    vpDisplayX display(I,100,100,"Current image") ;
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV display(I,100,100,"Current image") ;
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I,100,100,"Current image") ;
#endif

    vpDisplay::display(I) ;
    vpDisplay::flush(I);

    vpServo task ;


    std::cout << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << " Test program for vpServo "  <<std::endl ;
    std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
    std::cout << " Simulation " << std::endl ;
    std::cout << " task : servo a line " << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << std::endl ;

    int i ;
    const int nbline = 4 ; // const so it can size the plain arrays below

    vpMeLine line[nbline] ;

    vpMe me ;
    me.setRange(10) ;
    me.setPointsToTrack(100) ;
    me.setThreshold(50000) ;
    me.setSampleStep(10);

    //Initialize the tracking. Define the four lines to track.
    for (i=0 ; i < nbline ; i++)
    {
      line[i].setDisplay(vpMeSite::RANGE_RESULT) ;
      line[i].setMe(&me) ;

      line[i].initTracking(I) ;
      line[i].track(I) ;
    }

    vpRobotAfma6 robot ;
    //robot.move("zero.pos") ;

    vpCameraParameters cam ;
    // Update camera parameters
    robot.getCameraParameters (cam, I);

    vpTRACE("sets the current position of the visual feature ") ;
    vpFeatureLine p[nbline] ;
    for (i=0 ; i < nbline ; i++)
      vpFeatureBuilder::create(p[i],cam, line[i])  ;

    vpTRACE("sets the desired position of the visual feature ") ;
    vpLine lined[nbline];
    lined[0].setWorldCoordinates(1,0,0,0.05,0,0,1,0);
    lined[1].setWorldCoordinates(0,1,0,0.05,0,0,1,0);
    lined[2].setWorldCoordinates(1,0,0,-0.05,0,0,1,0);
    lined[3].setWorldCoordinates(0,1,0,-0.05,0,0,1,0);

    vpHomogeneousMatrix cMo(0,0,0.5,0,0,vpMath::rad(0));

    lined[0].project(cMo);
    lined[1].project(cMo);
    lined[2].project(cMo);
    lined[3].project(cMo);

    // These lines are needed to keep the conventions defined in vpMeLine
    // (those in vpLine are less restrictive).
    // Another way to obtain the coordinates of the desired features is to
    // learn them before executing the program.
    lined[0].setRho(-fabs(lined[0].getRho()));
    lined[0].setTheta(0);
    lined[1].setRho(-fabs(lined[1].getRho()));
    lined[1].setTheta(M_PI/2);
    lined[2].setRho(-fabs(lined[2].getRho()));
    lined[2].setTheta(M_PI);
    lined[3].setRho(-fabs(lined[3].getRho()));
    lined[3].setTheta(-M_PI/2);

    vpFeatureLine pd[nbline] ;

    vpFeatureBuilder::create(pd[0],lined[0]);
    vpFeatureBuilder::create(pd[1],lined[1]);
    vpFeatureBuilder::create(pd[2],lined[2]);
    vpFeatureBuilder::create(pd[3],lined[3]);
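
    // A vpFeatureLine is parameterized by (rho, theta), the projected line
    // satisfying x*cos(theta) + y*sin(theta) - rho = 0 in the normalized
    // image plane. Its interaction matrix also needs the equation of a 3D
    // plane containing the line, which vpFeatureBuilder extracts from the
    // vpLine used to build the feature.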

    vpTRACE("define the task") ;
    vpTRACE("\t we want an eye-in-hand control law") ;
    vpTRACE("\t robot is controlled in the camera frame") ;
    task.setServo(vpServo::EYEINHAND_CAMERA) ;
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);

    vpTRACE("\t we want to see a point on a point..") ;
    std::cout << std::endl ;
    for (i=0 ; i < nbline ; i++)
      task.addFeature(p[i],pd[i]) ;

    vpTRACE("\t set the gain") ;
    task.setLambda(0.2) ;


    vpTRACE("Display task information " ) ;
    task.print() ;

    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;

    unsigned int iter=0 ;
    vpTRACE("\t loop") ;
    vpColVector v ;
    vpImage<vpRGBa> Ic ;
    double lambda_av =0.05;
    double alpha = 0.05 ;
    double beta =3 ;

    for ( ; ; )
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;

      try {
        g.acquire(I) ;
        vpDisplay::display(I) ;

        // Track the lines and update the features
        for (i=0 ; i < nbline ; i++)
        {
          line[i].track(I) ;
          line[i].display(I, vpColor::red) ;

          vpFeatureBuilder::create(p[i],cam,line[i]);

          p[i].display(cam, I,  vpColor::red) ;
          pd[i].display(cam, I,  vpColor::green) ;
        }

        double gain ;
        {
          if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
            gain = lambda_av ;
          else
          {
            gain = alpha * exp (-beta * ( task.getError() ).sumSquare() ) +  lambda_av ;
          }
        }
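
        // The adaptive gain computed above is
        //   gain = alpha * exp(-beta * ||s - s*||^2) + lambda_av.
        // Far from convergence the exponential term vanishes and the gain
        // stays close to lambda_av, limiting the velocities; near
        // convergence it grows towards alpha + lambda_av to speed up the
        // final approach.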

        task.setLambda(gain) ;

        v = task.computeControlLaw() ;

        vpDisplay::flush(I) ;

        if (iter==0)  vpDisplay::getClick(I) ;
        if (v.sumSquare() > 0.5)
        {
          v = 0;
          robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
          robot.stopMotion() ;
          vpDisplay::getClick(I) ;
        }

        robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      }
      catch(...)
      {
        v = 0;
        robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
        robot.stopMotion() ;
        exit(1) ;
      }

      vpTRACE("\t\t || s - s* || = %f ", ( task.getError() ).sumSquare()) ;
      iter++;
    }

    vpTRACE("Display task information " ) ;
    task.print() ;
    task.kill();
  }
  catch (...)
  {
    vpERROR_TRACE(" Test failed") ;
    return 0;
  }
}
Example #24
0
int
main(int argc, const char ** argv)
{
  try {
    bool opt_display = true;
    bool opt_click_allowed = true;

    // Read the command line options
    if (getOptions(argc, argv, opt_click_allowed, opt_display) == false) {
      exit (-1);
    }

    vpImage<unsigned char> I(512,512,0) ;

    // We open a window using either X11, GTK or GDI.
#if defined VISP_HAVE_X11
    vpDisplayX display;
#elif defined VISP_HAVE_GTK
    vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV display;
#endif

    if (opt_display) {
      try{
        // Display size is automatically defined by the image (I) size
        display.init(I, 100, 100,"Camera view...") ;
        // Display the image
        // The image class has a member that specifies a pointer toward
        // the display that has been initialized in the display declaration,
        // therefore it is no longer necessary to make a reference to the
        // display variable.
        vpDisplay::display(I) ;
        vpDisplay::flush(I) ;
      }
      catch(...)
      {
        vpERROR_TRACE("Error while displaying the image") ;
        exit(-1);
      }
    }

    double px, py ; px = py = 600 ;
    double u0, v0 ; u0 = v0 = 256 ;

    vpCameraParameters cam(px,py,u0,v0);

    vpServo task ;
    vpSimulatorCamera robot ;

    // sets the initial camera location
    vpHomogeneousMatrix cMo(0,0,1,
                            vpMath::rad(0),  vpMath::rad(80),  vpMath::rad(30))   ;
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc) ;
    wMo = wMc * cMo; // Compute the position of the object in the world frame

    vpHomogeneousMatrix cMod(-0.1,-0.1,0.7,
                             vpMath::rad(40),  vpMath::rad(10),  vpMath::rad(30))   ;

    // sets the circle coordinates in the world frame
    vpCircle circle ;
    circle.setWorldCoordinates(0,0,1,
                               0,0,0,
                               0.1) ;

    // sets the desired position of the visual feature
    vpFeatureEllipse pd ;
    circle.track(cMod) ;
    vpFeatureBuilder::create(pd,circle)  ;

    // project : computes the circle coordinates in the camera frame and its 2D coordinates
    // sets the current position of the visual feature
    vpFeatureEllipse p ;
    circle.track(cMo) ;
    vpFeatureBuilder::create(p,circle)  ;
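
    // A vpFeatureEllipse stacks five scalars: the center (x, y) of the
    // projected ellipse in the normalized image plane and the normalized
    // centered moments mu20, mu11, mu02 that encode its size and
    // orientation. vpFeatureBuilder fills them from the projected circle.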

    // define the task
    // - we want an eye-in-hand control law
    // - robot is controlled in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA) ;
    task.setInteractionMatrixType(vpServo::DESIRED) ;
    // - we want to see a circle on a circle
    task.addFeature(p,pd) ;
    // - set the gain
    task.setLambda(1) ;

    // Display task information
    task.print() ;

    unsigned int iter=0 ;
    // loop
    while(iter++ < 200)
    {
      std::cout << "---------------------------------------------" << iter <<std::endl ;
      vpColVector v ;

      // get the robot position
      robot.getPosition(wMc) ;
      // Compute the position of the camera wrt the object frame
      cMo = wMc.inverse() * wMo;

      // new circle position
      // retrieve x,y and Z of the vpCircle structure
      circle.track(cMo) ;
      vpFeatureBuilder::create(p,circle);
      circle.print() ;
      p.print() ;

      if (opt_display) {
        vpDisplay::display(I) ;
        vpServoDisplay::display(task,cam,I) ;
        vpDisplay::flush(I) ;
      }

      // compute the control law
      v = task.computeControlLaw() ;
      std::cout << "task rank: " << task.getTaskRank() <<std::endl ;
      // send the camera velocity to the controller
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<std::endl ;
    }

    // Display task information
    task.print() ;
    task.kill();

    if (opt_display && opt_click_allowed) {
      std::cout << "Click in the camera view window to end..." << std::endl;
      vpDisplay::getClick(I) ;
    }
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return 1;
  }
}
Example #25
0
int
main(int argc, const char ** argv)
{
  // Read the command line options
  if (getOptions(argc, argv) == false) {
    exit (-1);
  }

  // Log file creation in /tmp/$USERNAME/log.dat
  // This file contains by line:
  // - the 6 computed camera velocities (m/s, rad/s) to achieve the task
  // - the 6 values of s - s*
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
#ifdef WIN32
  logdirname ="C:/temp/" + username;
#else
  logdirname ="/tmp/" + username;
#endif
  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    }
    catch (...) {
      std::cerr << std::endl
                << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      exit(-1);
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file name
  std::ofstream flog(logfilename.c_str());

  vpRobotCamera robot ;

  std::cout << std::endl ;
  std::cout << "-------------------------------------------------------" << std::endl ;
  std::cout << " Test program for vpServo "  <<std::endl ;
  std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
  std::cout << " Simulation " << std::endl ;
  std::cout << " task :  3D visual servoing " << std::endl ;
  std::cout << "-------------------------------------------------------" << std::endl ;
  std::cout << std::endl ;

  // Sets the initial camera location
  vpPoseVector c_r_o(// Translation tx,ty,tz
		     0.1, 0.2, 2, 
		     // ThetaU rotation 
		     vpMath::rad(20), vpMath::rad(10),  vpMath::rad(50) ) ;

  // From the camera pose build the corresponding homogeneous matrix
  vpHomogeneousMatrix cMo(c_r_o) ;

  // Set the robot initial position
  robot.setPosition(cMo) ;

  // Sets the desired camera location
  vpPoseVector cd_r_o(// Translation tx,ty,tz
		      0, 0, 1, 
		      // ThetaU rotation 
		      vpMath::rad(0),vpMath::rad(0),vpMath::rad(0)) ; 

  // From the camera desired pose build the corresponding homogeneous matrix
  vpHomogeneousMatrix cdMo(cd_r_o) ;

  vpHomogeneousMatrix cMcd; // Transformation between current and desired camera frame
  vpRotationMatrix cRcd; // Rotation between current and desired camera frame
 
  // Set the constant gain of the servo
  double lambda = 1;

  int iter=0 ;
  // Start the visual servoing loop. We stop the servo after 200 iterations
  while(iter++ < 200) {
    std::cout << "------------------------------------" << iter <<std::endl ;

    // get the robot position
    robot.getPosition(cMo) ;

    // new displacement to achieve
    cMcd = cMo*cdMo.inverse() ;
      
    // Extract the translation vector ctc* which is the current
    // translational visual feature. 
    vpTranslationVector ctcd;
    cMcd.extract(ctcd);
    // Compute the current theta U visual feature
    vpThetaUVector tu_cRcd(cMcd);

    // Create the identity matrix
    vpMatrix I(3,3);
    I.setIdentity();

    // Compute the camera translational velocity
    vpColVector v(3);
    v = lambda * ( I - vpColVector::skew(tu_cRcd) ) * ctcd; 
    // Compute the camera rotational velocity
    vpColVector w(3);
    w = lambda * tu_cRcd;

    // Update the complete camera velocity vector
    vpColVector velocity(6);
    for (int i=0; i<3; i++) {
      velocity[i]   = v[i]; // Translational velocity
      velocity[i+3] = w[i]; // Rotational velocity
    }
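
    // As coded here this is the 3D visual servoing law built directly on
    // the current-to-desired transformation cMc*:
    //   v = lambda * (I - [theta u]x) * ctc*   (translational part)
    //   w = lambda * theta u                   (rotational part)
    // written without vpServo; both ctc* and the theta-u rotation decay to
    // zero as the camera reaches the desired pose.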

    // Send the camera velocity to the controller
    robot.setVelocity(vpRobot::CAMERA_FRAME, velocity) ;

    // Retrieve the error (s-s*)
    std::cout << ctcd.t() << " " << tu_cRcd.t() << std::endl;

    // Save log
    flog << velocity.t() << " " << ctcd.t() << " " << tu_cRcd.t() << std::endl;
  }

  // Close the log file
  flog.close();
}